| index (int64, 0–10k) | blob_id (string, length 40) | step-1 (string, 13–984k chars) | step-2 (string, 6–1.23M chars, nullable) | step-3 (string, 15–1.34M chars, nullable) | step-4 (string, 30–1.34M chars, nullable) | step-5 (string, 64–1.2M chars, nullable) | step-ids (sequence, length 1–5) |
|---|---|---|---|---|---|---|---|
500 | 040942e2e09b5c2df5c08207b9c033471b117608 |
from flask import Flask, url_for, render_template, request
import os
import blescan
import sys
import requests
import logging
from logging.handlers import RotatingFileHandler
import json
from datetime import datetime
import bluetooth._bluetooth as bluez
app = Flask(__name__)
@app.route('/sivut/')
def default_page():
dev_id = 0
try:
sock = bluez.hci_open_dev(dev_id)
app.logger.info("ble thread started")
except:
app.logger.info("error accessing bluetooth device...")
sys.exit(1)
blescan.hci_le_set_scan_parameters(sock)
blescan.hci_enable_le_scan(sock)
returnedList = blescan.parse_events(sock, 10)
app.logger.info(returnedList)
print "----------"
setti = set()
stop_name = ""
for beacon in returnedList:
if '2f234454cf6d4a0fadf2f4911ba9ffa6' in beacon:
app.logger.info("beacon loydetty")
r = requests.get("http://stop2.herokuapp.com/stop/2f234454-cf6d-4a0f-adf2-f4911ba9ffa6")
content = r.content
content = json.loads(content)
stop_name = content['stop_name']
palautus = "<h3>Press button to stop bus:</h3> "
for asd in content['schedule']:
setti.add(asd['line'])
arrival = datetime.fromtimestamp(int(asd['arrival'])).strftime('%H:%M')
palautus += " <div class='btn btn-lg stop_bus' style='margin:5px;color:white;background:#F092CD;' id='" + asd['line'] + "'>" + asd['line'] + " " + arrival \
+ "</div> "
content = palautus
break
else:
content = "<h3>You're not near stop</h3>"
app.logger.info("beacon EI loydetty")
return render_template('index_templatelocal.html', content=content, setti=setti, stop_name=stop_name)
@app.route('/stops')
def show_stops():
stops = '''
{"name": "718 to Rautatientori (HSL:1020201)", "stops": [
{"code": "3032", "name": "Valtimontie", "gtfsId": "HSL:1240123"},
{"code": "3030", "name": "Sumatrantie", "gtfsId": "HSL:1240106"},
{"code": "3028", "name": "Kumpulan kampus", "gtfsId": "HSL:1240118"},
{"code": "3024", "name": "Vallilan varikko", "gtfsId": "HSL:1220104"},
{"code": "3022", "name": "Ristikkokatu", "gtfsId": "HSL:1220102"},
{"code": "2410", "name": "S\u00f6rn\u00e4inen(M)", "gtfsId": "HSL:1113131"},
{"code": "2404", "name": "Haapaniemi", "gtfsId": "HSL:1112126"},
{"code": "2402", "name": "Hakaniemi", "gtfsId": "HSL:1111114"},
{"code": null, "name": "Rautatientori", "gtfsId": "HSL:1020201"}]}
'''
return render_template('show_stops.html', stops=json.loads(stops))
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5050))
handler = RotatingFileHandler('foo.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
app.run(host='0.0.0.0', port = port)
| null | null | null | null | [
0
] |
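
Row 500 above is a single-step sample: a Flask endpoint that scans for a known iBeacon UUID and, on a match, renders the stop's timetable as buttons. The one non-obvious transformation is turning the API's epoch-seconds `arrival` field into a clock time; a minimal sketch of that step, using invented sample data whose shape matches what the code expects:

```python
# Hedged sketch of the schedule-formatting step from row 500.
# The payload below is invented; only its shape matches the code above.
import json
from datetime import datetime

payload = json.loads('{"stop_name": "Valtimontie", "schedule": '
                     '[{"line": "718", "arrival": "1500000000"}]}')
for entry in payload['schedule']:
    arrival = datetime.fromtimestamp(int(entry['arrival'])).strftime('%H:%M')
    print(entry['line'], arrival)  # e.g. "718 07:40" (local-time dependent)
```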
501 | 9ab3dd87f17ac75a3831e9ec1f0746ad81fad70d | <mask token>
class MyIde:
<mask token>
class Laptop:
def code(self, ide):
ide.execute()
<mask token>
| class PyCharm:
<mask token>
class MyIde:
def execute(self):
print('MyIde running')
class Laptop:
def code(self, ide):
ide.execute()
<mask token>
| class PyCharm:
def execute(self):
print('pycharm ide running')
class MyIde:
def execute(self):
print('MyIde running')
class Laptop:
def code(self, ide):
ide.execute()
<mask token>
obj.code(ide)
| class PyCharm:
def execute(self):
print('pycharm ide running')
class MyIde:
def execute(self):
print('MyIde running')
class Laptop:
def code(self, ide):
ide.execute()
ide = MyIde()
obj = Laptop()
obj.code(ide)
|
# Any object containing execute(self) method is considered to be IDE App
# this is Duck typing concept
class PyCharm:
def execute(self):
print("pycharm ide runnig")
class MyIde:
def execute(self):
print("MyIde running")
class Laptop:
def code(self,ide):
ide.execute()
ide=MyIde()
obj=Laptop()
obj.code(ide)
| [
3,
5,
7,
8,
9
] |
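
Row 501's final code states the idea in its own comments: any object with an `execute(self)` method counts as an IDE app, which is duck typing. A short sketch of the same pattern with a hypothetical third class, to make the point that no common base class is involved:

```python
# Duck-typing sketch: Laptop.code() accepts anything exposing execute().
# VsCode is a hypothetical stand-in, unrelated to PyCharm/MyIde by inheritance.
class VsCode:
    def execute(self):
        print('VsCode running')

class Laptop:
    def code(self, ide):
        ide.execute()  # only the presence of execute() matters at call time

Laptop().code(VsCode())  # prints: VsCode running
```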
502 | 6d61df9ac072100d01a1ce3cf7b4c056f66a163c | <mask token>
class SnakeGame(object):
<mask token>
def reset(self):
return SnakeGame._get_image(self.surface)
def step(self, key):
length = self.snake.length
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
self.done = True
act = [UP, DOWN, LEFT, RIGHT]
self.snake.point(act[key])
self.surface.fill((255, 255, 255))
try:
self.snake.move()
except SnakeException:
self.done = True
if self.done:
state = SnakeGame._get_image(self.surface)
return state, length, self.done, {}
check_eat(self.snake, self.apple)
self.snake.draw(self.surface)
self.apple.draw(self.surface)
font = pygame.font.Font(None, 36)
text = font.render(str(self.snake.length), 1, (10, 10, 10))
text_pos = text.get_rect()
text_pos.centerx = 20
self.surface.blit(text, text_pos)
self.screen.blit(self.surface, (0, 0))
state = SnakeGame._get_image(self.surface)
pygame.display.flip()
pygame.display.update()
self.fpsClock.tick(self.fps + self.snake.length / 3)
return state, self.snake.length, False, {}
<mask token>
| <mask token>
class SnakeGame(object):
<mask token>
def reset(self):
return SnakeGame._get_image(self.surface)
def step(self, key):
length = self.snake.length
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
self.done = True
act = [UP, DOWN, LEFT, RIGHT]
self.snake.point(act[key])
self.surface.fill((255, 255, 255))
try:
self.snake.move()
except SnakeException:
self.done = True
if self.done:
state = SnakeGame._get_image(self.surface)
return state, length, self.done, {}
check_eat(self.snake, self.apple)
self.snake.draw(self.surface)
self.apple.draw(self.surface)
font = pygame.font.Font(None, 36)
text = font.render(str(self.snake.length), 1, (10, 10, 10))
text_pos = text.get_rect()
text_pos.centerx = 20
self.surface.blit(text, text_pos)
self.screen.blit(self.surface, (0, 0))
state = SnakeGame._get_image(self.surface)
pygame.display.flip()
pygame.display.update()
self.fpsClock.tick(self.fps + self.snake.length / 3)
return state, self.snake.length, False, {}
@staticmethod
def _get_image(surface):
ret = list(map(lambda x: list(x), np.zeros((SCREEN_HEIGHT,
SCREEN_WIDTH))))
for j in range(SCREEN_HEIGHT):
for k in range(SCREEN_WIDTH):
ret[j][k] = surface.get_at((k, j))
return np.array(ret)
| <mask token>
class SnakeGame(object):
def __init__(self):
self.screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT),
0, 32)
self.surface = pygame.Surface(self.screen.get_size())
self.surface = self.surface.convert()
self.surface.fill((255, 255, 255))
self.clock = pygame.time.Clock()
self.fps = 60
self.done = False
pygame.key.set_repeat(1, 40)
self.screen.blit(self.surface, (0, 0))
pygame.init()
self.fpsClock = pygame.time.Clock()
self.snake = Snake()
self.apple = Apple()
def reset(self):
return SnakeGame._get_image(self.surface)
def step(self, key):
length = self.snake.length
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
self.done = True
act = [UP, DOWN, LEFT, RIGHT]
self.snake.point(act[key])
self.surface.fill((255, 255, 255))
try:
self.snake.move()
except SnakeException:
self.done = True
if self.done:
state = SnakeGame._get_image(self.surface)
return state, length, self.done, {}
check_eat(self.snake, self.apple)
self.snake.draw(self.surface)
self.apple.draw(self.surface)
font = pygame.font.Font(None, 36)
text = font.render(str(self.snake.length), 1, (10, 10, 10))
text_pos = text.get_rect()
text_pos.centerx = 20
self.surface.blit(text, text_pos)
self.screen.blit(self.surface, (0, 0))
state = SnakeGame._get_image(self.surface)
pygame.display.flip()
pygame.display.update()
self.fpsClock.tick(self.fps + self.snake.length / 3)
return state, self.snake.length, False, {}
@staticmethod
def _get_image(surface):
ret = list(map(lambda x: list(x), np.zeros((SCREEN_HEIGHT,
SCREEN_WIDTH))))
for j in range(SCREEN_HEIGHT):
for k in range(SCREEN_WIDTH):
ret[j][k] = surface.get_at((k, j))
return np.array(ret)
| import pygame
import sys
import time
import random
from snake_gym.envs.modules import *
from pygame.locals import *
import numpy as np
class SnakeGame(object):
def __init__(self):
self.screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT),
0, 32)
self.surface = pygame.Surface(self.screen.get_size())
self.surface = self.surface.convert()
self.surface.fill((255, 255, 255))
self.clock = pygame.time.Clock()
self.fps = 60
self.done = False
pygame.key.set_repeat(1, 40)
self.screen.blit(self.surface, (0, 0))
pygame.init()
self.fpsClock = pygame.time.Clock()
self.snake = Snake()
self.apple = Apple()
def reset(self):
return SnakeGame._get_image(self.surface)
def step(self, key):
length = self.snake.length
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
self.done = True
act = [UP, DOWN, LEFT, RIGHT]
self.snake.point(act[key])
self.surface.fill((255, 255, 255))
try:
self.snake.move()
except SnakeException:
self.done = True
if self.done:
state = SnakeGame._get_image(self.surface)
return state, length, self.done, {}
check_eat(self.snake, self.apple)
self.snake.draw(self.surface)
self.apple.draw(self.surface)
font = pygame.font.Font(None, 36)
text = font.render(str(self.snake.length), 1, (10, 10, 10))
text_pos = text.get_rect()
text_pos.centerx = 20
self.surface.blit(text, text_pos)
self.screen.blit(self.surface, (0, 0))
state = SnakeGame._get_image(self.surface)
pygame.display.flip()
pygame.display.update()
self.fpsClock.tick(self.fps + self.snake.length / 3)
return state, self.snake.length, False, {}
@staticmethod
def _get_image(surface):
ret = list(map(lambda x: list(x), np.zeros((SCREEN_HEIGHT,
SCREEN_WIDTH))))
for j in range(SCREEN_HEIGHT):
for k in range(SCREEN_WIDTH):
ret[j][k] = surface.get_at((k, j))
return np.array(ret)
| null | [
3,
4,
5,
6
] |
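
Row 502 wraps a pygame Snake game in the classic gym-style interface: `reset()` returns an observation and `step(action)` returns `(state, reward, done, info)`. Below is a sketch of the episode loop that interface implies, assuming `SnakeGame` and its `snake_gym` dependencies from the row above are importable. As an aside, the per-pixel `surface.get_at()` capture in `_get_image` is slow; `pygame.surfarray.array3d(surface)` is the usual faster way to grab a frame.

```python
# Hedged sketch of an episode loop over the gym-style SnakeGame above.
# Assumes the SnakeGame class (and snake_gym modules) are importable.
import random

env = SnakeGame()
state = env.reset()               # initial frame as a numpy array
done = False
while not done:
    action = random.randrange(4)  # 0-3 map to UP/DOWN/LEFT/RIGHT
    state, score, done, info = env.step(action)
print('final snake length:', score)
```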
503 | d69bffb85d81ab3969bfe7dfe2759fa809890208 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('articals', '0001_initial')]
operations = [migrations.AddField(model_name='artical', name='thumb',
field=models.ImageField(blank=True, default='default.png',
upload_to='media/'))]
| from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('articals', '0001_initial')]
operations = [migrations.AddField(model_name='artical', name='thumb',
field=models.ImageField(blank=True, default='default.png',
upload_to='media/'))]
| # Generated by Django 3.1.1 on 2020-10-07 04:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articals', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='artical',
name='thumb',
field=models.ImageField(blank=True, default='default.png', upload_to='media/'),
),
]
| [
0,
1,
2,
3,
4
] |
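
Row 503 is a generated Django migration that adds an image field. The model-side change it encodes would look roughly as follows; the `Artical` class name is inferred from `model_name='artical'`, and `ImageField` requires Pillow to be installed:

```python
# Sketch of the model definition this AddField migration corresponds to
# (class name assumed from model_name='artical' in the migration).
from django.db import models

class Artical(models.Model):
    thumb = models.ImageField(blank=True, default='default.png',
                              upload_to='media/')
```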
504 | 2ff85ac059f160fcc6b39b4298e8216cbad77ab3 | <mask token>
def get_version():
return '2.01'
<mask token>
def restart():
print('restart')
try:
emby_wsocket.stop()
except:
sys.exit()
sys.exit()
print('restart finished')
def save_config(config_file, config):
with open(config_file, 'w') as fw:
json.dump(config, fw, indent=4)
fw.close
try:
emby_wsocket.ws_config = config
emby_wsocket.EmbySession.config = config
except:
emby_wsocket.ws_config = config
<mask token>
def cargar_config(config_file, tv_path, av_path, lang_path):
with open(config_file, 'r') as f:
config = json.load(f)
f.close
config['Version'] = get_version()
default = config.get('Autoscript', False)
config['Autoscript'] = default
default = config.get('enable_all_libraries', False)
config['enable_all_libraries'] = default
default = config.get('TV_model', '')
config['TV_model'] = default
default = config.get('TV_SOURCES', [])
config['TV_SOURCES'] = default
default = config.get('AV_model', '')
config['AV_model'] = default
default = config.get('AV_SOURCES', [])
config['AV_SOURCES'] = default
default = config.get('TV_script_init', '')
config['TV_script_init'] = default
default = config.get('TV_script_end', '')
config['TV_script_end'] = default
default = config.get('av_delay_hdmi', 0)
config['av_delay_hdmi'] = default
default = config.get('AV_Port', 23)
config['AV_Port'] = default
default = config.get('timeout_oppo_mount', 60)
config['timeout_oppo_mount'] = default
default = config.get('language', 'es-ES')
config['language'] = default
default = config.get('default_nfs', False)
config['default_nfs'] = default
default = config.get('wait_nfs', False)
config['wait_nfs'] = default
default = config.get('refresh_time', 5)
config['refresh_time'] = default
default = config.get('check_beta', False)
config['check_beta'] = default
default = config.get('smbtrick', False)
config['smbtrick'] = default
default = config.get('BRDisc', False)
config['BRDisc'] = default
edit_server = 0
server_list = config['servers']
for server in server_list:
default = server.get('Test_OK', False)
server_list[edit_server]['Test_OK'] = default
edit_server = edit_server + 1
if config['TV'] == 'True':
config['TV'] = True
if config['TV'] == 'False':
config['TV'] = False
if config['AV'] == 'True':
config['AV'] = True
if config['AV'] == 'False':
config['AV'] = False
config['servers'] = server_list
config['tv_dirs'] = get_dir_folders(tv_path)
config['av_dirs'] = get_dir_folders(av_path)
config['langs'] = get_dir_folders(lang_path)
return config
def check_version(config):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
print(config['check_beta'])
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
xno_version = get_version()
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
print(xno_version)
print(last_version)
if xno_version < last_version:
resp['new_version'] = True
else:
resp['new_version'] = False
print(resp)
return resp
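# Caveat on the comparison above: '<' between version strings is
# lexicographic, so '2.10' < '2.9' evaluates True. A numeric tuple
# compare avoids that, e.g. (illustrative sketch, not part of this file):
#   def ver_tuple(v):
#       return tuple(int(p) for p in v.split('.'))
#   ver_tuple('2.10') > ver_tuple('2.9')  # True, as intended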
def update_version(config, vers_path, cwd):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +
last_version_file)
headers = {}
response2 = requests.get(url2, headers=headers)
filename = vers_path + last_version_file
with open(filename, 'wb') as f:
f.write(response2.content)
f.close()
shutil.unpack_archive(filename, cwd)
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
if config['TV'] == True and config['TV_model'] != '':
move_files(tv_path + config['TV_model'], lib_path)
if config['AV'] == True and config['AV_model'] != '':
move_files(av_path + config['AV_model'], lib_path)
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
resp['new_version'] = False
return resp
def cargar_lang(config_file):
with open(config_file.encode(sys.getfilesystemencoding()), 'r',
encoding='latin-1') as f:
config = json.load(f)
f.close
return config
def leer_file(web_file):
with open(web_file, 'r', encoding='utf8') as f:
num = f.read()
f.close
return num
def leer_img(web_file):
with open(web_file, 'rb') as f:
num = f.read()
f.close
return num
def test_path(config, server):
rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)
result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])
return result2
def get_mount_path(movie, server_data):
movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])
movie = movie.replace('\\\\', '\\')
movie = movie.replace('\\', '/')
word = '/'
inicio = movie.find(word)
inicio = inicio + 1
final = movie.find(word, inicio, len(movie))
servidor = movie[inicio:final]
ultimo = final + 1
result = final + 1
while result > 0:
ultimo = result + 1
result = movie.find(word, ultimo, len(movie))
fichero = movie[ultimo:len(movie)]
final = final + 1
ultimo = ultimo - 1
carpeta = movie[final:ultimo]
resultado = {}
resultado['Servidor'] = servidor
resultado['Carpeta'] = carpeta
resultado['Fichero'] = fichero
return resultado
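# Worked example of the mapping above (hypothetical server_data values):
#   server = {'Emby_Path': '\\\\NAS\\video', 'Oppo_Path': '/NAS/video'}
#   get_mount_path('\\\\NAS\\video\\Films\\Dune\\Dune.mkv', server)
#   -> {'Servidor': 'NAS', 'Carpeta': 'video/Films/Dune', 'Fichero': 'Dune.mkv'}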
def test_mount_path(config, servidor, carpeta):
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
while response_data6f.find('devicelist":[]') > 0:
time.sleep(1)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('QPW', config)
device_list = json.loads(response_data6f)
if config['DebugLevel'] > 0:
print(device_list)
nfs = config['default_nfs']
for device in device_list['devicelist']:
if device['name'].upper() == servidor.upper():
if device['sub_type'] == 'nfs':
nfs = True
break
else:
nfs = False
break
if nfs:
response_login = LoginNFS(config, servidor)
else:
response_login = LoginSambaWithOutID(config, servidor)
if config['Always_ON'] == False:
time.sleep(5)
response_data6b = getsetupmenu(config)
if nfs:
response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',
config)
else:
response_mount = mountSharedFolder(servidor, carpeta, '', '',
config)
response = json.loads(response_mount)
if config['Autoscript'] == True:
result = umountSharedFolder(config)
if response['success'] == True:
a = 'OK'
else:
a = 'FAILURE'
return a
else:
print(
'Cannot connect; check the settings or that the OPPO is powered on or in standby'
)
def test_emby(config):
try:
EmbySession = EmbyHttp(config)
user_info = EmbySession.user_info
if user_info['SessionInfo']['Id'] != '':
return 'OK'
else:
return 'FAILED'
except:
return 'FAILED'
<mask token>
def carga_libraries(config):
try:
EmbySession = EmbyHttp(config)
views_list = EmbySession.get_user_views(EmbySession.user_info[
'User']['Id'])
libraries = []
for view in views_list:
library = {}
library['Name'] = view['Name']
library['Id'] = view['Id']
library['Active'] = False
try:
lib_list = config['Libraries']
except:
lib_list = {}
for lib in lib_list:
if lib['Id'] == view['Id']:
library['Active'] = lib['Active']
libraries.append(library)
config['Libraries'] = libraries
return 0
except:
return 1
def is_library_active(config, libraryname):
for library in config['Libraries']:
if library['Name'] == libraryname:
return library['Active']
return False
def get_selectableFolders(config):
EmbySession = EmbyHttp(config)
MediaFolders = EmbySession.get_emby_selectablefolders()
servers = []
for Folder in MediaFolders:
index = 1
active = is_library_active(config, Folder['Name'])
if config['enable_all_libraries'] == True:
active = True
if active == True:
for SubFolder in Folder['SubFolders']:
server = {}
server['Id'] = SubFolder['Id']
if index > 1:
server['name'] = Folder['Name'] + '(' + str(index) + ')'
else:
server['name'] = Folder['Name']
server['Emby_Path'] = SubFolder['Path']
server['Oppo_Path'] = '/'
try:
serv_list = config['servers']
except:
serv_list = {}
for serv in serv_list:
if server['Emby_Path'] == serv['Emby_Path']:
server['name'] = serv['name']
server['Oppo_Path'] = serv['Oppo_Path']
server['Test_OK'] = serv['Test_OK']
servers.append(server)
index = index + 1
config['servers'] = servers
def get_dir_folders(directory):
os.chdir(directory)
dirs = os.listdir('.')
encontrado = False
list_dir = []
for x in dirs:
if os.path.isdir(x):
list_dir.append(x)
return list_dir
<mask token>
def get_devices(config):
try:
EmbySession = EmbyHttp(config)
devices = EmbySession.get_emby_devices()
index = 0
dev_temp = []
for device in devices['Items']:
try:
if device['Id'] != 'Xnoppo':
device['Name'] = device['Name'] + ' / ' + device['AppName']
device['Id'] = device['ReportedDeviceId']
dev_temp.append(device)
except:
pass
config['devices'] = dev_temp
return 'OK'
except:
return 'FAILURE'
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/emby_conf.html':
i = leer_file(html_path + 'emby_conf.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/oppo_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'oppo_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/lib_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'lib_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/path_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'path_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/tv_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'tv_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/av_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'av_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/other_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'other_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/status.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'status.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/help.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'help.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/remote.html':
i = leer_file(html_path + 'remote.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/android-chrome-36x36.png':
self.send_response(200)
self.send_header('Content-type', 'image/png')
self.end_headers()
i = leer_img(resource_path + 'android-chrome-36x36.png')
self.wfile.write(bytes(i))
return 0
if self.path == '/av-receiver-icon-2.jpg':
self.send_response(200)
self.send_header('Content-type', 'image/jpeg')
self.end_headers()
i = leer_img(resource_path + 'av-receiver-icon-2.jpg')
self.wfile.write(bytes(i))
return 0
if self.path == '/dragon.png':
self.send_response(200)
self.send_header('Content-type', 'image/png')
self.end_headers()
i = leer_img(resource_path + 'dragon.png')
self.wfile.write(bytes(i))
return 0
if self.path == '/xnoppo_config':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_lib':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
carga_libraries(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_dev':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_devices(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/check_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = check_version(config)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/update_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = update_version(config, vers_path, cwd)
restart()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/get_state':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = get_state()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/restart':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
a = 'Restarting'
self.wfile.write(bytes(a, 'utf-8'))
restart()
if self.path == '/refresh_paths':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_selectableFolders(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/lang':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = cargar_lang(lang_path + config['language'] + separador +
'lang.js')
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path.find('/send_key?') >= 0:
get_data = self.path
print(get_data)
a = len('/send_key?sendkey=')
b = get_data[a:len(get_data)]
print(b)
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if b == 'PON':
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
if config['BRDisc'] == True:
time.sleep(1)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
else:
response_data_on = sendremotekey(b, config)
self.send_response(200)
self.send_header('Content-type', 'text')
self.end_headers()
a = 'ok'
self.wfile.write(bytes(a, 'utf-8'))
return 0
if self.path == '/log.txt':
self.send_response(200)
self.send_header('Content-type', 'text')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')
self.wfile.write(bytes(a))
return 0
else:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(
'<html><head><title>https://pythonbasics.org</title></head>',
'utf-8'))
self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))
self.wfile.write(bytes('<body>', 'utf-8'))
self.wfile.write(bytes('<p>This is an example web server.</p>',
'utf-8'))
self.wfile.write(bytes('</body></html>', 'utf-8'))
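# (Sketch) The repeated serve-a-file branches above could be table-driven:
#   ROUTES = {'/emby_conf.html': ('text/html', 'emby_conf.html'), ...}
# so a single dict lookup replaces the if-chain. This is an illustrative
# refactor sketch under that assumption, not code from the original project.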
def do_POST(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/save_config':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
if self.path == '/check_emby':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_emby(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
status = get_state()
if status['Playstate'] == 'Not_Connected':
save_config(cwd + separador + 'config.json', config)
emby_wsocket.ws_config = config
restart()
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/check_oppo':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_oppo(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/test_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
server = json.loads(post_data.decode('utf-8'))
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = test_path(config, server)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(server))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(server), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/navigate_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
path_obj = json.loads(post_data.decode('utf-8'))
path = path_obj['path']
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = navigate_folder(path, config)
a_json = json.dumps(a)
print(len(a_json))
self.send_response(200)
self.send_header('Content-Length', len(a_json))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/move_tv':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(tv_path + config['TV_model'], lib_path)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
restart()
return 0
if self.path == '/move_av':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(av_path + config['AV_model'], lib_path)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
restart()
return 0
if self.path == '/get_tv_key':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_key(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_conn':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_test_conn(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_tv_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_sources(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_av_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_hdmi_list(config)
if a != None:
config['AV_SOURCES'] = a
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_init':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_end':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_set_prev(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_on':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_check_power(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_off':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_power_off(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_hdmi':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
<mask token>
| <mask token>
def get_version():
return '2.01'
def thread_function(ws_object):
print('Thread: starting')
ws_object.start()
print('Thread: finishing')
def restart():
print('restart')
try:
emby_wsocket.stop()
except:
sys.exit()
sys.exit()
print('restart finished')
def save_config(config_file, config):
with open(config_file, 'w') as fw:
json.dump(config, fw, indent=4)
fw.close
try:
emby_wsocket.ws_config = config
emby_wsocket.EmbySession.config = config
except:
emby_wsocket.ws_config = config
def get_state():
status = {}
status['Version'] = get_version()
try:
status['Playstate'] = emby_wsocket.EmbySession.playstate
status['playedtitle'] = emby_wsocket.EmbySession.playedtitle
status['server'] = emby_wsocket.EmbySession.server
status['folder'] = emby_wsocket.EmbySession.folder
status['filename'] = emby_wsocket.EmbySession.filename
status['CurrentData'] = emby_wsocket.EmbySession.currentdata
except:
status['Playstate'] = 'Not_Connected'
status['playedtitle'] = ''
status['server'] = ''
status['folder'] = ''
status['filename'] = ''
status['CurrentData'] = ''
status['cpu_perc'] = psutil.cpu_percent()
status['mem_perc'] = psutil.virtual_memory().percent
print(psutil.virtual_memory().percent)
print(status)
return status
def cargar_config(config_file, tv_path, av_path, lang_path):
with open(config_file, 'r') as f:
config = json.load(f)
f.close
config['Version'] = get_version()
default = config.get('Autoscript', False)
config['Autoscript'] = default
default = config.get('enable_all_libraries', False)
config['enable_all_libraries'] = default
default = config.get('TV_model', '')
config['TV_model'] = default
default = config.get('TV_SOURCES', [])
config['TV_SOURCES'] = default
default = config.get('AV_model', '')
config['AV_model'] = default
default = config.get('AV_SOURCES', [])
config['AV_SOURCES'] = default
default = config.get('TV_script_init', '')
config['TV_script_init'] = default
default = config.get('TV_script_end', '')
config['TV_script_end'] = default
default = config.get('av_delay_hdmi', 0)
config['av_delay_hdmi'] = default
default = config.get('AV_Port', 23)
config['AV_Port'] = default
default = config.get('timeout_oppo_mount', 60)
config['timeout_oppo_mount'] = default
default = config.get('language', 'es-ES')
config['language'] = default
default = config.get('default_nfs', False)
config['default_nfs'] = default
default = config.get('wait_nfs', False)
config['wait_nfs'] = default
default = config.get('refresh_time', 5)
config['refresh_time'] = default
default = config.get('check_beta', False)
config['check_beta'] = default
default = config.get('smbtrick', False)
config['smbtrick'] = default
default = config.get('BRDisc', False)
config['BRDisc'] = default
edit_server = 0
server_list = config['servers']
for server in server_list:
default = server.get('Test_OK', False)
server_list[edit_server]['Test_OK'] = default
edit_server = edit_server + 1
if config['TV'] == 'True':
config['TV'] = True
if config['TV'] == 'False':
config['TV'] = False
if config['AV'] == 'True':
config['AV'] = True
if config['AV'] == 'False':
config['AV'] = False
config['servers'] = server_list
config['tv_dirs'] = get_dir_folders(tv_path)
config['av_dirs'] = get_dir_folders(av_path)
config['langs'] = get_dir_folders(lang_path)
return config
def check_version(config):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
print(config['check_beta'])
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
xno_version = get_version()
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
print(xno_version)
print(last_version)
if xno_version < last_version:
resp['new_version'] = True
else:
resp['new_version'] = False
print(resp)
return resp
def update_version(config, vers_path, cwd):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +
last_version_file)
headers = {}
response2 = requests.get(url2, headers=headers)
filename = vers_path + last_version_file
with open(filename, 'wb') as f:
f.write(response2.content)
f.close()
shutil.unpack_archive(filename, cwd)
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
if config['TV'] == True and config['TV_model'] != '':
move_files(tv_path + config['TV_model'], lib_path)
if config['AV'] == True and config['AV_model'] != '':
move_files(av_path + config['AV_model'], lib_path)
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
resp['new_version'] = False
return resp
def cargar_lang(config_file):
with open(config_file.encode(sys.getfilesystemencoding()), 'r',
encoding='latin-1') as f:
config = json.load(f)
f.close
return config
def leer_file(web_file):
with open(web_file, 'r', encoding='utf8') as f:
num = f.read()
f.close
return num
def leer_img(web_file):
with open(web_file, 'rb') as f:
num = f.read()
f.close
return num
def test_path(config, server):
rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)
result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])
return result2
def get_mount_path(movie, server_data):
movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])
movie = movie.replace('\\\\', '\\')
movie = movie.replace('\\', '/')
word = '/'
inicio = movie.find(word)
inicio = inicio + 1
final = movie.find(word, inicio, len(movie))
servidor = movie[inicio:final]
ultimo = final + 1
result = final + 1
while result > 0:
ultimo = result + 1
result = movie.find(word, ultimo, len(movie))
fichero = movie[ultimo:len(movie)]
final = final + 1
ultimo = ultimo - 1
carpeta = movie[final:ultimo]
resultado = {}
resultado['Servidor'] = servidor
resultado['Carpeta'] = carpeta
resultado['Fichero'] = fichero
return resultado
def test_mount_path(config, servidor, carpeta):
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
while response_data6f.find('devicelist":[]') > 0:
time.sleep(1)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('QPW', config)
device_list = json.loads(response_data6f)
if config['DebugLevel'] > 0:
print(device_list)
nfs = config['default_nfs']
for device in device_list['devicelist']:
if device['name'].upper() == servidor.upper():
if device['sub_type'] == 'nfs':
nfs = True
break
else:
nfs = False
break
if nfs:
response_login = LoginNFS(config, servidor)
else:
response_login = LoginSambaWithOutID(config, servidor)
if config['Always_ON'] == False:
time.sleep(5)
response_data6b = getsetupmenu(config)
if nfs:
response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',
config)
else:
response_mount = mountSharedFolder(servidor, carpeta, '', '',
config)
response = json.loads(response_mount)
if config['Autoscript'] == True:
result = umountSharedFolder(config)
if response['success'] == True:
a = 'OK'
else:
a = 'FAILURE'
return a
else:
print(
'Cannot connect; check the settings or that the OPPO is powered on or in standby'
)
def test_emby(config):
try:
EmbySession = EmbyHttp(config)
user_info = EmbySession.user_info
if user_info['SessionInfo']['Id'] != '':
return 'OK'
else:
return 'FAILED'
except:
return 'FAILED'
def test_oppo(config):
result = check_socket(config)
if result == 0:
return 'OK'
else:
return 'FAILED'
def carga_libraries(config):
try:
EmbySession = EmbyHttp(config)
views_list = EmbySession.get_user_views(EmbySession.user_info[
'User']['Id'])
libraries = []
for view in views_list:
library = {}
library['Name'] = view['Name']
library['Id'] = view['Id']
library['Active'] = False
try:
lib_list = config['Libraries']
except:
lib_list = {}
for lib in lib_list:
if lib['Id'] == view['Id']:
library['Active'] = lib['Active']
libraries.append(library)
config['Libraries'] = libraries
return 0
except:
return 1
def is_library_active(config, libraryname):
for library in config['Libraries']:
if library['Name'] == libraryname:
return library['Active']
return False
def get_selectableFolders(config):
EmbySession = EmbyHttp(config)
MediaFolders = EmbySession.get_emby_selectablefolders()
servers = []
for Folder in MediaFolders:
index = 1
active = is_library_active(config, Folder['Name'])
if config['enable_all_libraries'] == True:
active = True
if active == True:
for SubFolder in Folder['SubFolders']:
server = {}
server['Id'] = SubFolder['Id']
if index > 1:
server['name'] = Folder['Name'] + '(' + str(index) + ')'
else:
server['name'] = Folder['Name']
server['Emby_Path'] = SubFolder['Path']
server['Oppo_Path'] = '/'
try:
serv_list = config['servers']
except:
serv_list = {}
for serv in serv_list:
if server['Emby_Path'] == serv['Emby_Path']:
server['name'] = serv['name']
server['Oppo_Path'] = serv['Oppo_Path']
server['Test_OK'] = serv['Test_OK']
servers.append(server)
index = index + 1
config['servers'] = servers
def get_dir_folders(directory):
os.chdir(directory)
dirs = os.listdir('.')
encontrado = False
list_dir = []
for x in dirs:
if os.path.isdir(x):
list_dir.append(x)
return list_dir
<mask token>
def get_devices(config):
try:
EmbySession = EmbyHttp(config)
devices = EmbySession.get_emby_devices()
index = 0
dev_temp = []
for device in devices['Items']:
try:
if device['Id'] != 'Xnoppo':
device['Name'] = device['Name'] + ' / ' + device['AppName']
device['Id'] = device['ReportedDeviceId']
dev_temp.append(device)
except:
pass
config['devices'] = dev_temp
return 'OK'
except:
return 'FAILURE'
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/emby_conf.html':
i = leer_file(html_path + 'emby_conf.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/oppo_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'oppo_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/lib_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'lib_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/path_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'path_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/tv_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'tv_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/av_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'av_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/other_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'other_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/status.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'status.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/help.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'help.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/remote.html':
i = leer_file(html_path + 'remote.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/android-chrome-36x36.png':
self.send_response(200)
self.send_header('Content-type', 'image/png')
self.end_headers()
i = leer_img(resource_path + 'android-chrome-36x36.png')
self.wfile.write(bytes(i))
return 0
if self.path == '/av-receiver-icon-2.jpg':
self.send_response(200)
self.send_header('Content-type', 'image/jpeg')
self.end_headers()
i = leer_img(resource_path + 'av-receiver-icon-2.jpg')
self.wfile.write(bytes(i))
return 0
if self.path == '/dragon.png':
self.send_response(200)
self.send_header('Content-type', 'image/png')
self.end_headers()
i = leer_img(resource_path + 'dragon.png')
self.wfile.write(bytes(i))
return 0
if self.path == '/xnoppo_config':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_lib':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
carga_libraries(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_dev':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_devices(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/check_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = check_version(config)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/update_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = update_version(config, vers_path, cwd)
restart()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/get_state':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = get_state()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/restart':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
a = 'Restarting'
self.wfile.write(bytes(a, 'utf-8'))
            restart()
            return 0
if self.path == '/refresh_paths':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_selectableFolders(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/lang':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = cargar_lang(lang_path + config['language'] + separador +
'lang.js')
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path.find('/send_key?') >= 0:
get_data = self.path
print(get_data)
a = len('/send_key?sendkey=')
b = get_data[a:len(get_data)]
print(b)
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if b == 'PON':
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
if config['BRDisc'] == True:
time.sleep(1)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
else:
response_data_on = sendremotekey(b, config)
self.send_response(200)
self.send_header('Content-type', 'text')
self.end_headers()
a = 'ok'
self.wfile.write(bytes(a, 'utf-8'))
return 0
if self.path == '/log.txt':
self.send_response(200)
self.send_header('Content-type', 'text')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')
self.wfile.write(bytes(a))
return 0
else:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(
'<html><head><title>https://pythonbasics.org</title></head>',
'utf-8'))
self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))
self.wfile.write(bytes('<body>', 'utf-8'))
self.wfile.write(bytes('<p>This is an example web server.</p>',
'utf-8'))
self.wfile.write(bytes('</body></html>', 'utf-8'))
def do_POST(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
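        # Every POST route below follows the same pattern: read the JSON body,
        # call the matching helper, then reply 200 with the updated config as
        # JSON on success or 300 with the literal string 'ERROR' on failure
        # (the web UI presumably only checks the status code).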
if self.path == '/save_config':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
            response = json.dumps(config).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Length', len(response))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(response)
            return 0
if self.path == '/check_emby':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_emby(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
status = get_state()
if status['Playstate'] == 'Not_Connected':
save_config(cwd + separador + 'config.json', config)
emby_wsocket.ws_config = config
restart()
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/check_oppo':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_oppo(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/test_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
server = json.loads(post_data.decode('utf-8'))
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = test_path(config, server)
if a == 'OK':
                response = json.dumps(server).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/navigate_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
path_obj = json.loads(post_data.decode('utf-8'))
path = path_obj['path']
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = navigate_folder(path, config)
            a_json = json.dumps(a).encode('utf-8')
            print(len(a_json))
            self.send_response(200)
            self.send_header('Content-Length', len(a_json))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(a_json)
return 0
if self.path == '/move_tv':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(tv_path + config['TV_model'], lib_path)
            response = json.dumps(config).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Length', len(response))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(response)
restart()
return 0
if self.path == '/move_av':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(av_path + config['AV_model'], lib_path)
            response = json.dumps(config).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Length', len(response))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(response)
restart()
return 0
if self.path == '/get_tv_key':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_key(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_conn':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_test_conn(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_tv_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_sources(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_av_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_hdmi_list(config)
            if a is not None:
                config['AV_SOURCES'] = a
                save_config(cwd + separador + 'config.json', config)
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_init':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_change_hdmi(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_end':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_set_prev(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_on':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_check_power(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_off':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_power_off(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_hdmi':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_change_hdmi(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
<mask token>
| <mask token>
def get_version():
return '2.01'
def thread_function(ws_object):
print('Thread: starting')
ws_object.start()
print('Thread: finishing')
def restart():
print('restart')
    try:
        emby_wsocket.stop()
    except:
        pass
    # Always exit; an external launcher/service is expected to respawn us.
    sys.exit()
def save_config(config_file, config):
    with open(config_file, 'w') as fw:
        json.dump(config, fw, indent=4)
try:
emby_wsocket.ws_config = config
emby_wsocket.EmbySession.config = config
except:
emby_wsocket.ws_config = config
def get_state():
status = {}
status['Version'] = get_version()
try:
status['Playstate'] = emby_wsocket.EmbySession.playstate
status['playedtitle'] = emby_wsocket.EmbySession.playedtitle
status['server'] = emby_wsocket.EmbySession.server
status['folder'] = emby_wsocket.EmbySession.folder
status['filename'] = emby_wsocket.EmbySession.filename
status['CurrentData'] = emby_wsocket.EmbySession.currentdata
except:
status['Playstate'] = 'Not_Connected'
status['playedtitle'] = ''
status['server'] = ''
status['folder'] = ''
status['filename'] = ''
status['CurrentData'] = ''
status['cpu_perc'] = psutil.cpu_percent()
status['mem_perc'] = psutil.virtual_memory().percent
print(psutil.virtual_memory().percent)
print(status)
return status
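# get_state() feeds the /get_state endpoint that status.html polls; the psutil
# figures let the page show CPU and memory load next to the playback info.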
def cargar_config(config_file, tv_path, av_path, lang_path):
    with open(config_file, 'r') as f:
        config = json.load(f)
config['Version'] = get_version()
default = config.get('Autoscript', False)
config['Autoscript'] = default
default = config.get('enable_all_libraries', False)
config['enable_all_libraries'] = default
default = config.get('TV_model', '')
config['TV_model'] = default
default = config.get('TV_SOURCES', [])
config['TV_SOURCES'] = default
default = config.get('AV_model', '')
config['AV_model'] = default
default = config.get('AV_SOURCES', [])
config['AV_SOURCES'] = default
default = config.get('TV_script_init', '')
config['TV_script_init'] = default
default = config.get('TV_script_end', '')
config['TV_script_end'] = default
default = config.get('av_delay_hdmi', 0)
config['av_delay_hdmi'] = default
default = config.get('AV_Port', 23)
config['AV_Port'] = default
default = config.get('timeout_oppo_mount', 60)
config['timeout_oppo_mount'] = default
default = config.get('language', 'es-ES')
config['language'] = default
default = config.get('default_nfs', False)
config['default_nfs'] = default
default = config.get('wait_nfs', False)
config['wait_nfs'] = default
default = config.get('refresh_time', 5)
config['refresh_time'] = default
default = config.get('check_beta', False)
config['check_beta'] = default
default = config.get('smbtrick', False)
config['smbtrick'] = default
default = config.get('BRDisc', False)
config['BRDisc'] = default
edit_server = 0
server_list = config['servers']
for server in server_list:
default = server.get('Test_OK', False)
server_list[edit_server]['Test_OK'] = default
edit_server = edit_server + 1
if config['TV'] == 'True':
config['TV'] = True
if config['TV'] == 'False':
config['TV'] = False
if config['AV'] == 'True':
config['AV'] = True
if config['AV'] == 'False':
config['AV'] = False
config['servers'] = server_list
config['tv_dirs'] = get_dir_folders(tv_path)
config['av_dirs'] = get_dir_folders(av_path)
config['langs'] = get_dir_folders(lang_path)
return config
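# cargar_config() upgrades old config.json files in place: every option added
# in a later release is read with config.get(key, default), so missing keys
# get a sane default instead of raising KeyError.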
def check_version(config):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
print(config['check_beta'])
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
xno_version = get_version()
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
print(xno_version)
print(last_version)
if xno_version < last_version:
resp['new_version'] = True
else:
resp['new_version'] = False
print(resp)
return resp
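# Note: versions are compared as strings, which works for the zero-padded
# scheme used here ('2.01' < '2.02') but would misorder something like
# '2.1' vs '2.10'.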
def update_version(config, vers_path, cwd):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +
last_version_file)
headers = {}
response2 = requests.get(url2, headers=headers)
filename = vers_path + last_version_file
with open(filename, 'wb') as f:
f.write(response2.content)
f.close()
shutil.unpack_archive(filename, cwd)
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
    tv_path = (cwd + separador + 'web' + separador + 'libraries' +
        separador + 'TV' + separador)
    av_path = (cwd + separador + 'web' + separador + 'libraries' +
        separador + 'AV' + separador)
    lib_path = cwd + separador + 'lib' + separador
if config['TV'] == True and config['TV_model'] != '':
move_files(tv_path + config['TV_model'], lib_path)
if config['AV'] == True and config['AV_model'] != '':
move_files(av_path + config['AV_model'], lib_path)
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
resp['new_version'] = False
return resp
def cargar_lang(config_file):
    with open(config_file.encode(sys.getfilesystemencoding()), 'r',
        encoding='latin-1') as f:
        config = json.load(f)
return config
def leer_file(web_file):
    with open(web_file, 'r', encoding='utf8') as f:
        num = f.read()
return num
def leer_img(web_file):
    with open(web_file, 'rb') as f:
        num = f.read()
return num
def test_path(config, server):
rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)
result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])
return result2
def get_mount_path(movie, server_data):
movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])
movie = movie.replace('\\\\', '\\')
movie = movie.replace('\\', '/')
word = '/'
inicio = movie.find(word)
inicio = inicio + 1
final = movie.find(word, inicio, len(movie))
servidor = movie[inicio:final]
ultimo = final + 1
result = final + 1
while result > 0:
ultimo = result + 1
result = movie.find(word, ultimo, len(movie))
fichero = movie[ultimo:len(movie)]
final = final + 1
ultimo = ultimo - 1
carpeta = movie[final:ultimo]
resultado = {}
resultado['Servidor'] = servidor
resultado['Carpeta'] = carpeta
resultado['Fichero'] = fichero
return resultado
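# Worked example (hypothetical values): with Emby_Path '\\NAS\media' mapped to
# Oppo_Path '/NAS/media', the movie '\\NAS\media\Films\Movie.mkv' is rewritten
# to '/NAS/media/Films/Movie.mkv' and split into Servidor='NAS',
# Carpeta='media/Films', Fichero='Movie.mkv', i.e. the share host, the folder
# below the share and the file name that the OPPO mount calls expect.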
def test_mount_path(config, servidor, carpeta):
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
while response_data6f.find('devicelist":[]') > 0:
time.sleep(1)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('QPW', config)
device_list = json.loads(response_data6f)
if config['DebugLevel'] > 0:
print(device_list)
nfs = config['default_nfs']
for device in device_list['devicelist']:
if device['name'].upper() == servidor.upper():
if device['sub_type'] == 'nfs':
nfs = True
break
else:
nfs = False
break
if nfs:
response_login = LoginNFS(config, servidor)
else:
response_login = LoginSambaWithOutID(config, servidor)
if config['Always_ON'] == False:
time.sleep(5)
response_data6b = getsetupmenu(config)
if nfs:
response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',
config)
else:
response_mount = mountSharedFolder(servidor, carpeta, '', '',
config)
response = json.loads(response_mount)
if config['Autoscript'] == True:
result = umountSharedFolder(config)
if response['success'] == True:
a = 'OK'
else:
a = 'FAILURE'
return a
    else:
        print(
            'Cannot connect: check the configuration and that the OPPO is powered on or in standby'
            )
        return 'FAILURE'
def test_emby(config):
try:
EmbySession = EmbyHttp(config)
user_info = EmbySession.user_info
if user_info['SessionInfo']['Id'] != '':
return 'OK'
else:
return 'FAILED'
except:
return 'FAILED'
def test_oppo(config):
result = check_socket(config)
if result == 0:
return 'OK'
else:
return 'FAILED'
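# check_socket() appears to come from the imported Xnoppo helper modules; a
# return value of 0 is used throughout this file to mean "the OPPO control
# port answered".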
def carga_libraries(config):
try:
EmbySession = EmbyHttp(config)
views_list = EmbySession.get_user_views(EmbySession.user_info[
'User']['Id'])
libraries = []
for view in views_list:
library = {}
library['Name'] = view['Name']
library['Id'] = view['Id']
library['Active'] = False
try:
lib_list = config['Libraries']
except:
lib_list = {}
for lib in lib_list:
if lib['Id'] == view['Id']:
library['Active'] = lib['Active']
libraries.append(library)
config['Libraries'] = libraries
return 0
except:
return 1
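# carga_libraries() merges the freshly fetched Emby views with any 'Active'
# flags already stored in the config, so library selections survive a reload.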
def is_library_active(config, libraryname):
for library in config['Libraries']:
if library['Name'] == libraryname:
return library['Active']
return False
def get_selectableFolders(config):
EmbySession = EmbyHttp(config)
MediaFolders = EmbySession.get_emby_selectablefolders()
servers = []
for Folder in MediaFolders:
index = 1
active = is_library_active(config, Folder['Name'])
if config['enable_all_libraries'] == True:
active = True
if active == True:
for SubFolder in Folder['SubFolders']:
server = {}
server['Id'] = SubFolder['Id']
if index > 1:
server['name'] = Folder['Name'] + '(' + str(index) + ')'
else:
server['name'] = Folder['Name']
server['Emby_Path'] = SubFolder['Path']
server['Oppo_Path'] = '/'
try:
serv_list = config['servers']
except:
serv_list = {}
for serv in serv_list:
if server['Emby_Path'] == serv['Emby_Path']:
server['name'] = serv['name']
server['Oppo_Path'] = serv['Oppo_Path']
server['Test_OK'] = serv['Test_OK']
servers.append(server)
index = index + 1
config['servers'] = servers
def get_dir_folders(directory):
os.chdir(directory)
dirs = os.listdir('.')
list_dir = []
for x in dirs:
if os.path.isdir(x):
list_dir.append(x)
return list_dir
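# Note that get_dir_folders() (and move_files() below) os.chdir() into the
# target directory, changing the process-wide working directory; the handlers
# stay safe by always building absolute paths from __file__.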
def move_files(src, dest):
os.chdir(src)
src_files = os.listdir('.')
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if os.path.isfile(full_file_name):
shutil.copy(full_file_name, dest)
return 0
def get_devices(config):
try:
EmbySession = EmbyHttp(config)
devices = EmbySession.get_emby_devices()
index = 0
dev_temp = []
for device in devices['Items']:
try:
if device['Id'] != 'Xnoppo':
device['Name'] = device['Name'] + ' / ' + device['AppName']
device['Id'] = device['ReportedDeviceId']
dev_temp.append(device)
except:
pass
config['devices'] = dev_temp
return 'OK'
except:
return 'FAILURE'
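# get_devices() flattens the Emby device list for the device dropdown: each
# entry is renamed to 'Name / AppName' and re-keyed by ReportedDeviceId, while
# Xnoppo's own session is skipped.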
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/emby_conf.html':
i = leer_file(html_path + 'emby_conf.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/oppo_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'oppo_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/lib_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'lib_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/path_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'path_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/tv_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'tv_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/av_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'av_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/other_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'other_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/status.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'status.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/help.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'help.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/remote.html':
i = leer_file(html_path + 'remote.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/android-chrome-36x36.png':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_img(resource_path + 'android-chrome-36x36.png')
self.wfile.write(bytes(i))
return 0
if self.path == '/av-receiver-icon-2.jpg':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_img(resource_path + 'av-receiver-icon-2.jpg')
self.wfile.write(bytes(i))
return 0
if self.path == '/dragon.png':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_img(resource_path + 'dragon.png')
self.wfile.write(bytes(i))
return 0
if self.path == '/xnoppo_config':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_lib':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
carga_libraries(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_dev':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_devices(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/check_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = check_version(config)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/update_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = update_version(config, vers_path, cwd)
restart()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/get_state':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = get_state()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/restart':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
a = 'Restarting'
self.wfile.write(bytes(a, 'utf-8'))
            restart()
            return 0
if self.path == '/refresh_paths':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_selectableFolders(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/lang':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = cargar_lang(lang_path + config['language'] + separador +
'lang.js')
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path.find('/send_key?') >= 0:
get_data = self.path
print(get_data)
a = len('/send_key?sendkey=')
b = get_data[a:len(get_data)]
print(b)
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if b == 'PON':
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
if config['BRDisc'] == True:
time.sleep(1)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
else:
response_data_on = sendremotekey(b, config)
self.send_response(200)
self.send_header('Content-type', 'text')
self.end_headers()
a = 'ok'
self.wfile.write(bytes(a, 'utf-8'))
return 0
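        # 'PON' is special-cased above: powering on needs the full sign-in
        # handshake first, and with BRDisc enabled the remote key is sent a
        # second time (presumably to get past the loaded disc's own menu).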
if self.path == '/log.txt':
self.send_response(200)
self.send_header('Content-type', 'text')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')
self.wfile.write(bytes(a))
return 0
else:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(
'<html><head><title>https://pythonbasics.org</title></head>',
'utf-8'))
self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))
self.wfile.write(bytes('<body>', 'utf-8'))
self.wfile.write(bytes('<p>This is an example web server.</p>',
'utf-8'))
self.wfile.write(bytes('</body></html>', 'utf-8'))
def do_POST(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/save_config':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
            response = json.dumps(config).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Length', len(response))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(response)
            return 0
if self.path == '/check_emby':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_emby(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
status = get_state()
if status['Playstate'] == 'Not_Connected':
save_config(cwd + separador + 'config.json', config)
emby_wsocket.ws_config = config
restart()
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/check_oppo':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_oppo(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/test_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
server = json.loads(post_data.decode('utf-8'))
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = test_path(config, server)
if a == 'OK':
                response = json.dumps(server).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/navigate_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
path_obj = json.loads(post_data.decode('utf-8'))
path = path_obj['path']
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = navigate_folder(path, config)
            a_json = json.dumps(a).encode('utf-8')
            print(len(a_json))
            self.send_response(200)
            self.send_header('Content-Length', len(a_json))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(a_json)
return 0
if self.path == '/move_tv':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(tv_path + config['TV_model'], lib_path)
            response = json.dumps(config).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Length', len(response))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(response)
restart()
return 0
if self.path == '/move_av':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(av_path + config['AV_model'], lib_path)
            response = json.dumps(config).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Length', len(response))
            self.send_header('Content-Type', 'application/json')
            self.send_header('Access-Control-Allow-Credentials', 'true')
            self.send_header('Access-Control-Allow-Origin', '*')
            self.end_headers()
            self.wfile.write(response)
restart()
return 0
if self.path == '/get_tv_key':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_key(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_conn':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_test_conn(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_tv_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_sources(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_av_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_hdmi_list(config)
            if a is not None:
                config['AV_SOURCES'] = a
                save_config(cwd + separador + 'config.json', config)
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_init':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_change_hdmi(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_end':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_set_prev(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_on':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_check_power(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_off':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_power_off(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_hdmi':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_change_hdmi(config)
if a == 'OK':
                response = json.dumps(config).encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Length', len(response))
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Credentials', 'true')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(response)
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
<mask token>
| import http.server
import socketserver
from http.server import BaseHTTPRequestHandler, HTTPServer
import time
import json
import io
import urllib
import requests
from lib.Emby_ws import xnoppo_ws
from lib.Emby_http import *
from lib.Xnoppo import *
from lib.Xnoppo_TV import *
import lib.Xnoppo_AVR
import shutil
import asyncio
import threading
import logging
import logging.handlers
import psutil
import os
import sys
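# The helpers below are shared by the websocket playback thread and the small
# configuration web UI served by the MyServer handler further down.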
def get_version():
return '2.01'
def thread_function(ws_object):
print('Thread: starting')
ws_object.start()
print('Thread: finishing')
def restart():
print('restart')
    try:
        emby_wsocket.stop()
    except:
        pass
    # Always exit; an external launcher/service is expected to respawn us.
    sys.exit()
def save_config(config_file, config):
    with open(config_file, 'w') as fw:
        json.dump(config, fw, indent=4)
try:
emby_wsocket.ws_config = config
emby_wsocket.EmbySession.config = config
except:
emby_wsocket.ws_config = config
def get_state():
status = {}
status['Version'] = get_version()
try:
status['Playstate'] = emby_wsocket.EmbySession.playstate
status['playedtitle'] = emby_wsocket.EmbySession.playedtitle
status['server'] = emby_wsocket.EmbySession.server
status['folder'] = emby_wsocket.EmbySession.folder
status['filename'] = emby_wsocket.EmbySession.filename
status['CurrentData'] = emby_wsocket.EmbySession.currentdata
except:
status['Playstate'] = 'Not_Connected'
status['playedtitle'] = ''
status['server'] = ''
status['folder'] = ''
status['filename'] = ''
status['CurrentData'] = ''
status['cpu_perc'] = psutil.cpu_percent()
status['mem_perc'] = psutil.virtual_memory().percent
print(psutil.virtual_memory().percent)
print(status)
return status
def cargar_config(config_file, tv_path, av_path, lang_path):
    with open(config_file, 'r') as f:
        config = json.load(f)
config['Version'] = get_version()
default = config.get('Autoscript', False)
config['Autoscript'] = default
default = config.get('enable_all_libraries', False)
config['enable_all_libraries'] = default
default = config.get('TV_model', '')
config['TV_model'] = default
default = config.get('TV_SOURCES', [])
config['TV_SOURCES'] = default
default = config.get('AV_model', '')
config['AV_model'] = default
default = config.get('AV_SOURCES', [])
config['AV_SOURCES'] = default
default = config.get('TV_script_init', '')
config['TV_script_init'] = default
default = config.get('TV_script_end', '')
config['TV_script_end'] = default
default = config.get('av_delay_hdmi', 0)
config['av_delay_hdmi'] = default
default = config.get('AV_Port', 23)
config['AV_Port'] = default
default = config.get('timeout_oppo_mount', 60)
config['timeout_oppo_mount'] = default
default = config.get('language', 'es-ES')
config['language'] = default
default = config.get('default_nfs', False)
config['default_nfs'] = default
default = config.get('wait_nfs', False)
config['wait_nfs'] = default
default = config.get('refresh_time', 5)
config['refresh_time'] = default
default = config.get('check_beta', False)
config['check_beta'] = default
default = config.get('smbtrick', False)
config['smbtrick'] = default
default = config.get('BRDisc', False)
config['BRDisc'] = default
edit_server = 0
server_list = config['servers']
for server in server_list:
default = server.get('Test_OK', False)
server_list[edit_server]['Test_OK'] = default
edit_server = edit_server + 1
if config['TV'] == 'True':
config['TV'] = True
if config['TV'] == 'False':
config['TV'] = False
if config['AV'] == 'True':
config['AV'] = True
if config['AV'] == 'False':
config['AV'] = False
config['servers'] = server_list
config['tv_dirs'] = get_dir_folders(tv_path)
config['av_dirs'] = get_dir_folders(av_path)
config['langs'] = get_dir_folders(lang_path)
return config
def check_version(config):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
print(config['check_beta'])
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
xno_version = get_version()
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
print(xno_version)
print(last_version)
if xno_version < last_version:
resp['new_version'] = True
else:
resp['new_version'] = False
print(resp)
return resp
def update_version(config, vers_path, cwd):
url = (
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'
)
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
if config['check_beta'] == True:
last_version = version['beta_version']
last_version_file = version['beta_version_file']
else:
last_version = version['curr_version']
last_version_file = version['curr_version_file']
url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +
last_version_file)
headers = {}
response2 = requests.get(url2, headers=headers)
filename = vers_path + last_version_file
with open(filename, 'wb') as f:
f.write(response2.content)
f.close()
shutil.unpack_archive(filename, cwd)
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
    tv_path = (cwd + separador + 'web' + separador + 'libraries' +
        separador + 'TV' + separador)
    av_path = (cwd + separador + 'web' + separador + 'libraries' +
        separador + 'AV' + separador)
    lib_path = cwd + separador + 'lib' + separador
if config['TV'] == True and config['TV_model'] != '':
move_files(tv_path + config['TV_model'], lib_path)
if config['AV'] == True and config['AV_model'] != '':
move_files(av_path + config['AV_model'], lib_path)
resp = {}
resp['version'] = last_version
resp['file'] = last_version_file
resp['new_version'] = False
return resp
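# update_version() downloads the release archive into versions/, unpacks it
# over the install directory, re-copies the active TV/AV model libraries into
# lib/, and leaves it to the caller to restart() into the new code.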
def cargar_lang(config_file):
    with open(config_file.encode(sys.getfilesystemencoding()), 'r',
        encoding='latin-1') as f:
        config = json.load(f)
return config
def leer_file(web_file):
    with open(web_file, 'r', encoding='utf8') as f:
        num = f.read()
return num
def leer_img(web_file):
    with open(web_file, 'rb') as f:
        num = f.read()
return num
def test_path(config, server):
rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)
result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])
return result2
def get_mount_path(movie, server_data):
movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])
movie = movie.replace('\\\\', '\\')
movie = movie.replace('\\', '/')
word = '/'
inicio = movie.find(word)
inicio = inicio + 1
final = movie.find(word, inicio, len(movie))
servidor = movie[inicio:final]
ultimo = final + 1
result = final + 1
while result > 0:
ultimo = result + 1
result = movie.find(word, ultimo, len(movie))
fichero = movie[ultimo:len(movie)]
final = final + 1
ultimo = ultimo - 1
carpeta = movie[final:ultimo]
resultado = {}
resultado['Servidor'] = servidor
resultado['Carpeta'] = carpeta
resultado['Fichero'] = fichero
return resultado
def test_mount_path(config, servidor, carpeta):
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
while response_data6f.find('devicelist":[]') > 0:
time.sleep(1)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('QPW', config)
device_list = json.loads(response_data6f)
if config['DebugLevel'] > 0:
print(device_list)
nfs = config['default_nfs']
for device in device_list['devicelist']:
if device['name'].upper() == servidor.upper():
if device['sub_type'] == 'nfs':
nfs = True
break
else:
nfs = False
break
if nfs:
response_login = LoginNFS(config, servidor)
else:
response_login = LoginSambaWithOutID(config, servidor)
if config['Always_ON'] == False:
time.sleep(5)
response_data6b = getsetupmenu(config)
if nfs:
response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',
config)
else:
response_mount = mountSharedFolder(servidor, carpeta, '', '',
config)
response = json.loads(response_mount)
if config['Autoscript'] == True:
result = umountSharedFolder(config)
if response['success'] == True:
a = 'OK'
else:
a = 'FAILURE'
return a
    else:
        print(
            'Cannot connect: check the configuration and that the OPPO is powered on or in standby'
            )
        return 'FAILURE'
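# test_mount_path() mirrors the playback startup handshake: wake the OPPO via
# sendnotifyremote() plus a remote key, poll getdevicelist() until the player
# reports network shares, log in over NFS or SMB depending on the share's
# sub_type, then try to mount the folder to validate the configured path.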
def test_emby(config):
try:
EmbySession = EmbyHttp(config)
user_info = EmbySession.user_info
if user_info['SessionInfo']['Id'] != '':
return 'OK'
else:
return 'FAILED'
except:
return 'FAILED'
def test_oppo(config):
result = check_socket(config)
if result == 0:
return 'OK'
else:
return 'FAILED'
def carga_libraries(config):
try:
EmbySession = EmbyHttp(config)
views_list = EmbySession.get_user_views(EmbySession.user_info[
'User']['Id'])
libraries = []
for view in views_list:
library = {}
library['Name'] = view['Name']
library['Id'] = view['Id']
library['Active'] = False
try:
lib_list = config['Libraries']
except:
lib_list = {}
for lib in lib_list:
if lib['Id'] == view['Id']:
library['Active'] = lib['Active']
libraries.append(library)
config['Libraries'] = libraries
return 0
except:
return 1
def is_library_active(config, libraryname):
for library in config['Libraries']:
if library['Name'] == libraryname:
return library['Active']
return False
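# Build the list of mountable media folders from Emby, preserving any name,
# Oppo_Path and Test_OK values already configured for a matching Emby path.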
def get_selectableFolders(config):
EmbySession = EmbyHttp(config)
MediaFolders = EmbySession.get_emby_selectablefolders()
servers = []
for Folder in MediaFolders:
index = 1
active = is_library_active(config, Folder['Name'])
if config['enable_all_libraries'] == True:
active = True
if active == True:
for SubFolder in Folder['SubFolders']:
server = {}
server['Id'] = SubFolder['Id']
if index > 1:
server['name'] = Folder['Name'] + '(' + str(index) + ')'
else:
server['name'] = Folder['Name']
server['Emby_Path'] = SubFolder['Path']
server['Oppo_Path'] = '/'
try:
serv_list = config['servers']
except:
serv_list = {}
for serv in serv_list:
if server['Emby_Path'] == serv['Emby_Path']:
server['name'] = serv['name']
server['Oppo_Path'] = serv['Oppo_Path']
server['Test_OK'] = serv['Test_OK']
servers.append(server)
index = index + 1
config['servers'] = servers
def get_dir_folders(directory):
os.chdir(directory)
dirs = os.listdir('.')
list_dir = []
for x in dirs:
if os.path.isdir(x):
list_dir.append(x)
return list_dir
def move_files(src, dest):
os.chdir(src)
src_files = os.listdir('.')
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if os.path.isfile(full_file_name):
shutil.copy(full_file_name, dest)
return 0
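# Ask Emby for its known client devices (skipping this client itself) so the
# UI can offer them as controllable targets.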
def get_devices(config):
try:
EmbySession = EmbyHttp(config)
devices = EmbySession.get_emby_devices()
index = 0
dev_temp = []
for device in devices['Items']:
try:
if device['Id'] != 'Xnoppo':
device['Name'] = device['Name'] + ' / ' + device['AppName']
device['Id'] = device['ReportedDeviceId']
dev_temp.append(device)
except:
pass
config['devices'] = dev_temp
return 'OK'
except:
return 'FAILURE'
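# Minimal configuration web server: do_GET serves the static UI pages, images
# and JSON status endpoints; do_POST applies configuration changes and runs
# the connection tests requested by the UI.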
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/emby_conf.html':
i = leer_file(html_path + 'emby_conf.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/oppo_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'oppo_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/lib_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'lib_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/path_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'path_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/tv_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'tv_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/av_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'av_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/other_conf.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'other_conf.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/status.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'status.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/help.html':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
i = leer_file(html_path + 'help.html')
self.wfile.write(bytes(i, 'utf-8'))
return 0
if self.path == '/remote.html':
i = leer_file(html_path + 'remote.html')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(i, 'utf-8'))
return 0
        if self.path == '/android-chrome-36x36.png':
            self.send_response(200)
            self.send_header('Content-type', 'image/png')
            self.end_headers()
            i = leer_img(resource_path + 'android-chrome-36x36.png')
            self.wfile.write(bytes(i))
            return 0
        if self.path == '/av-receiver-icon-2.jpg':
            self.send_response(200)
            self.send_header('Content-type', 'image/jpeg')
            self.end_headers()
            i = leer_img(resource_path + 'av-receiver-icon-2.jpg')
            self.wfile.write(bytes(i))
            return 0
        if self.path == '/dragon.png':
            self.send_response(200)
            self.send_header('Content-type', 'image/png')
            self.end_headers()
            i = leer_img(resource_path + 'dragon.png')
            self.wfile.write(bytes(i))
            return 0
if self.path == '/xnoppo_config':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_lib':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
carga_libraries(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/xnoppo_config_dev':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_devices(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/check_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = check_version(config)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/update_version':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = update_version(config, vers_path, cwd)
restart()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/get_state':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = get_state()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/restart':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
a = 'Restarting'
self.wfile.write(bytes(a, 'utf-8'))
restart()
if self.path == '/refresh_paths':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
a = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
get_selectableFolders(a)
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/lang':
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = cargar_lang(lang_path + config['language'] + separador +
'lang.js')
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path.find('/send_key?') >= 0:
get_data = self.path
print(get_data)
a = len('/send_key?sendkey=')
b = get_data[a:len(get_data)]
print(b)
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
sendnotifyremote(config['Oppo_IP'])
result = check_socket(config)
if b == 'PON':
if result == 0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey('EJT', config)
if config['BRDisc'] == True:
time.sleep(1)
response_data_on = sendremotekey('EJT', config)
time.sleep(1)
response_data6b = getsetupmenu(config)
else:
response_data_on = sendremotekey(b, config)
self.send_response(200)
            self.send_header('Content-type', 'text/plain')
self.end_headers()
a = 'ok'
self.wfile.write(bytes(a, 'utf-8'))
return 0
if self.path == '/log.txt':
self.send_response(200)
            self.send_header('Content-type', 'text/plain')
self.end_headers()
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')
self.wfile.write(bytes(a))
return 0
        else:
            # Unknown path: answer 404 instead of the sample page left over
            # from the http.server tutorial this handler grew out of.
            self.send_response(404)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write(bytes(
                '<html><body><p>Unknown request: %s</p></body></html>' %
                self.path, 'utf-8'))
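    # POST endpoints: each handler reads a JSON body, performs the requested
    # action, and either echoes the (possibly updated) config back or answers
    # HTTP 300 with the literal string 'ERROR'.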
def do_POST(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/save_config':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
if self.path == '/check_emby':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_emby(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
status = get_state()
if status['Playstate'] == 'Not_Connected':
save_config(cwd + separador + 'config.json', config)
emby_wsocket.ws_config = config
restart()
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/check_oppo':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = test_oppo(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/test_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
server = json.loads(post_data.decode('utf-8'))
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = test_path(config, server)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(server))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(server), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/navigate_path':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
path_obj = json.loads(post_data.decode('utf-8'))
path = path_obj['path']
config = cargar_config(cwd + separador + 'config.json', tv_path,
av_path, lang_path)
a = navigate_folder(path, config)
a_json = json.dumps(a)
print(len(a_json))
self.send_response(200)
self.send_header('Content-Length', len(a_json))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(a), 'utf-8'))
return 0
if self.path == '/move_tv':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(tv_path + config['TV_model'], lib_path)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
restart()
return 0
if self.path == '/move_av':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json', config)
move_files(av_path + config['AV_model'], lib_path)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
restart()
return 0
if self.path == '/get_tv_key':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_key(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_conn':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_test_conn(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_tv_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_tv_sources(config)
if a == 'OK':
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/get_av_sources':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = get_hdmi_list(config)
            if a is not None:
config['AV_SOURCES'] = a
save_config(cwd + separador + 'config.json', config)
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_init':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/tv_test_end':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = tv_set_prev(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_on':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_check_power(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_off':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_power_off(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
if self.path == '/av_test_hdmi':
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
config = json.loads(post_data.decode('utf-8'))
a = av_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header('Content-Length', len(config))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config), 'utf-8'))
else:
self.send_response(300)
self.send_header('Content-Length', len('ERROR'))
self.send_header('Content-Type', 'text/html')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes('ERROR', 'utf-8'))
return 0
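# Entry point: load the configuration and translations, configure logging
# according to DebugLevel, start the Emby websocket client in a background
# thread and serve the configuration UI on port 8090.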
if __name__ == '__main__':
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador = '\\'
else:
separador = '/'
config_file = cwd + separador + 'config.json'
resource_path = (cwd + separador + 'web' + separador + 'resources' +
separador)
html_path = cwd + separador + 'web' + separador
tv_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'TV' + separador)
av_path = (cwd + separador + 'web' + separador + 'libraries' +
separador + 'AV' + separador)
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
config = cargar_config(config_file, tv_path, av_path, lang_path)
logfile = cwd + separador + 'emby_xnoppo_client_logging.log'
lang = cargar_lang(lang_path + config['language'] + separador + 'lang.js')
if config['DebugLevel'] == 0:
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%d/%m/%Y %I:%M:%S %p', level=logging.CRITICAL)
elif config['DebugLevel'] == 1:
rfh = logging.handlers.RotatingFileHandler(filename=logfile, mode=
'a', maxBytes=50 * 1024 * 1024, backupCount=2, encoding=None,
delay=0)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%d/%m/%Y %I:%M:%S %p', level=logging.INFO, handlers=[rfh])
elif config['DebugLevel'] == 2:
rfh = logging.handlers.RotatingFileHandler(filename=logfile, mode=
'a', maxBytes=5 * 1024 * 1024, backupCount=2, encoding=None,
delay=0)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%d/%m/%Y %I:%M:%S %p', level=logging.DEBUG, handlers=[rfh]
)
emby_wsocket = xnoppo_ws()
emby_wsocket.ws_config = config
emby_wsocket.config_file = config_file
emby_wsocket.ws_lang = lang
x = threading.Thread(target=thread_function, args=(emby_wsocket,))
x.start()
espera = 0
estado_anterior = ''
    logging.debug('Starting the web server\n')
serverPort = 8090
webServer = HTTPServer(('', serverPort), MyServer)
print('Server started http://%s:%s' % ('', serverPort))
try:
webServer.serve_forever()
except KeyboardInterrupt:
pass
webServer.server_close()
    logging.info('Process finished')
print('Server stopped.')
| import http.server
import socketserver
from http.server import BaseHTTPRequestHandler, HTTPServer
import time
import json
import io
import urllib
import requests
from lib.Emby_ws import xnoppo_ws
from lib.Emby_http import *
from lib.Xnoppo import *
from lib.Xnoppo_TV import *
import lib.Xnoppo_AVR
import shutil
import asyncio
import threading
import logging
import logging.handlers
import psutil
def get_version():
return("2.01")
def thread_function(ws_object):
print("Thread: starting")
ws_object.start()
print("Thread: finishing")
def restart():
print('restart')
try:
emby_wsocket.stop()
except:
sys.exit()
sys.exit()
print('fin restart')
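# Persist the configuration to disk and, when a websocket session is live,
# push the new values into it as well.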
def save_config(config_file, config):
with open(config_file, 'w') as fw:
json.dump(config, fw, indent=4)
try:
emby_wsocket.ws_config=config
emby_wsocket.EmbySession.config=config
except:
emby_wsocket.ws_config=config
def get_state():
status={}
status["Version"]=get_version()
try:
status["Playstate"]=emby_wsocket.EmbySession.playstate
status["playedtitle"]=emby_wsocket.EmbySession.playedtitle
status["server"]=emby_wsocket.EmbySession.server
status["folder"]=emby_wsocket.EmbySession.folder
status["filename"]=emby_wsocket.EmbySession.filename
status["CurrentData"]=emby_wsocket.EmbySession.currentdata
except:
status["Playstate"]="Not_Connected"
status["playedtitle"]=""
status["server"]=""
status["folder"]=""
status["filename"]=""
status["CurrentData"]=""
status["cpu_perc"]=psutil.cpu_percent()
status["mem_perc"]=psutil.virtual_memory().percent
# you can have the percentage of used RAM
print(psutil.virtual_memory().percent)
print(status)
return(status)
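# Load config.json and backfill a default for every option added after the
# file was first written, so older config files keep working after upgrades.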
def cargar_config(config_file,tv_path,av_path,lang_path):
with open(config_file, 'r') as f:
config = json.load(f)
#ver_configuracion(config)
## new options default config values
config["Version"]=get_version()
default = config.get("Autoscript", False)
config["Autoscript"]=default
default = config.get("enable_all_libraries", False)
config["enable_all_libraries"]=default
default = config.get("TV_model", "")
config["TV_model"]=default
default = config.get("TV_SOURCES", [])
config["TV_SOURCES"] = default
default = config.get("AV_model", "")
config["AV_model"]=default
default = config.get("AV_SOURCES", [])
config["AV_SOURCES"] = default
default = config.get("TV_script_init", "")
config["TV_script_init"]=default
default = config.get("TV_script_end", "")
config["TV_script_end"]=default
default = config.get("av_delay_hdmi", 0)
config["av_delay_hdmi"]=default
default = config.get("AV_Port", 23)
config["AV_Port"]=default
default = config.get("timeout_oppo_mount", 60)
config["timeout_oppo_mount"]=default
default = config.get("language","es-ES")
config["language"]=default
default = config.get("default_nfs",False)
config["default_nfs"]=default
default = config.get("wait_nfs",False)
config["wait_nfs"]=default
default = config.get("refresh_time",5)
config["refresh_time"]=default
default = config.get("check_beta",False)
config["check_beta"]=default
default = config.get("smbtrick",False)
config["smbtrick"]=default
default = config.get("BRDisc",False)
config["BRDisc"]=default
    ## default the stored Test_OK flag for every configured server path
edit_server=0
server_list = config["servers"]
for server in server_list:
default = server.get("Test_OK", False)
server_list[edit_server]["Test_OK"]=default
edit_server=edit_server+1
    ## Convert legacy string booleans ('True'/'False') to real booleans.
    if config["TV"]=='True':
        config["TV"]=True
    if config["TV"]=='False':
        config["TV"]=False
    if config["AV"]=='True':
        config["AV"]=True
    if config["AV"]=='False':
        config["AV"]=False
config["servers"]=server_list
config["tv_dirs"]=get_dir_folders(tv_path);
config["av_dirs"]=get_dir_folders(av_path);
config["langs"]=get_dir_folders(lang_path);
return(config)
def check_version(config):
url = "https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js"
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
print(config["check_beta"])
if config["check_beta"]==True:
last_version=version["beta_version"]
last_version_file=version["beta_version_file"]
else:
last_version=version["curr_version"]
last_version_file=version["curr_version_file"]
xno_version=get_version()
resp = {}
resp["version"]=last_version
resp["file"]=last_version_file
print(xno_version)
print(last_version)
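    # Note: version strings compare lexicographically here, which works as
    # long as the zero-padded "X.YZ" numbering scheme is kept.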
if xno_version<last_version:
resp["new_version"]=True
else:
resp["new_version"]=False
print(resp)
return(resp)
def update_version(config,vers_path,cwd):
url = "https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js"
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
if config["check_beta"]==True:
last_version=version["beta_version"]
last_version_file=version["beta_version_file"]
else:
last_version=version["curr_version"]
last_version_file=version["curr_version_file"]
url2 = "https://github.com/siberian-git/Xnoppo/raw/main/versions/" + last_version_file
headers = {}
response2 = requests.get(url2, headers=headers)
filename=vers_path + last_version_file
with open(filename, 'wb') as f:
f.write(response2.content)
f.close()
shutil.unpack_archive(filename, cwd)
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
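    # Note: lib_path here is the module-level value set in the __main__ block.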
if config["TV"]==True and config["TV_model"]!="":
move_files(tv_path + config["TV_model"],lib_path)
if config["AV"]==True and config["AV_model"]!="":
move_files(av_path + config["AV_model"],lib_path)
resp = {}
resp["version"]=last_version
resp["file"]=last_version_file
resp["new_version"]=False
return(resp)
def cargar_lang(config_file):
with open(config_file.encode(sys.getfilesystemencoding()), 'r',encoding='latin-1') as f:
config = json.load(f)
return(config)
def leer_file(web_file):
with open(web_file, 'r',encoding='utf8') as f:
num=f.read()
return(num)
def leer_img(web_file):
with open(web_file, 'rb') as f:
num=f.read()
return(num)
def test_path(config,server):
rutas = get_mount_path(server["Emby_Path"] + "/test.mkv",server)
result2 = test_mount_path(config,rutas["Servidor"],rutas["Carpeta"])
return(result2)
def get_mount_path(movie,server_data):
movie = movie.replace(server_data["Emby_Path"],server_data["Oppo_Path"])
movie = movie.replace('\\\\','\\')
movie = movie.replace('\\','/')
word = '/'
inicio = movie.find(word)
inicio = inicio +1
final = movie.find(word,inicio,len(movie))
servidor = movie[inicio:final]
ultimo=final+1
result=final+1
while result > 0:
ultimo=result+1
result=movie.find(word,ultimo,len(movie))
fichero=movie[ultimo:len(movie)]
final=final+1
ultimo=ultimo-1
carpeta=movie[final:ultimo]
resultado={}
resultado["Servidor"]=servidor
resultado["Carpeta"]=carpeta
resultado["Fichero"]=fichero
return(resultado)
def test_mount_path(config,servidor,carpeta):
sendnotifyremote(config["Oppo_IP"])
#print("Conectando con el OPPO")
result=check_socket(config)
if result==0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey("EJT",config)
time.sleep(1)
#print("Solicitando montar ruta al OPPO")
response_data6b = getsetupmenu(config)
while response_data6f.find('devicelist":[]') > 0:
time.sleep(1)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey("QPW",config)
device_list=json.loads(response_data6f)
if config["DebugLevel"]>0: print(device_list)
nfs=config["default_nfs"]
for device in device_list["devicelist"]:
if device["name"].upper()==servidor.upper():
if device["sub_type"]=="nfs":
nfs=True
break
else:
nfs=False
break
if nfs:
response_login = LoginNFS(config,servidor)
else:
response_login = LoginSambaWithOutID(config,servidor)
if config["Always_ON"]==False:
time.sleep(5)
response_data6b = getsetupmenu(config)
if nfs:
response_mount = mountSharedNFSFolder(servidor,carpeta,'','',config)
else:
response_mount = mountSharedFolder(servidor,carpeta,'','',config)
response=json.loads(response_mount)
#print(response)
if config["Autoscript"]==True:
result=umountSharedFolder(config)
if response["success"]==True:
a = "OK"
else:
a = "FAILURE"
return(a)
else:
print("No se puede conectar, revisa las configuraciones o que el OPPO este encendido o en reposo")
def test_emby(config):
try:
EmbySession=EmbyHttp(config)
user_info = EmbySession.user_info
if user_info["SessionInfo"]["Id"]!="":
return("OK")
else:
return("FAILED")
except:
return("FAILED")
def test_oppo(config):
result=check_socket(config)
if result==0:
return("OK")
else:
return("FAILED")
def carga_libraries(config):
try:
EmbySession=EmbyHttp(config)
views_list=EmbySession.get_user_views(EmbySession.user_info["User"]["Id"])
libraries = []
for view in views_list:
library= {}
library["Name"]=view["Name"]
library["Id"]=view["Id"]
library["Active"]=False
try:
lib_list=config["Libraries"]
except:
lib_list={}
for lib in lib_list:
if lib["Id"]==view["Id"]:
library["Active"]=lib["Active"]
libraries.append(library)
config["Libraries"]=libraries
return(0)
except:
return(1)
def is_library_active(config,libraryname):
for library in config["Libraries"]:
if library["Name"]==libraryname:
return(library["Active"])
return(False)
def get_selectableFolders(config):
EmbySession=EmbyHttp(config)
MediaFolders = EmbySession.get_emby_selectablefolders()
servers=[]
for Folder in MediaFolders:
index=1
active=is_library_active(config,Folder["Name"])
if config["enable_all_libraries"]==True:
            active=True
if active==True:
for SubFolder in Folder["SubFolders"]:
server={}
server["Id"]=SubFolder["Id"]
if index>1:
server["name"]=Folder["Name"]+"("+str(index)+")"
else:
server["name"]=Folder["Name"]
server["Emby_Path"]=SubFolder["Path"]
server["Oppo_Path"]="/"
try:
serv_list=config["servers"]
except:
serv_list={}
for serv in serv_list:
if server["Emby_Path"]==serv["Emby_Path"]:
server["name"]=serv["name"];
server["Oppo_Path"]=serv["Oppo_Path"];
server["Test_OK"]=serv["Test_OK"];
servers.append(server)
index=index+1
config["servers"]=servers
def get_dir_folders(directory):
os.chdir(directory)
dirs = os.listdir(".")
list_dir=[]
#a =""
#list_dir.append(a)
for x in dirs:
if os.path.isdir(x):
list_dir.append(x)
return(list_dir)
def move_files(src, dest):
os.chdir(src)
src_files = os.listdir('.')
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if os.path.isfile(full_file_name):
shutil.copy(full_file_name, dest)
return(0)
def get_devices(config):
try:
EmbySession=EmbyHttp(config)
devices = EmbySession.get_emby_devices()
index=0
dev_temp = []
for device in devices["Items"]:
try:
if device["Id"]!='Xnoppo':
device["Name"]=device["Name"] + " / " + device["AppName"]
device["Id"]=device["ReportedDeviceId"]
dev_temp.append(device)
except:
pass
config["devices"]=dev_temp
return('OK')
except:
return('FAILURE')
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
resource_path=cwd + separador + 'web' + separador + 'resources' + separador
html_path = cwd + separador + 'web' + separador
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/emby_conf.html':
i = leer_file(html_path + 'emby_conf.html')
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/oppo_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'oppo_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/lib_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'lib_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/path_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'path_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/tv_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'tv_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/av_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'av_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/other_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'other_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/status.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'status.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/help.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'help.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/remote.html':
i = leer_file(html_path + 'remote.html')
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(bytes(i,"utf-8"))
return(0)
        if self.path == '/android-chrome-36x36.png':
            self.send_response(200)
            self.send_header("Content-type", "image/png")
            self.end_headers()
            i = leer_img(resource_path + 'android-chrome-36x36.png')
            self.wfile.write(bytes(i))
            return(0)
        if self.path == '/av-receiver-icon-2.jpg':
            self.send_response(200)
            self.send_header("Content-type", "image/jpeg")
            self.end_headers()
            i = leer_img(resource_path + 'av-receiver-icon-2.jpg')
            self.wfile.write(bytes(i))
            return(0)
        if self.path == '/dragon.png':
            self.send_response(200)
            self.send_header("Content-type", "image/png")
            self.end_headers()
            i = leer_img(resource_path + 'dragon.png')
            self.wfile.write(bytes(i))
            return(0)
if self.path == '/xnoppo_config':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/xnoppo_config_lib':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
carga_libraries(a)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/xnoppo_config_dev':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
get_devices(a)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/check_version':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = check_version(config)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/update_version':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = update_version(config,vers_path,cwd)
restart()
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/get_state':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = get_state()
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/restart':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
a = "Restarting"
self.wfile.write(bytes(a,"utf-8"))
restart()
if self.path == '/refresh_paths':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
get_selectableFolders(a)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/lang':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = cargar_lang(lang_path + config["language"] + separador +'lang.js')
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path.find("/send_key?")>=0:
get_data = self.path
print(get_data)
a = len('/send_key?sendkey=')
b=get_data[a:len(get_data)]
print(b)
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
sendnotifyremote(config["Oppo_IP"])
result=check_socket(config)
if b=='PON':
if result==0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey("EJT",config)
if config["BRDisc"]==True:
time.sleep(1)
response_data_on = sendremotekey("EJT",config)
time.sleep(1)
response_data6b = getsetupmenu(config)
else:
response_data_on = sendremotekey(b,config)
self.send_response(200)
self.send_header("Content-type", "text")
self.end_headers()
a = "ok"
self.wfile.write(bytes(a,"utf-8"))
return(0)
if self.path == '/log.txt':
self.send_response(200)
self.send_header("Content-type", "text")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')
self.wfile.write(bytes(a))
return(0)
        else:
            # Unknown path: answer 404 instead of the sample page left over
            # from the http.server tutorial this handler grew out of.
            self.send_response(404)
            self.send_header("Content-type", "text/html")
            self.end_headers()
            self.wfile.write(bytes("<html><body><p>Unknown request: %s</p></body></html>" % self.path, "utf-8"))
def do_POST(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
resource_path=cwd + separador + 'web' + separador + 'resources' + separador
html_path = cwd + separador + 'web' + separador
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/save_config':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
if self.path == '/check_emby':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = test_emby(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
status = get_state()
if status["Playstate"]=="Not_Connected":
save_config(cwd + separador + 'config.json',config)
emby_wsocket.ws_config=config
restart()
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/check_oppo':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = test_oppo(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/test_path':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
server = json.loads(post_data.decode('utf-8'))
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = test_path(config,server)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(server))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(server),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/navigate_path':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
path_obj = json.loads(post_data.decode('utf-8'))
path = path_obj["path"]
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = navigate_folder(path,config)
a_json=json.dumps(a)
print(len(a_json))
self.send_response(200)
self.send_header("Content-Length", len(a_json))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/move_tv':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json',config)
move_files(tv_path + config["TV_model"],lib_path)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
restart()
return(0)
if self.path == '/move_av':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json',config)
move_files(av_path + config["AV_model"],lib_path)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
restart()
return(0)
if self.path == '/get_tv_key':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = get_tv_key(config)
if a == 'OK':
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/tv_test_conn':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = tv_test_conn(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/get_tv_sources':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = get_tv_sources(config)
if a == 'OK':
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/get_av_sources':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = get_hdmi_list(config)
            if a is not None:
config["AV_SOURCES"]=a
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/tv_test_init':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = tv_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/tv_test_end':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = tv_set_prev(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/av_test_on':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = av_check_power(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/av_test_off':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = av_power_off(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/av_test_hdmi':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = av_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
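# Every /tv_* and /av_* endpoint in do_POST above follows the same pattern:
# parse the JSON body, call the matching helper, then echo the config back on
# success or send the literal string "ERROR" (with HTTP status 300) on failure.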
if __name__ == "__main__":
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
config_file = cwd + separador + "config.json"
resource_path=cwd + separador + 'web' + separador + 'resources' + separador
html_path = cwd + separador + 'web' + separador
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
config = cargar_config(config_file,tv_path,av_path,lang_path)
logfile=cwd + separador + "emby_xnoppo_client_logging.log"
lang = cargar_lang(lang_path + config["language"] + separador +'lang.js')
if config["DebugLevel"]==0:
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.CRITICAL)
elif config["DebugLevel"]==1:
rfh = logging.handlers.RotatingFileHandler(
filename=logfile,
mode='a',
maxBytes=50*1024*1024,
backupCount=2,
encoding=None,
delay=0
)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.INFO,handlers=[rfh])
elif config["DebugLevel"]==2:
rfh = logging.handlers.RotatingFileHandler(
filename=logfile,
mode='a',
maxBytes=5*1024*1024,
backupCount=2,
encoding=None,
delay=0
)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.DEBUG,handlers=[rfh])
emby_wsocket = xnoppo_ws()
emby_wsocket.ws_config=config
emby_wsocket.config_file=config_file
emby_wsocket.ws_lang=lang
x = threading.Thread(target=thread_function, args=(emby_wsocket,))
x.start()
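    # The Emby websocket client runs on its own worker thread; the HTTP server
    # below blocks the main thread until interrupted.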
espera=0
estado_anterior=''
    logging.debug('Starting the web server\n')
serverPort = 8090
webServer = HTTPServer(("", serverPort), MyServer)
print("Server started http://%s:%s" % ("", serverPort))
try:
webServer.serve_forever()
except KeyboardInterrupt:
pass
webServer.server_close()
    logging.info('End of process')
logging.info('Finished')
print("Server stopped.")
| [
21,
24,
25,
27,
28
] |
505 | 33cc8814d9397bcb0041728407efef80a136f151 | #!/usr/bin/env python
import argparse
import keyring
import papercut
import ConfigParser
import getpass
import time
import os
config = ConfigParser.ConfigParser()
config.read([os.path.expanduser('~/.papercut')])
try:
username = config.get('papercut','username')
except ConfigParser.NoSectionError:
username = None
p = argparse.ArgumentParser(description='Print some documents')
p.add_argument('--print', '-p', help='a filename to be printed', dest='printjob')
p.add_argument('--printer', '-r', help='a printer name to print to')
p.add_argument('--balance', '-b', nargs='?', const=True, help="display the user's printing balance")
p.add_argument('--list', '-l', nargs='?', const=True, help='list available printers')
p.add_argument('--user', '-u', help='username')
p.add_argument('--password-options', '-o', choices=['save','prompt'], help='save: prompt for password and save to keyring,\n prompt: prompt for password')
args = p.parse_args()
if not username and not args.user:
username = raw_input('enter username: ')
password = keyring.get_password('papercut', username)
def list_printers(sessID):
printers = papercut.listPrinters(sessID)
print "\nAvailable printers:"
for i,printer in enumerate(printers):
print i,"\t",printer[1], "." * (50 - len(printer[1])), printer[0]
return printers
def get_balance(sessID):
print '\nYour balance is now: $ %.2f' % (int(papercut.getBalance(sessID)) / 100.0)
if args.password_options or not password:
password = getpass.getpass()
if args.password_options == 'save':
keyring.set_password('papercut', username, password)
print "password saved in keyring"
if args.list or args.balance or args.printjob or args.printer:
sessID = papercut.login(username, password)
if sessID:
print '\nLogged in to PaperCut with session ID',sessID
if args.list: list_printers(sessID)
if args.balance: get_balance(sessID)
if args.printjob:
if not args.printer:
printers = list_printers(sessID)
args.printer = raw_input('select printer: ')
try:
printerIndex = int(args.printer)
args.printer = printers[printerIndex][1]
except ValueError:
pass
printJobID = papercut.printFile(args.printjob, args.printer, sessID)
print '\nJob sent to printer', args.printer
status = papercut.getPrintStatus(printJobID)
while(status['status'] == 'Submitting'):
time.sleep(0.1)
status = papercut.getPrintStatus(printJobID)
print "\nJob queued for printing."
while(not status['complete']):
time.sleep(0.1)
status = papercut.getPrintStatus(printJobID)
print "\nComplete!"
print "\nThis job cost $", status['cost']
# print status
get_balance(sessID)
else:
print '\nDid not successfully log in to PaperCut'
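# Example invocation (illustrative values):
#   python papercut.py --user jsmith --print report.pdf --printer 3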
| null | null | null | null | [
0
] |
506 | 5dc8f420e16ee14ecfdc61413f10a783e819ec32 | <mask token>
| <mask token>
def is_huge(A, B):
return A[0] > B[0] and A[1] > B[1]
<mask token>
| <mask token>
def is_huge(A, B):
return A[0] > B[0] and A[1] > B[1]
if __name__ == '__main__':
bulks = []
num = int(sys.stdin.readline())
for i in range(num):
bulks.append(list(map(int, sys.stdin.readline().split())))
for i in range(len(bulks)):
count = 0
for j in range(len(bulks)):
if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):
count += 1
if count == 0:
print(1, end=' ')
else:
print(count + 1, end=' ')
| import sys
def is_huge(A, B):
return A[0] > B[0] and A[1] > B[1]
if __name__ == '__main__':
bulks = []
num = int(sys.stdin.readline())
for i in range(num):
bulks.append(list(map(int, sys.stdin.readline().split())))
for i in range(len(bulks)):
count = 0
for j in range(len(bulks)):
if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):
count += 1
if count == 0:
print(1, end=' ')
else:
print(count + 1, end=' ')
| import sys
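# A strictly dominates B when it is larger in both measured dimensions;
# each competitor's printed rank is 1 + the number of competitors dominating it.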
def is_huge(A, B):
return (A[0] > B[0]) and (A[1] > B[1])
if __name__ == '__main__':
bulks = []
num = int(sys.stdin.readline())
for i in range(num):
bulks.append(list(map(int, sys.stdin.readline().split())))
for i in range(len(bulks)):
count = 0
for j in range(len(bulks)):
if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):
count += 1
if count == 0:
print(1, end=" ")
else:
print(count+1, end=" ")
| [
0,
1,
2,
3,
4
] |
507 | d6a677ed537f6493bb43bd893f3096dc058e27da | <mask token>
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
<mask token>
<mask token>
def Run(self, args):
guest_policy_ref = args.CONCEPTS.guest_policy.Parse()
release_track = self.ReleaseTrack()
client = osconfig_utils.GetClientInstance(release_track)
messages = osconfig_utils.GetClientMessages(release_track)
guest_policy_type = guest_policy_ref.type_
guest_policy_name = guest_policy_ref.result.RelativeName()
if guest_policy_type == type(guest_policy_type
).organization_guest_policy:
request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.organizations_guestPolicies
elif guest_policy_type == type(guest_policy_type).folder_guest_policy:
request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=
guest_policy_name)
service = client.folders_guestPolicies
else:
request = messages.OsconfigProjectsGuestPoliciesGetRequest(name
=guest_policy_name)
service = client.projects_guestPolicies
return service.Get(request)
| <mask token>
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
<mask token>
@staticmethod
def Args(parser):
resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')
def Run(self, args):
guest_policy_ref = args.CONCEPTS.guest_policy.Parse()
release_track = self.ReleaseTrack()
client = osconfig_utils.GetClientInstance(release_track)
messages = osconfig_utils.GetClientMessages(release_track)
guest_policy_type = guest_policy_ref.type_
guest_policy_name = guest_policy_ref.result.RelativeName()
if guest_policy_type == type(guest_policy_type
).organization_guest_policy:
request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.organizations_guestPolicies
elif guest_policy_type == type(guest_policy_type).folder_guest_policy:
request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=
guest_policy_name)
service = client.folders_guestPolicies
else:
request = messages.OsconfigProjectsGuestPoliciesGetRequest(name
=guest_policy_name)
service = client.projects_guestPolicies
return service.Get(request)
| <mask token>
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
"""Describe the given guest policy.
## EXAMPLES
To describe the guest policy 'policy1' in the project 'project1', run:
$ {command} policy1 --project=project1
To describe the guest policy 'policy1' in the organization '12345', run:
$ {command} policy1 --organization=12345
"""
@staticmethod
def Args(parser):
resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')
def Run(self, args):
guest_policy_ref = args.CONCEPTS.guest_policy.Parse()
release_track = self.ReleaseTrack()
client = osconfig_utils.GetClientInstance(release_track)
messages = osconfig_utils.GetClientMessages(release_track)
guest_policy_type = guest_policy_ref.type_
guest_policy_name = guest_policy_ref.result.RelativeName()
if guest_policy_type == type(guest_policy_type
).organization_guest_policy:
request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.organizations_guestPolicies
elif guest_policy_type == type(guest_policy_type).folder_guest_policy:
request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=
guest_policy_name)
service = client.folders_guestPolicies
else:
request = messages.OsconfigProjectsGuestPoliciesGetRequest(name
=guest_policy_name)
service = client.projects_guestPolicies
return service.Get(request)
| <mask token>
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute.os_config import osconfig_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.os_config import resource_args
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
"""Describe the given guest policy.
## EXAMPLES
To describe the guest policy 'policy1' in the project 'project1', run:
$ {command} policy1 --project=project1
To describe the guest policy 'policy1' in the organization '12345', run:
$ {command} policy1 --organization=12345
"""
@staticmethod
def Args(parser):
resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')
def Run(self, args):
guest_policy_ref = args.CONCEPTS.guest_policy.Parse()
release_track = self.ReleaseTrack()
client = osconfig_utils.GetClientInstance(release_track)
messages = osconfig_utils.GetClientMessages(release_track)
guest_policy_type = guest_policy_ref.type_
guest_policy_name = guest_policy_ref.result.RelativeName()
if guest_policy_type == type(guest_policy_type
).organization_guest_policy:
request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.organizations_guestPolicies
elif guest_policy_type == type(guest_policy_type).folder_guest_policy:
request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=
guest_policy_name)
service = client.folders_guestPolicies
else:
request = messages.OsconfigProjectsGuestPoliciesGetRequest(name
=guest_policy_name)
service = client.projects_guestPolicies
return service.Get(request)
| # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements command to describe a given guest policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute.os_config import osconfig_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.os_config import resource_args
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
"""Describe the given guest policy.
## EXAMPLES
To describe the guest policy 'policy1' in the project 'project1', run:
$ {command} policy1 --project=project1
To describe the guest policy 'policy1' in the organization '12345', run:
$ {command} policy1 --organization=12345
"""
@staticmethod
def Args(parser):
resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')
def Run(self, args):
guest_policy_ref = args.CONCEPTS.guest_policy.Parse()
release_track = self.ReleaseTrack()
client = osconfig_utils.GetClientInstance(release_track)
messages = osconfig_utils.GetClientMessages(release_track)
guest_policy_type = guest_policy_ref.type_
guest_policy_name = guest_policy_ref.result.RelativeName()
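        # Dispatch on the hierarchy level encoded in the parsed resource
        # reference: organization, folder, or project.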
if guest_policy_type == type(guest_policy_type).organization_guest_policy:
request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.organizations_guestPolicies
elif guest_policy_type == type(guest_policy_type).folder_guest_policy:
request = messages.OsconfigFoldersGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.folders_guestPolicies
else:
request = messages.OsconfigProjectsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.projects_guestPolicies
return service.Get(request)
| [
2,
3,
4,
5,
6
] |
508 | e6acc7b022001d8419095ad6364a6ae9504ec7aa | <mask token>
class Solution:
<mask token>
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
| <mask token>
class Solution:
def countArrangement(self, n: int) ->int:
bitset_total = 2 ** n
dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]
dp[0][0] = 1
for i in range(1, n + 1):
for bm in range(bitset_total):
for num in range(n):
if bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
dp[i][bm] += dp[i - 1][bm ^ 1 << num]
return dp[-1][-1]
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
| <mask token>
class Solution:
<mask token>
class Solution:
def countArrangement(self, n: int) ->int:
bitset_total = 2 ** n
dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]
dp[0][0] = 1
for i in range(1, n + 1):
for bm in range(bitset_total):
for num in range(n):
if bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
dp[i][bm] += dp[i - 1][bm ^ 1 << num]
return dp[-1][-1]
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
| <mask token>
class Solution:
def countArrangement(self, n: int) ->int:
cache = {}
def helper(perm):
digits = len(perm)
if digits == 1:
return 1
if perm in cache:
return cache[perm]
cnt = 0
for i in range(digits):
if perm[i] % digits == 0 or digits % perm[i] == 0:
cnt += helper(perm[:i] + perm[i + 1:])
cache[perm] = cnt
return cnt
return helper(tuple(range(1, n + 1)))
class Solution:
def countArrangement(self, n: int) ->int:
bitset_total = 2 ** n
dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]
dp[0][0] = 1
for i in range(1, n + 1):
for bm in range(bitset_total):
for num in range(n):
if bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
dp[i][bm] += dp[i - 1][bm ^ 1 << num]
return dp[-1][-1]
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
| from __future__ import annotations
from functools import cache
class Solution:
def countArrangement(self, n: int) -> int:
cache = {}
def helper(perm):
digits = len(perm)
if digits == 1:
return 1
if perm in cache:
return cache[perm]
cnt = 0
for i in range(digits):
if perm[i] % digits == 0 or digits % perm[i] == 0:
cnt += helper(perm[:i] + perm[i+1:])
cache[perm] = cnt
return cnt
return helper(tuple(range(1, n+1)))
class Solution:
def countArrangement(self, n: int) -> int:
# total number of bitset states possible
bitset_total = 2**n
dp = [[0 for _ in range(bitset_total)]
for _ in range(n+1)]
# all other valid states lead to this base case so mark this as 1
dp[0][0] = 1
# iterate over all positions
for i in range(1, n+1):
# iterate over all subsets
for bm in range(bitset_total):
# iterate over all numbers
for num in range(n):
                    # if this number is in the current subset and it satisfies
                    # the divisibility condition for position i
                    # & (bitwise AND: a bit is 1 only where both inputs are 1)
                    # 1 << x (shift 1 left by x places, zero-filling on the right)
                    # ^ (XOR: a bit is 1 where exactly one of the inputs is 1)
if ((bm & (1 << num)) and
(((num+1) % i == 0) or
(i % (num+1) == 0))):
dp[i][bm] += dp[i-1][bm ^ (1 << num)]
return dp[-1][-1]
# bm is the binary mask of visited numbers.
# i is the current place we want to fill.
# The idea is to start from the end and fill places in the opposite direction,
# because the high places potentially have fewer candidates.
# How dfs(bm, pl) works:
# if we reach place 0 with the process uninterrupted so far,
# it means we have found a beautiful arrangement.
# For each number 1, 2, ..., n we try to put that number on place pl,
# checking two conditions: first, that the place is still free (via the bitmask),
# and second, that one of the two divisibility properties of a beautiful
# arrangement holds. In that case we add dfs(bm ^ 1 << i, pl - 1) to the answer.
# Finally, we run dfs(0, n): from the last place and with an empty bit-mask.
class Solution:
def countArrangement(self, n: int) -> int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num\
and ((num+1) % i == 0 or i % (num+1) == 0):
cnt += dfs(bm ^ 1 << num, i-1)
return cnt
return dfs(0, n)
# nums is the set of still available numbers.
# Note that my i goes downwards, from n to 1. Position i = 1
# can hold any number, so I don't even have to check whether the last
# remaining number fits there. Also, position i = 2 happily holds
# every second number and i = 3 happily holds every third number,
# so filling the lowest positions last has a relatively high chance of success.
class Solution:
def countArrangement(self, n: int) -> int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i-1, nums-{num})
for num in nums
if num % i == 0 or i % num == 0)
return count(n, set(range(1, n+1)))
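# Example: Solution().countArrangement(2) == 2, since both [1, 2] and [2, 1]
# satisfy the divisibility condition at every position.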
| [
5,
6,
7,
8,
10
] |
509 | fd5fca0e9abbb669ddff4d676147acc4344cdd1c | <mask token>
class QuantidadeForm(forms.Form):
class Meta:
fields = 'quantidade', 'produto_id'
produto_id = forms.CharField(widget=forms.HiddenInput())
quantidade = forms.IntegerField(min_value=1, max_value=1000,
error_messages={'required': 'Campo obrigatório.'}, widget=forms.
TextInput(attrs={'class': 'form-control form-control-sm quantidade',
'maxlength': '20', 'onkeypress':
'return event.charCode >= 48 && event.charCode <= 57'}), required=True)
| <mask token>
class RemoveProdutoDoCarrinhoForm(forms.Form):
class Meta:
        fields = ('produto_id',)
<mask token>
class QuantidadeForm(forms.Form):
class Meta:
fields = 'quantidade', 'produto_id'
produto_id = forms.CharField(widget=forms.HiddenInput())
quantidade = forms.IntegerField(min_value=1, max_value=1000,
error_messages={'required': 'Campo obrigatório.'}, widget=forms.
TextInput(attrs={'class': 'form-control form-control-sm quantidade',
'maxlength': '20', 'onkeypress':
'return event.charCode >= 48 && event.charCode <= 57'}), required=True)
| <mask token>
class RemoveProdutoDoCarrinhoForm(forms.Form):
class Meta:
        fields = ('produto_id',)
produto_id = forms.CharField(widget=forms.HiddenInput())
class QuantidadeForm(forms.Form):
class Meta:
fields = 'quantidade', 'produto_id'
produto_id = forms.CharField(widget=forms.HiddenInput())
quantidade = forms.IntegerField(min_value=1, max_value=1000,
error_messages={'required': 'Campo obrigatório.'}, widget=forms.
TextInput(attrs={'class': 'form-control form-control-sm quantidade',
'maxlength': '20', 'onkeypress':
'return event.charCode >= 48 && event.charCode <= 57'}), required=True)
| from django import forms
class RemoveProdutoDoCarrinhoForm(forms.Form):
class Meta:
        fields = ('produto_id',)
produto_id = forms.CharField(widget=forms.HiddenInput())
class QuantidadeForm(forms.Form):
class Meta:
fields = 'quantidade', 'produto_id'
produto_id = forms.CharField(widget=forms.HiddenInput())
quantidade = forms.IntegerField(min_value=1, max_value=1000,
error_messages={'required': 'Campo obrigatório.'}, widget=forms.
TextInput(attrs={'class': 'form-control form-control-sm quantidade',
'maxlength': '20', 'onkeypress':
'return event.charCode >= 48 && event.charCode <= 57'}), required=True)
| from django import forms
class RemoveProdutoDoCarrinhoForm(forms.Form):
class Meta:
        fields = ('produto_id',)
produto_id = forms.CharField(widget=forms.HiddenInput())
class QuantidadeForm(forms.Form):
class Meta:
fields = ('quantidade', 'produto_id')
# <input type="hidden" name="produto_id" id="id_produto_id" value="xxx">
produto_id = forms.CharField(widget=forms.HiddenInput())
quantidade = forms.IntegerField(
min_value=1,
max_value=1000,
error_messages={'required': 'Campo obrigatório.', },
widget=forms.TextInput(attrs={'class': 'form-control form-control-sm quantidade',
'maxlength': '20',
'onkeypress': 'return event.charCode >= 48 && event.charCode <= 57'}),
required=True)
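# The onkeypress handler only filters typing to digits client-side;
# min_value/max_value still enforce the 1-1000 range on the server.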
| [
2,
3,
4,
5,
6
] |
510 | d654aea3da3e36ccde8a5f4e03798a0dea5aad8a | <mask token>
class Methylation(object):
def __init__(self, table, data_type, name, called_sites):
self.table = table
self.data_type = data_type
self.name = name
self.called_sites = called_sites
<mask token>
def parse_nanopolish(filename, file_type, name, window, smoothen=5):
table = pd.read_csv(filename, sep='\t')
gr = pr.PyRanges(table.rename(columns={'start': 'Start', 'chromosome':
'Chromosome', 'end': 'End', 'Strand': 'strand'}))
logging.info('Read the file in a dataframe.')
if window:
gr = gr[window.chromosome, window.begin:window.end]
try:
gr.pos = np.floor(gr.drop().df[['Start', 'End']].mean(axis=1))
except KeyError:
sys.stderr.write('\n\n\nProblem parsing nanopolish file {}!\n'.
format(filename))
sys.stderr.write(
'Could it be that there are no calls in your selected window?\n')
sys.stderr.write('\n\n\nDetailed error:\n')
raise
table = gr.df
if file_type in ['nanopolish_call', 'nanopolish_phased']:
table = table.drop(columns=['Start', 'End', 'log_lik_methylated',
'log_lik_unmethylated', 'num_calling_strands', 'num_motifs',
'sequence'])
return Methylation(table=table.sort_values(['read_name', 'pos']),
data_type=file_type, name=name, called_sites=len(table))
if file_type == 'nanopolish_freq':
called_sites = table.called_sites
table = table.drop(columns=['Start', 'End', 'num_motifs_in_group',
'called_sites', 'called_sites_methylated', 'group_sequence'])
return Methylation(table=table.sort_values('pos').groupby('pos').
mean().rolling(window=smoothen, center=True).mean(), data_type=
file_type, name=name, called_sites=called_sites.sum())
<mask token>
def get_modified_reference_positions(read):
if read.has_tag('MM'):
basemod = read.get_tag('MM').split(',', 1)[0]
if '-' in basemod:
sys.exit(
'ERROR: modifications on negative strand currently unsupported.'
)
base, mod = basemod.split('+')
deltas = [int(i) for i in read.get_tag('MM').split(',')[1:]]
probabilities = phred_to_probability(read.get_tag('MP'))
locations = np.cumsum(deltas) + np.concatenate((np.zeros(shape=1),
np.ones(shape=len(deltas) - 1))).astype('int')
base_index = np.array([i for i, letter in enumerate(read.
get_forward_sequence()) if letter == base])
modified_bases = base_index[locations]
refpos = np.array(read.get_reference_positions(full_length=True))
if read.is_reverse:
refpos = np.flipud(refpos)
probabilities = probabilities[::-1]
return basemod, refpos[modified_bases], probabilities
else:
return None, [None], [None]
def errs_tab(n):
"""Generate list of error rates for qualities less than equal than n."""
return [(10 ** (q / -10)) for q in range(n + 1)]
<mask token>
| <mask token>
class Methylation(object):
def __init__(self, table, data_type, name, called_sites):
self.table = table
self.data_type = data_type
self.name = name
self.called_sites = called_sites
<mask token>
def parse_nanopolish(filename, file_type, name, window, smoothen=5):
table = pd.read_csv(filename, sep='\t')
gr = pr.PyRanges(table.rename(columns={'start': 'Start', 'chromosome':
'Chromosome', 'end': 'End', 'Strand': 'strand'}))
logging.info('Read the file in a dataframe.')
if window:
gr = gr[window.chromosome, window.begin:window.end]
try:
gr.pos = np.floor(gr.drop().df[['Start', 'End']].mean(axis=1))
except KeyError:
sys.stderr.write('\n\n\nProblem parsing nanopolish file {}!\n'.
format(filename))
sys.stderr.write(
'Could it be that there are no calls in your selected window?\n')
sys.stderr.write('\n\n\nDetailed error:\n')
raise
table = gr.df
if file_type in ['nanopolish_call', 'nanopolish_phased']:
table = table.drop(columns=['Start', 'End', 'log_lik_methylated',
'log_lik_unmethylated', 'num_calling_strands', 'num_motifs',
'sequence'])
return Methylation(table=table.sort_values(['read_name', 'pos']),
data_type=file_type, name=name, called_sites=len(table))
if file_type == 'nanopolish_freq':
called_sites = table.called_sites
table = table.drop(columns=['Start', 'End', 'num_motifs_in_group',
'called_sites', 'called_sites_methylated', 'group_sequence'])
return Methylation(table=table.sort_values('pos').groupby('pos').
mean().rolling(window=smoothen, center=True).mean(), data_type=
file_type, name=name, called_sites=called_sites.sum())
def parse_nanocompore(filename, name, window):
def nanocompore_columns_of_interest(column):
if column in ['pos', 'ref_id']:
return True
elif column.endswith('pvalue_context_2') or column.endswith('pvalue'):
return True
else:
return False
table = pd.read_csv(filename, sep='\t', usecols=
nanocompore_columns_of_interest)
if window:
table = table[table['ref_id'] == window.chromosome]
return Methylation(table=table.sort_values('pos').append({'pos': window
.end}, ignore_index=True).drop(columns='ref_id').fillna(1.0),
data_type='nanocompore', name=name, called_sites=len(table))
def parse_ont_cram(filename, name, window):
cram = pysam.AlignmentFile(filename, 'rc')
data = []
for read in cram.fetch(reference=window.chromosome, start=window.begin,
end=window.end):
if not read.is_supplementary and not read.is_secondary:
mod, positions, quals = get_modified_reference_positions(read)
for pos, qual in zip(positions, quals):
if pos is not None:
data.append((read.query_name, '-' if read.is_reverse else
'+', pos, qual, mod))
return Methylation(table=pd.DataFrame(data, columns=['read_name',
'strand', 'pos', 'quality', 'mod']).astype(dtype={'mod': 'category',
'quality': 'float'}).sort_values(['read_name', 'pos']), data_type=
'ont-cram', name=name, called_sites=len(data))
def get_modified_reference_positions(read):
if read.has_tag('MM'):
basemod = read.get_tag('MM').split(',', 1)[0]
if '-' in basemod:
sys.exit(
'ERROR: modifications on negative strand currently unsupported.'
)
base, mod = basemod.split('+')
deltas = [int(i) for i in read.get_tag('MM').split(',')[1:]]
probabilities = phred_to_probability(read.get_tag('MP'))
locations = np.cumsum(deltas) + np.concatenate((np.zeros(shape=1),
np.ones(shape=len(deltas) - 1))).astype('int')
base_index = np.array([i for i, letter in enumerate(read.
get_forward_sequence()) if letter == base])
modified_bases = base_index[locations]
refpos = np.array(read.get_reference_positions(full_length=True))
if read.is_reverse:
refpos = np.flipud(refpos)
probabilities = probabilities[::-1]
return basemod, refpos[modified_bases], probabilities
else:
return None, [None], [None]
def errs_tab(n):
"""Generate list of error rates for qualities less than equal than n."""
return [(10 ** (q / -10)) for q in range(n + 1)]
<mask token>
def get_data(methylation_files, names, window, smoothen=5):
"""
Import methylation data from all files in the list methylation_files
Data can be either frequency or raw.
data is extracted within the window args.window
Frequencies are smoothened using a sliding window
"""
return [read_meth(f, n, window, smoothen) for f, n in zip(
methylation_files, names)]
| <mask token>
class Methylation(object):
def __init__(self, table, data_type, name, called_sites):
self.table = table
self.data_type = data_type
self.name = name
self.called_sites = called_sites
def read_meth(filename, name, window, smoothen=5):
"""
converts a file from nanopolish to a pandas dataframe
input can be from calculate_methylation_frequency
which will return a dataframe with 'chromosome', 'pos', 'methylated_frequency'
smoothening the result by a rolling average
input can also be raw data per read, optionally phased
which will return a dataframe with 'read', 'chromosome', 'pos', 'log_lik_ratio', 'strand'
"""
file_type = file_sniffer(filename)
logging.info('File is of type {}'.format(file_type))
try:
if file_type.startswith('nanopolish'):
return parse_nanopolish(filename, file_type, name, window,
smoothen=smoothen)
elif file_type == 'nanocompore':
return parse_nanocompore(filename, name, window)
elif file_type == 'ont-cram':
return parse_ont_cram(filename, name, window)
except Exception:
sys.stderr.write('\n\n\nInput file {} not recognized!\n'.format(
filename))
sys.stderr.write('\n\n\nDetailed error:\n')
raise
def parse_nanopolish(filename, file_type, name, window, smoothen=5):
table = pd.read_csv(filename, sep='\t')
gr = pr.PyRanges(table.rename(columns={'start': 'Start', 'chromosome':
'Chromosome', 'end': 'End', 'Strand': 'strand'}))
logging.info('Read the file in a dataframe.')
if window:
gr = gr[window.chromosome, window.begin:window.end]
try:
gr.pos = np.floor(gr.drop().df[['Start', 'End']].mean(axis=1))
except KeyError:
sys.stderr.write('\n\n\nProblem parsing nanopolish file {}!\n'.
format(filename))
sys.stderr.write(
'Could it be that there are no calls in your selected window?\n')
sys.stderr.write('\n\n\nDetailed error:\n')
raise
table = gr.df
if file_type in ['nanopolish_call', 'nanopolish_phased']:
table = table.drop(columns=['Start', 'End', 'log_lik_methylated',
'log_lik_unmethylated', 'num_calling_strands', 'num_motifs',
'sequence'])
return Methylation(table=table.sort_values(['read_name', 'pos']),
data_type=file_type, name=name, called_sites=len(table))
if file_type == 'nanopolish_freq':
called_sites = table.called_sites
table = table.drop(columns=['Start', 'End', 'num_motifs_in_group',
'called_sites', 'called_sites_methylated', 'group_sequence'])
return Methylation(table=table.sort_values('pos').groupby('pos').
mean().rolling(window=smoothen, center=True).mean(), data_type=
file_type, name=name, called_sites=called_sites.sum())
def parse_nanocompore(filename, name, window):
def nanocompore_columns_of_interest(column):
if column in ['pos', 'ref_id']:
return True
elif column.endswith('pvalue_context_2') or column.endswith('pvalue'):
return True
else:
return False
table = pd.read_csv(filename, sep='\t', usecols=
nanocompore_columns_of_interest)
if window:
table = table[table['ref_id'] == window.chromosome]
return Methylation(table=table.sort_values('pos').append({'pos': window
.end}, ignore_index=True).drop(columns='ref_id').fillna(1.0),
data_type='nanocompore', name=name, called_sites=len(table))
def parse_ont_cram(filename, name, window):
cram = pysam.AlignmentFile(filename, 'rc')
data = []
for read in cram.fetch(reference=window.chromosome, start=window.begin,
end=window.end):
if not read.is_supplementary and not read.is_secondary:
mod, positions, quals = get_modified_reference_positions(read)
for pos, qual in zip(positions, quals):
if pos is not None:
data.append((read.query_name, '-' if read.is_reverse else
'+', pos, qual, mod))
return Methylation(table=pd.DataFrame(data, columns=['read_name',
'strand', 'pos', 'quality', 'mod']).astype(dtype={'mod': 'category',
'quality': 'float'}).sort_values(['read_name', 'pos']), data_type=
'ont-cram', name=name, called_sites=len(data))
def get_modified_reference_positions(read):
if read.has_tag('MM'):
basemod = read.get_tag('MM').split(',', 1)[0]
if '-' in basemod:
sys.exit(
'ERROR: modifications on negative strand currently unsupported.'
)
base, mod = basemod.split('+')
deltas = [int(i) for i in read.get_tag('MM').split(',')[1:]]
probabilities = phred_to_probability(read.get_tag('MP'))
locations = np.cumsum(deltas) + np.concatenate((np.zeros(shape=1),
np.ones(shape=len(deltas) - 1))).astype('int')
base_index = np.array([i for i, letter in enumerate(read.
get_forward_sequence()) if letter == base])
modified_bases = base_index[locations]
refpos = np.array(read.get_reference_positions(full_length=True))
if read.is_reverse:
refpos = np.flipud(refpos)
probabilities = probabilities[::-1]
return basemod, refpos[modified_bases], probabilities
else:
return None, [None], [None]
def errs_tab(n):
"""Generate list of error rates for qualities less than equal than n."""
return [(10 ** (q / -10)) for q in range(n + 1)]
def phred_to_probability(quals, tab=errs_tab(128)):
return [tab[ord(q) - 33] for q in quals]
def get_data(methylation_files, names, window, smoothen=5):
"""
Import methylation data from all files in the list methylation_files
Data can be either frequency or raw.
data is extracted within the window args.window
Frequencies are smoothened using a sliding window
"""
return [read_meth(f, n, window, smoothen) for f, n in zip(
methylation_files, names)]
| import pandas as pd
import pyranges as pr
import numpy as np
import sys
import logging
from methplotlib.utils import file_sniffer
import pysam
class Methylation(object):
def __init__(self, table, data_type, name, called_sites):
self.table = table
self.data_type = data_type
self.name = name
self.called_sites = called_sites
def read_meth(filename, name, window, smoothen=5):
"""
converts a file from nanopolish to a pandas dataframe
input can be from calculate_methylation_frequency
which will return a dataframe with 'chromosome', 'pos', 'methylated_frequency'
smoothening the result by a rolling average
input can also be raw data per read, optionally phased
which will return a dataframe with 'read', 'chromosome', 'pos', 'log_lik_ratio', 'strand'
"""
file_type = file_sniffer(filename)
logging.info('File is of type {}'.format(file_type))
try:
if file_type.startswith('nanopolish'):
return parse_nanopolish(filename, file_type, name, window,
smoothen=smoothen)
elif file_type == 'nanocompore':
return parse_nanocompore(filename, name, window)
elif file_type == 'ont-cram':
return parse_ont_cram(filename, name, window)
except Exception:
sys.stderr.write('\n\n\nInput file {} not recognized!\n'.format(
filename))
sys.stderr.write('\n\n\nDetailed error:\n')
raise
def parse_nanopolish(filename, file_type, name, window, smoothen=5):
table = pd.read_csv(filename, sep='\t')
gr = pr.PyRanges(table.rename(columns={'start': 'Start', 'chromosome':
'Chromosome', 'end': 'End', 'Strand': 'strand'}))
logging.info('Read the file in a dataframe.')
if window:
gr = gr[window.chromosome, window.begin:window.end]
try:
gr.pos = np.floor(gr.drop().df[['Start', 'End']].mean(axis=1))
except KeyError:
sys.stderr.write('\n\n\nProblem parsing nanopolish file {}!\n'.
format(filename))
sys.stderr.write(
'Could it be that there are no calls in your selected window?\n')
sys.stderr.write('\n\n\nDetailed error:\n')
raise
table = gr.df
if file_type in ['nanopolish_call', 'nanopolish_phased']:
table = table.drop(columns=['Start', 'End', 'log_lik_methylated',
'log_lik_unmethylated', 'num_calling_strands', 'num_motifs',
'sequence'])
return Methylation(table=table.sort_values(['read_name', 'pos']),
data_type=file_type, name=name, called_sites=len(table))
if file_type == 'nanopolish_freq':
called_sites = table.called_sites
table = table.drop(columns=['Start', 'End', 'num_motifs_in_group',
'called_sites', 'called_sites_methylated', 'group_sequence'])
return Methylation(table=table.sort_values('pos').groupby('pos').
mean().rolling(window=smoothen, center=True).mean(), data_type=
file_type, name=name, called_sites=called_sites.sum())
def parse_nanocompore(filename, name, window):
def nanocompore_columns_of_interest(column):
if column in ['pos', 'ref_id']:
return True
elif column.endswith('pvalue_context_2') or column.endswith('pvalue'):
return True
else:
return False
table = pd.read_csv(filename, sep='\t', usecols=
nanocompore_columns_of_interest)
if window:
table = table[table['ref_id'] == window.chromosome]
return Methylation(table=table.sort_values('pos').append({'pos': window
.end}, ignore_index=True).drop(columns='ref_id').fillna(1.0),
data_type='nanocompore', name=name, called_sites=len(table))
def parse_ont_cram(filename, name, window):
cram = pysam.AlignmentFile(filename, 'rc')
data = []
for read in cram.fetch(reference=window.chromosome, start=window.begin,
end=window.end):
if not read.is_supplementary and not read.is_secondary:
mod, positions, quals = get_modified_reference_positions(read)
for pos, qual in zip(positions, quals):
if pos is not None:
data.append((read.query_name, '-' if read.is_reverse else
'+', pos, qual, mod))
return Methylation(table=pd.DataFrame(data, columns=['read_name',
'strand', 'pos', 'quality', 'mod']).astype(dtype={'mod': 'category',
'quality': 'float'}).sort_values(['read_name', 'pos']), data_type=
'ont-cram', name=name, called_sites=len(data))
def get_modified_reference_positions(read):
if read.has_tag('MM'):
basemod = read.get_tag('MM').split(',', 1)[0]
if '-' in basemod:
sys.exit(
'ERROR: modifications on negative strand currently unsupported.'
)
base, mod = basemod.split('+')
deltas = [int(i) for i in read.get_tag('MM').split(',')[1:]]
probabilities = phred_to_probability(read.get_tag('MP'))
locations = np.cumsum(deltas) + np.concatenate((np.zeros(shape=1),
np.ones(shape=len(deltas) - 1))).astype('int')
base_index = np.array([i for i, letter in enumerate(read.
get_forward_sequence()) if letter == base])
modified_bases = base_index[locations]
refpos = np.array(read.get_reference_positions(full_length=True))
if read.is_reverse:
refpos = np.flipud(refpos)
probabilities = probabilities[::-1]
return basemod, refpos[modified_bases], probabilities
else:
return None, [None], [None]
def errs_tab(n):
"""Generate list of error rates for qualities less than equal than n."""
return [(10 ** (q / -10)) for q in range(n + 1)]
def phred_to_probability(quals, tab=errs_tab(128)):
return [tab[ord(q) - 33] for q in quals]
def get_data(methylation_files, names, window, smoothen=5):
"""
Import methylation data from all files in the list methylation_files
Data can be either frequency or raw.
data is extracted within the window args.window
Frequencies are smoothened using a sliding window
"""
return [read_meth(f, n, window, smoothen) for f, n in zip(
methylation_files, names)]
| import pandas as pd
import pyranges as pr
import numpy as np
import sys
import logging
from methplotlib.utils import file_sniffer
import pysam
class Methylation(object):
def __init__(self, table, data_type, name, called_sites):
self.table = table
self.data_type = data_type
self.name = name
self.called_sites = called_sites
def read_meth(filename, name, window, smoothen=5):
"""
converts a file from nanopolish to a pandas dataframe
input can be from calculate_methylation_frequency
which will return a dataframe with 'chromosome', 'pos', 'methylated_frequency'
smoothening the result by a rolling average
input can also be raw data per read, optionally phased
which will return a dataframe with 'read', 'chromosome', 'pos', 'log_lik_ratio', 'strand'
"""
file_type = file_sniffer(filename)
logging.info("File is of type {}".format(file_type))
try:
if file_type.startswith("nanopolish"):
return parse_nanopolish(filename, file_type, name, window, smoothen=smoothen)
elif file_type == "nanocompore":
return parse_nanocompore(filename, name, window)
elif file_type == "ont-cram":
return parse_ont_cram(filename, name, window)
except Exception:
sys.stderr.write("\n\n\nInput file {} not recognized!\n".format(filename))
sys.stderr.write("\n\n\nDetailed error:\n")
raise
def parse_nanopolish(filename, file_type, name, window, smoothen=5):
table = pd.read_csv(filename, sep="\t")
gr = pr.PyRanges(table.rename(columns={"start": "Start", "chromosome": "Chromosome",
"end": "End", "Strand": "strand"}))
logging.info("Read the file in a dataframe.")
if window:
gr = gr[window.chromosome, window.begin:window.end]
try:
gr.pos = np.floor(gr.drop().df[["Start", "End"]].mean(axis=1))
except KeyError:
sys.stderr.write("\n\n\nProblem parsing nanopolish file {}!\n".format(filename))
sys.stderr.write("Could it be that there are no calls in your selected window?\n")
sys.stderr.write("\n\n\nDetailed error:\n")
raise
table = gr.df
if file_type in ['nanopolish_call', 'nanopolish_phased']:
table = table.drop(columns=['Start', 'End', 'log_lik_methylated',
'log_lik_unmethylated', 'num_calling_strands',
'num_motifs', 'sequence'])
return Methylation(
table=table.sort_values(['read_name', 'pos']),
data_type=file_type,
name=name,
called_sites=len(table))
if file_type == "nanopolish_freq":
called_sites = table.called_sites
table = table.drop(columns=['Start', 'End', 'num_motifs_in_group',
'called_sites', 'called_sites_methylated',
'group_sequence'])
return Methylation(
table=table.sort_values('pos')
.groupby('pos')
.mean()
.rolling(window=smoothen, center=True)
.mean(),
data_type=file_type,
name=name,
called_sites=called_sites.sum())
def parse_nanocompore(filename, name, window):
def nanocompore_columns_of_interest(column):
if column in ['pos', 'ref_id']:
return True
elif column.endswith('pvalue_context_2') or column.endswith('pvalue'):
return True
else:
return False
table = pd.read_csv(filename, sep="\t", usecols=nanocompore_columns_of_interest)
if window:
table = table[table["ref_id"] == window.chromosome]
return Methylation(
table=table.sort_values('pos')
.append({'pos': window.end}, ignore_index=True)
.drop(columns="ref_id")
.fillna(1.0),
data_type='nanocompore',
name=name,
called_sites=len(table))
def parse_ont_cram(filename, name, window):
cram = pysam.AlignmentFile(filename, "rc")
data = []
for read in cram.fetch(reference=window.chromosome, start=window.begin, end=window.end):
if not read.is_supplementary and not read.is_secondary:
mod, positions, quals = get_modified_reference_positions(read)
for pos, qual in zip(positions, quals):
if pos is not None:
data.append((read.query_name,
'-' if read.is_reverse else '+',
pos,
qual,
mod))
return Methylation(
table=pd.DataFrame(data, columns=['read_name', 'strand', 'pos', 'quality', 'mod'])
.astype(dtype={'mod': 'category', 'quality': 'float'})
.sort_values(['read_name', 'pos']),
data_type="ont-cram",
name=name,
called_sites=len(data))
def get_modified_reference_positions(read):
if read.has_tag('MM'):
basemod = read.get_tag('MM').split(',', 1)[0]
if '-' in basemod:
sys.exit("ERROR: modifications on negative strand currently unsupported.")
base, mod = basemod.split('+')
deltas = [int(i) for i in read.get_tag('MM').split(',')[1:]]
probabilities = phred_to_probability(read.get_tag('MP'))
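        # Each MM-tag delta counts the skipped occurrences of the target base
        # before the next modified one; accumulate them into indices over the
        # read's occurrences of that base.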
locations = np.cumsum(deltas) + np.concatenate(
(np.zeros(shape=1),
np.ones(shape=len(deltas) - 1))).astype('int')
base_index = np.array(
[i for i, letter in enumerate(read.get_forward_sequence()) if letter == base]
)
modified_bases = base_index[locations]
refpos = np.array(read.get_reference_positions(full_length=True))
if read.is_reverse:
refpos = np.flipud(refpos)
probabilities = probabilities[::-1]
return (basemod, refpos[modified_bases], probabilities)
else:
return (None, [None], [None])
def errs_tab(n):
"""Generate list of error rates for qualities less than equal than n."""
return [10**(q / -10) for q in range(n + 1)]
def phred_to_probability(quals, tab=errs_tab(128)):
return [tab[ord(q) - 33] for q in quals]
def get_data(methylation_files, names, window, smoothen=5):
"""
Import methylation data from all files in the list methylation_files
Data can be either frequency or raw.
data is extracted within the window args.window
Frequencies are smoothened using a sliding window
"""
return [read_meth(f, n, window, smoothen) for f, n in zip(methylation_files, names)]
| [
5,
8,
10,
11,
12
] |
511 | 6ee36994f63d64e35c4e76f65e9c4f09797a161e | <mask token>
class BinarySearchTree:
def __init__(self):
self.root = None
def create(self, val):
if self.root == None:
self.root = Node(val)
else:
current = self.root
while True:
if val < current.info:
if current.left:
current = current.left
else:
current.left = Node(val)
break
elif val > current.info:
if current.right:
current = current.right
else:
current.right = Node(val)
break
else:
break
<mask token>
| class Node:
<mask token>
<mask token>
class BinarySearchTree:
def __init__(self):
self.root = None
def create(self, val):
if self.root == None:
self.root = Node(val)
else:
current = self.root
while True:
if val < current.info:
if current.left:
current = current.left
else:
current.left = Node(val)
break
elif val > current.info:
if current.right:
current = current.right
else:
current.right = Node(val)
break
else:
break
<mask token>
| class Node:
<mask token>
def __str__(self):
return str(self.info)
class BinarySearchTree:
def __init__(self):
self.root = None
def create(self, val):
if self.root == None:
self.root = Node(val)
else:
current = self.root
while True:
if val < current.info:
if current.left:
current = current.left
else:
current.left = Node(val)
break
elif val > current.info:
if current.right:
current = current.right
else:
current.right = Node(val)
break
else:
break
<mask token>
| class Node:
def __init__(self, info):
self.info = info
self.left = None
self.right = None
self.level = None
def __str__(self):
return str(self.info)
class BinarySearchTree:
def __init__(self):
self.root = None
def create(self, val):
if self.root == None:
self.root = Node(val)
else:
current = self.root
while True:
if val < current.info:
if current.left:
current = current.left
else:
current.left = Node(val)
break
elif val > current.info:
if current.right:
current = current.right
else:
current.right = Node(val)
break
else:
break
<mask token>
def lca(root, v1, v2):
def find_path(r, p, n):
if not r:
return False
p.append(r)
if r.info == n:
return True
if r.left and find_path(r.left, p, n) or r.right and find_path(r.
right, p, n):
return True
p.pop()
return False
p1 = []
p2 = []
if not find_path(root, p1, v1) or not find_path(root, p2, v2):
return -1
i = 0
while i < len(p1) and i < len(p2):
if p1[i].info != p2[i].info:
break
i += 1
return p1[i - 1]
tree = BinarySearchTree()
| class Node:
def __init__(self, info):
self.info = info
self.left = None
self.right = None
self.level = None
def __str__(self):
return str(self.info)
class BinarySearchTree:
def __init__(self):
self.root = None
def create(self, val):
if self.root == None:
self.root = Node(val)
else:
current = self.root
while True:
if val < current.info:
if current.left:
current = current.left
else:
current.left = Node(val)
break
elif val > current.info:
if current.right:
current = current.right
else:
current.right = Node(val)
break
else:
break
# Enter your code here. Read input from STDIN. Print output to STDOUT
'''
class Node:
def __init__(self,info):
self.info = info
self.left = None
self.right = None
       // this is a node of the tree, which contains info as data, left, right
Approach:
1. Find a path from the root to n1 and store it in a vector or array.
2. Find a path from the root to n2 and store it in another vector or array.
3. Traverse both paths till the values in arrays are the same. Return the common element just before the mismatch.
'''
# Time complexity: O(n)
def lca(root, v1, v2):
def find_path(r, p, n):
if not r: return False
# Append the root
p.append(r)
if r.info == n:
return True
# Check if k is found in left or right sub-tree
if (r.left and find_path(r.left, p, n)) or (r.right and find_path(r.right, p, n)):
return True
# If not present in subtree rooted with root, remove
# root from path and return False
p.pop()
return False
p1 = []
p2 = []
if not find_path(root, p1, v1) or not find_path(root, p2, v2):
return -1
i = 0
while i < len(p1) and i < len(p2):
if p1[i].info != p2[i].info:
break
i += 1
return p1[i-1]
tree = BinarySearchTree()
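# Minimal usage sketch (the stdin-driven judge harness is omitted here):
#   for v in (4, 2, 7, 1, 3):
#       tree.create(v)
#   ancestor = lca(tree.root, 1, 3)  # -> the node holding 2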
| [
3,
4,
5,
8,
9
] |
512 | 207b6e56b683c0b069c531a4c6076c2822814390 | <mask token>
| <mask token>
file.write('Yo')
| file = open('yo.txt', 'w')
file.write('Yo')
| file = open("yo.txt", "wr")
file.write("Yo")
| null | [
0,
1,
2,
3
] |
513 | e6af221f1d6397d0fc52671cdd27d43549d0aecb | <mask token>
| <mask token>
def test_xyz123():
cone_x = bc.primitives.Cone(1.0, 1.0)
| __author__ = 'jjpr'
<mask token>
def test_xyz123():
cone_x = bc.primitives.Cone(1.0, 1.0)
| __author__ = 'jjpr'
import pyrr
import barleycorn as bc
def test_xyz123():
cone_x = bc.primitives.Cone(1.0, 1.0)
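    # Smoke test: constructing the primitive should simply not raise.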
| null | [
0,
1,
2,
3
] |
514 | ce75c23c6b0862dde797225f53c900b4ebc56428 | <mask token>
| <mask token>
def usuario():
global usser
usser = input('Introduce un usuario : ')
if len(usser) < 5 or len(usser) > 15:
print('El usuario debe tener entre 5 y 15 caracteres')
usuario()
elif usser.isalnum() == False:
        print('Los valores del usuario deben ser únicamente letras o números')
usuario()
else:
print(True)
<mask token>
| <mask token>
def usuario():
global usser
usser = input('Introduce un usuario : ')
if len(usser) < 5 or len(usser) > 15:
print('El usuario debe tener entre 5 y 15 caracteres')
usuario()
elif usser.isalnum() == False:
        print('Los valores del usuario deben ser únicamente letras o números')
usuario()
else:
print(True)
def contraseña():
global passw
passw = input('Introduce contraseña: ')
if len(passw) <= 9:
        print('La contraseña debe tener al menos 10 caracteres')
contraseña()
elif passw.isalnum() == True:
print('La contraseña debe tener al menos un carácter no alfanumérico')
contraseña()
elif passw.lower() == passw:
print('Debe haber por lo menos una mayúscula')
contraseña()
elif passw.upper() == passw:
print('Debe haber por lo menos una minúscula')
contraseña()
for i in passw:
if i == ' ':
print('La contraseña no debe tener espacios en blanco')
contraseña()
print(True)
| from bbdd import *
def usuario():
global usser
usser = input('Introduce un usuario : ')
if len(usser) < 5 or len(usser) > 15:
print('El usuario debe tener entre 5 y 15 caracteres')
usuario()
elif usser.isalnum() == False:
        print('Los valores del usuario deben ser únicamente letras o números')
usuario()
else:
print(True)
def contraseña():
global passw
passw = input('Introduce contraseña: ')
if len(passw) <= 9:
        print('La contraseña debe tener al menos 10 caracteres')
contraseña()
elif passw.isalnum() == True:
print('La contraseña debe tener al menos un carácter no alfanumérico')
contraseña()
elif passw.lower() == passw:
print('Debe haber por lo menos una mayúscula')
contraseña()
elif passw.upper() == passw:
print('Debe haber por lo menos una minúscula')
contraseña()
for i in passw:
if i == ' ':
print('La contraseña no debe tener espacios en blanco')
contraseña()
print(True)
| from bbdd import *
def usuario():
global usser
usser=input("Introduce un usuario : ")
if len(usser)<5 or len(usser)>15:
print("El usuario debe tener entre 5 y 15 caracteres")
usuario()
elif usser.isalnum()==False:
print("Los valores del usurio deben ser únicamente letras o números")
usuario()
else:
print(True)
def contraseña():
global passw
passw=input("Introduce contraseña: ")
if len(passw)<=9:
print("La contraseña debe tener al menos 10 caractéres")
contraseña()
elif passw.isalnum()==True:
print ("La contraseña debe tener al menos un carácter no alfanumérico")
contraseña()
elif passw.lower() == passw:
print("Debe haber por lo menos una mayúscula")
contraseña()
elif passw.upper()==passw:
print("Debe haber por lo menos una minúscula")
contraseña()
for i in passw:
if i==" ":
print("La contraseña no debe tener espacios en blanco")
contraseña()
print(True)
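# Caveat: retries happen via recursion, so once a valid password is finally
# entered, print(True) also runs while each earlier call unwinds.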
| [
0,
1,
2,
3,
4
] |
515 | 345967e2aeafda6ce30cbbbbacf976c97b17def7 | <mask token>
| myDict = {'Friends': ['AP', 'Soham', 'Baba'], 'Likes': ['Math',
'Programming'], 'languages': ['C++', 'Python', 'Java']}
myInt = 123
myFloat = 12.3333
myName = 'Somesh Thakur'
| null | null | null | [
0,
1
] |
516 | f70f66926b9e2bf8b387d481263493d7f4c65397 | <mask token>
class ventaDetalle:
<mask token>
| <mask token>
class ventaDetalle:
def __init__(self, pro, pre, cant):
self.producto = pro
self.precio = pre
self.cantidad = cant
| <mask token>
class Articulo:
<mask token>
class ventaDetalle:
def __init__(self, pro, pre, cant):
self.producto = pro
self.precio = pre
self.cantidad = cant
| <mask token>
class Articulo:
def __init__(self, cod, des, pre, stoc):
self.codigo = cod
self.descripcion = des
self.precio = pre
self.stock = stoc
class ventaDetalle:
def __init__(self, pro, pre, cant):
self.producto = pro
self.precio = pre
self.cantidad = cant
| """"
articulo
cliente
venta
ventadet
"""
class Articulo:
def __init__(self,cod,des,pre,stoc):
self.codigo=cod
self.descripcion = des
self.precio=pre
self.stock=stoc
class ventaDetalle:
def __init__(self,pro,pre,cant):
self.producto=pro
self.precio=pre
self.cantidad=cant
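# Per the header docstring, the Cliente and Venta classes listed there are not
# yet implemented in this snippet.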
| [
1,
2,
3,
4,
5
] |
517 | b8fcd8e6dce8d210576bc4166dd258e5fd51278d | <mask token>
class _PartitionedResults(BaseResults):
<mask token>
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
self.partitions.mask[indices] = True
<mask token>
<mask token>
<mask token>
class _ResolvedResults(BaseResults):
def __init__(self, partitioned_results: _PartitionedResults):
self._partitioned_results = partitioned_results
theta = _init_unified_series(partitioned_results.theta)
skeletons = _init_unified_series(partitioned_results.skeletons)
scores = _init_unified_series(partitioned_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def resolve(self, segment, segment_alignment):
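        # Commit the chosen alignment (a column index) for every frame in this
        # contiguous segment, copying it into the unified series.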
self.scores[segment] = self._partitioned_results.scores[segment][:,
segment_alignment]
self.skeletons[segment] = self._partitioned_results.skeletons[segment][
:, segment_alignment]
self.theta[segment] = self._partitioned_results.theta[segment][:,
segment_alignment]
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
def num_valid(self):
return np.sum(~self.scores.mask)
class _FinalResults(BaseResults):
@classmethod
def from_resolved(cls, resolved_results: _ResolvedResults):
return _FinalResults(theta=resolved_results.theta.filled(np.nan),
skeletons=resolved_results.skeletons.filled(np.nan), scores=
resolved_results.scores.filled(np.nan))
@classmethod
def from_shuffled(cls, shuffled_results: ShuffledResults):
return _FinalResults(theta=np.full_like(shuffled_results.theta[:, 0
], np.nan), skeletons=np.full_like(shuffled_results.scores[:, 0
], np.nan), scores=np.full_like(shuffled_results.skeletons[:, 0
], np.nan))
<mask token>
| <mask token>
def _init_partitioned_series(shuffled_series: np.ndarray):
return ma.masked_all_like(shuffled_series)
<mask token>
class _PartitionedResults(BaseResults):
def __init__(self, shuffled_results: ShuffledResults):
self.cur_partition = -1
self.partitions = ma.masked_all((len(shuffled_results),), dtype=int)
self._shuffled_results = shuffled_results
theta = _init_partitioned_series(shuffled_results.theta)
skeletons = _init_partitioned_series(shuffled_results.skeletons)
scores = _init_partitioned_series(shuffled_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
self.partitions.mask[indices] = True
def set_partition(self, frame_index: int, partition: int, new_partition:
bool=False):
if new_partition:
self.cur_partition += 1
_set_partition(self.theta, self._shuffled_results.theta,
frame_index, partition)
_set_partition(self.skeletons, self._shuffled_results.skeletons,
frame_index, partition)
_set_partition(self.scores, self._shuffled_results.scores,
frame_index, partition)
self.partitions[frame_index] = self.cur_partition
def _get_partition_indices(self, partition_index: int):
return np.where(self.partitions == partition_index)[0]
def get_segments(self):
all_partitions_indexes = np.unique(self.partitions.filled(-1))
return [self._get_partition_indices(partition_index) for
partition_index in all_partitions_indexes if partition_index >= 0]
class _ResolvedResults(BaseResults):
def __init__(self, partitioned_results: _PartitionedResults):
self._partitioned_results = partitioned_results
theta = _init_unified_series(partitioned_results.theta)
skeletons = _init_unified_series(partitioned_results.skeletons)
scores = _init_unified_series(partitioned_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def resolve(self, segment, segment_alignment):
self.scores[segment] = self._partitioned_results.scores[segment][:,
segment_alignment]
self.skeletons[segment] = self._partitioned_results.skeletons[segment][
:, segment_alignment]
self.theta[segment] = self._partitioned_results.theta[segment][:,
segment_alignment]
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
def num_valid(self):
return np.sum(~self.scores.mask)
class _FinalResults(BaseResults):
@classmethod
def from_resolved(cls, resolved_results: _ResolvedResults):
return _FinalResults(theta=resolved_results.theta.filled(np.nan),
skeletons=resolved_results.skeletons.filled(np.nan), scores=
resolved_results.scores.filled(np.nan))
@classmethod
def from_shuffled(cls, shuffled_results: ShuffledResults):
return _FinalResults(theta=np.full_like(shuffled_results.theta[:, 0
], np.nan), skeletons=np.full_like(shuffled_results.scores[:, 0
], np.nan), scores=np.full_like(shuffled_results.skeletons[:, 0
], np.nan))
<mask token>
def _calculate_smallest_gap_to_adjacent(segment_index, segments,
segments_alignment):
score = np.nan
segment_offset = np.nan
if segment_index - 1 >= 0 and not segments_alignment.mask[segment_index - 1
]:
gap = segments[segment_index][0] - segments[segment_index - 1][-1]
score = gap
segment_offset = -1
if segment_index + 1 < len(segments_alignment
) and not segments_alignment.mask[segment_index + 1]:
gap = segments[segment_index + 1][0] - segments[segment_index][-1]
if np.isnan(score) or gap < score:
score = gap
segment_offset = 1
return score, segment_offset
def _align_unlabelled_segments_with_adjacents(segments, segments_alignment,
partitioned_skeletons, frame_rate: float):
"""
Resolve the unaligned segments by comparing with adjacent segments,
starting with the segments that have the least frames gap between an adjacent trusted segment
Don't align isolated segments which a big gap between trusted segments
"""
maximum_gap_allowed = max(1, int(frame_rate *
MAXIMUM_GAP_ALLOWED_WITH_ADJACENT_SEGMENT_SEC))
if np.all(segments_alignment.mask):
logger.info(
'There are no trusted segments with head decision to resolve the whole video, stopping analysis.'
)
return segments_alignment
unaligned = np.where(segments_alignment.mask)[0]
while len(unaligned) > 0:
all_gaps = [_calculate_smallest_gap_to_adjacent(segment_index=x,
segments=segments, segments_alignment=segments_alignment) for x in
unaligned]
segment_to_fix_index = np.nanargmin(all_gaps, axis=0)[0]
gap_to_adjacent_segment, adjacent_segment_offset = all_gaps[
segment_to_fix_index]
if gap_to_adjacent_segment > maximum_gap_allowed:
break
cur_segment_index = unaligned[segment_to_fix_index]
cur_segment_skeleton = partitioned_skeletons[segments[
cur_segment_index]]
adjacent_segment_index = cur_segment_index + adjacent_segment_offset
adjacent_alignment = segments_alignment[adjacent_segment_index]
adjacent_segment = segments[adjacent_segment_index]
adjacent_segment_skeleton = partitioned_skeletons[adjacent_segment][
:, adjacent_alignment]
if adjacent_segment_offset == -1:
closest_unaligned_skeleton = cur_segment_skeleton[0]
closest_known_skeleton = adjacent_segment_skeleton[-1]
elif adjacent_segment_offset == 1:
closest_unaligned_skeleton = cur_segment_skeleton[-1]
closest_known_skeleton = adjacent_segment_skeleton[0]
else:
raise ValueError()
dists = [skeleton_distance(closest_known_skeleton, skel) for skel in
closest_unaligned_skeleton]
segments_alignment[cur_segment_index] = int(np.argmax(dists))
unaligned = np.where(segments_alignment.mask)[0]
return segments_alignment
def _init_unified_series(mixed_series):
return ma.masked_all((mixed_series.shape[0],) + mixed_series.shape[2:],
dtype=mixed_series.dtype)
def resolve_head_tail(shuffled_results: ShuffledResults, original_results:
OriginalResults, frame_rate: float, score_threshold) ->BaseResults:
len_series = len(shuffled_results)
partitioned_results = _make_continuous_partitions(score_threshold=
score_threshold, frame_rate=frame_rate, shuffled_results=
shuffled_results)
segments = partitioned_results.get_segments()
if len(segments) == 0:
logger.error(
f"Couldn't find any continuous segments of predicted data above the threshold {score_threshold}, stopping analysis."
)
return _FinalResults.from_shuffled(shuffled_results)
segments_alignment = _align_segments_with_labels(segments,
partitioned_results.skeletons, original_results.skeletons)
segments_alignment = _align_unlabelled_segments_with_adjacents(segments,
segments_alignment, partitioned_results.skeletons, frame_rate)
resolved_results = _ResolvedResults(partitioned_results)
for segment, segment_alignment in zip(segments, segments_alignment):
if not ma.is_masked(segment_alignment):
resolved_results.resolve(segment, segment_alignment)
low_scores_indices = np.where(ma.masked_less(resolved_results.scores,
score_threshold).mask)[0]
resolved_results.mask(low_scores_indices)
num_success = resolved_results.num_valid()
original_num_success = np.any(~np.isnan(original_results.skeletons),
axis=(1, 2)).sum()
logger.info(
f'Resolved head/tail, {num_success} out of {len_series} frames analyzed successfully ({float(num_success) / len_series * 100:.1f}%) (original features : {original_num_success} or {float(original_num_success) / len_series * 100:.1f}% of total)'
)
if num_success < original_num_success:
logger.warning(
f'Original results had {original_num_success - num_success} more successfully analyzed frames!'
)
return _FinalResults.from_resolved(resolved_results)
| <mask token>
def _init_partitioned_series(shuffled_series: np.ndarray):
return ma.masked_all_like(shuffled_series)
<mask token>
class _PartitionedResults(BaseResults):
def __init__(self, shuffled_results: ShuffledResults):
self.cur_partition = -1
self.partitions = ma.masked_all((len(shuffled_results),), dtype=int)
self._shuffled_results = shuffled_results
theta = _init_partitioned_series(shuffled_results.theta)
skeletons = _init_partitioned_series(shuffled_results.skeletons)
scores = _init_partitioned_series(shuffled_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
self.partitions.mask[indices] = True
def set_partition(self, frame_index: int, partition: int, new_partition:
bool=False):
if new_partition:
self.cur_partition += 1
_set_partition(self.theta, self._shuffled_results.theta,
frame_index, partition)
_set_partition(self.skeletons, self._shuffled_results.skeletons,
frame_index, partition)
_set_partition(self.scores, self._shuffled_results.scores,
frame_index, partition)
self.partitions[frame_index] = self.cur_partition
def _get_partition_indices(self, partition_index: int):
return np.where(self.partitions == partition_index)[0]
def get_segments(self):
all_partitions_indexes = np.unique(self.partitions.filled(-1))
return [self._get_partition_indices(partition_index) for
partition_index in all_partitions_indexes if partition_index >= 0]
class _ResolvedResults(BaseResults):
def __init__(self, partitioned_results: _PartitionedResults):
self._partitioned_results = partitioned_results
theta = _init_unified_series(partitioned_results.theta)
skeletons = _init_unified_series(partitioned_results.skeletons)
scores = _init_unified_series(partitioned_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def resolve(self, segment, segment_alignment):
self.scores[segment] = self._partitioned_results.scores[segment][:,
segment_alignment]
self.skeletons[segment] = self._partitioned_results.skeletons[segment][
:, segment_alignment]
self.theta[segment] = self._partitioned_results.theta[segment][:,
segment_alignment]
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
def num_valid(self):
return np.sum(~self.scores.mask)
class _FinalResults(BaseResults):
@classmethod
def from_resolved(cls, resolved_results: _ResolvedResults):
return _FinalResults(theta=resolved_results.theta.filled(np.nan),
skeletons=resolved_results.skeletons.filled(np.nan), scores=
resolved_results.scores.filled(np.nan))
@classmethod
def from_shuffled(cls, shuffled_results: ShuffledResults):
return _FinalResults(theta=np.full_like(shuffled_results.theta[:, 0
], np.nan), skeletons=np.full_like(shuffled_results.scores[:, 0
], np.nan), scores=np.full_like(shuffled_results.skeletons[:, 0
], np.nan))
<mask token>
def _align_segments_with_labels(segments, partitioned_skeletons,
labelled_skeletons, min_labelled=5):
"""
Match the head/tail alignment with the results of the classical tracking in each of the segments,
if there is enough labelled data in the segment
"""
segments_alignment = ma.masked_all((len(segments),), dtype=np.uint8)
for segment_index, segment in enumerate(segments):
segment_skeletons = labelled_skeletons[segment]
non_nan_labelled = np.any(~np.isnan(segment_skeletons), axis=(1, 2))
labels_count = np.sum(non_nan_labelled)
non_masked = ~np.any(partitioned_skeletons[segment].mask, axis=(1,
2, 3))
to_compare = np.logical_and(non_nan_labelled, non_masked)
similarity_scores = []
for label_skel, partitioned_skeleton in zip(segment_skeletons[
to_compare], partitioned_skeletons[segment][to_compare]):
dists = [skeleton_distance(label_skel, x) for x in
partitioned_skeleton]
similarity_scores.append(dists)
if len(similarity_scores) > 0:
mean_similarity_scores = np.mean(similarity_scores, axis=0)
if mean_similarity_scores[0] * mean_similarity_scores[1
] < 0 and labels_count > min_labelled:
segments_alignment[segment_index] = np.argmax(
mean_similarity_scores)
return segments_alignment
def _calculate_smallest_gap_to_adjacent(segment_index, segments,
segments_alignment):
score = np.nan
segment_offset = np.nan
if segment_index - 1 >= 0 and not segments_alignment.mask[segment_index - 1
]:
gap = segments[segment_index][0] - segments[segment_index - 1][-1]
score = gap
segment_offset = -1
if segment_index + 1 < len(segments_alignment
) and not segments_alignment.mask[segment_index + 1]:
gap = segments[segment_index + 1][0] - segments[segment_index][-1]
if np.isnan(score) or gap < score:
score = gap
segment_offset = 1
return score, segment_offset
def _align_unlabelled_segments_with_adjacents(segments, segments_alignment,
partitioned_skeletons, frame_rate: float):
"""
Resolve the unaligned segments by comparing with adjacent segments,
starting with the segments that have the least frames gap between an adjacent trusted segment
Don't align isolated segments which a big gap between trusted segments
"""
maximum_gap_allowed = max(1, int(frame_rate *
MAXIMUM_GAP_ALLOWED_WITH_ADJACENT_SEGMENT_SEC))
if np.all(segments_alignment.mask):
logger.info(
'There are no trusted segments with head decision to resolve the whole video, stopping analysis.'
)
return segments_alignment
unaligned = np.where(segments_alignment.mask)[0]
while len(unaligned) > 0:
all_gaps = [_calculate_smallest_gap_to_adjacent(segment_index=x,
segments=segments, segments_alignment=segments_alignment) for x in
unaligned]
segment_to_fix_index = np.nanargmin(all_gaps, axis=0)[0]
gap_to_adjacent_segment, adjacent_segment_offset = all_gaps[
segment_to_fix_index]
if gap_to_adjacent_segment > maximum_gap_allowed:
break
cur_segment_index = unaligned[segment_to_fix_index]
cur_segment_skeleton = partitioned_skeletons[segments[
cur_segment_index]]
adjacent_segment_index = cur_segment_index + adjacent_segment_offset
adjacent_alignment = segments_alignment[adjacent_segment_index]
adjacent_segment = segments[adjacent_segment_index]
adjacent_segment_skeleton = partitioned_skeletons[adjacent_segment][
:, adjacent_alignment]
if adjacent_segment_offset == -1:
closest_unaligned_skeleton = cur_segment_skeleton[0]
closest_known_skeleton = adjacent_segment_skeleton[-1]
elif adjacent_segment_offset == 1:
closest_unaligned_skeleton = cur_segment_skeleton[-1]
closest_known_skeleton = adjacent_segment_skeleton[0]
else:
raise ValueError()
dists = [skeleton_distance(closest_known_skeleton, skel) for skel in
closest_unaligned_skeleton]
segments_alignment[cur_segment_index] = int(np.argmax(dists))
unaligned = np.where(segments_alignment.mask)[0]
return segments_alignment
def _init_unified_series(mixed_series):
return ma.masked_all((mixed_series.shape[0],) + mixed_series.shape[2:],
dtype=mixed_series.dtype)
def resolve_head_tail(shuffled_results: ShuffledResults, original_results:
OriginalResults, frame_rate: float, score_threshold) ->BaseResults:
len_series = len(shuffled_results)
partitioned_results = _make_continuous_partitions(score_threshold=
score_threshold, frame_rate=frame_rate, shuffled_results=
shuffled_results)
segments = partitioned_results.get_segments()
if len(segments) == 0:
logger.error(
f"Couldn't find any continuous segments of predicted data above the threshold {score_threshold}, stopping analysis."
)
return _FinalResults.from_shuffled(shuffled_results)
segments_alignment = _align_segments_with_labels(segments,
partitioned_results.skeletons, original_results.skeletons)
segments_alignment = _align_unlabelled_segments_with_adjacents(segments,
segments_alignment, partitioned_results.skeletons, frame_rate)
resolved_results = _ResolvedResults(partitioned_results)
for segment, segment_alignment in zip(segments, segments_alignment):
if not ma.is_masked(segment_alignment):
resolved_results.resolve(segment, segment_alignment)
low_scores_indices = np.where(ma.masked_less(resolved_results.scores,
score_threshold).mask)[0]
resolved_results.mask(low_scores_indices)
num_success = resolved_results.num_valid()
original_num_success = np.any(~np.isnan(original_results.skeletons),
axis=(1, 2)).sum()
logger.info(
f'Resolved head/tail, {num_success} out of {len_series} frames analyzed successfully ({float(num_success) / len_series * 100:.1f}%) (original features : {original_num_success} or {float(original_num_success) / len_series * 100:.1f}% of total)'
)
if num_success < original_num_success:
logger.warning(
f'Original results had {original_num_success - num_success} more successfully analyzed frames!'
)
return _FinalResults.from_resolved(resolved_results)
| <mask token>
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
CONTINUOUS_ANGLES_DIST_THRESHOLD = np.deg2rad(30)
CONTINOUS_SEGMENT_TIME_WINDOW_SEC = 0.2
MIN_SEGMENT_SIZE_SEC = 0.2
MAXIMUM_GAP_ALLOWED_WITH_ADJACENT_SEGMENT_SEC = 1
def _init_partitioned_series(shuffled_series: np.ndarray):
return ma.masked_all_like(shuffled_series)
def _set_partition(partitioned_series, shuffled_series, frame_index: int,
partition: int):
partitioned_series[frame_index][0] = shuffled_series[frame_index, partition
]
partitioned_series[frame_index][1] = shuffled_series[frame_index, 1 -
partition]
class _PartitionedResults(BaseResults):
def __init__(self, shuffled_results: ShuffledResults):
self.cur_partition = -1
self.partitions = ma.masked_all((len(shuffled_results),), dtype=int)
self._shuffled_results = shuffled_results
theta = _init_partitioned_series(shuffled_results.theta)
skeletons = _init_partitioned_series(shuffled_results.skeletons)
scores = _init_partitioned_series(shuffled_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
self.partitions.mask[indices] = True
def set_partition(self, frame_index: int, partition: int, new_partition:
bool=False):
if new_partition:
self.cur_partition += 1
_set_partition(self.theta, self._shuffled_results.theta,
frame_index, partition)
_set_partition(self.skeletons, self._shuffled_results.skeletons,
frame_index, partition)
_set_partition(self.scores, self._shuffled_results.scores,
frame_index, partition)
self.partitions[frame_index] = self.cur_partition
def _get_partition_indices(self, partition_index: int):
return np.where(self.partitions == partition_index)[0]
def get_segments(self):
all_partitions_indexes = np.unique(self.partitions.filled(-1))
return [self._get_partition_indices(partition_index) for
partition_index in all_partitions_indexes if partition_index >= 0]
class _ResolvedResults(BaseResults):
def __init__(self, partitioned_results: _PartitionedResults):
self._partitioned_results = partitioned_results
theta = _init_unified_series(partitioned_results.theta)
skeletons = _init_unified_series(partitioned_results.skeletons)
scores = _init_unified_series(partitioned_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def resolve(self, segment, segment_alignment):
self.scores[segment] = self._partitioned_results.scores[segment][:,
segment_alignment]
self.skeletons[segment] = self._partitioned_results.skeletons[segment][
:, segment_alignment]
self.theta[segment] = self._partitioned_results.theta[segment][:,
segment_alignment]
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
def num_valid(self):
return np.sum(~self.scores.mask)
class _FinalResults(BaseResults):
@classmethod
def from_resolved(cls, resolved_results: _ResolvedResults):
return _FinalResults(theta=resolved_results.theta.filled(np.nan),
skeletons=resolved_results.skeletons.filled(np.nan), scores=
resolved_results.scores.filled(np.nan))
@classmethod
def from_shuffled(cls, shuffled_results: ShuffledResults):
return _FinalResults(theta=np.full_like(shuffled_results.theta[:, 0
], np.nan), skeletons=np.full_like(shuffled_results.scores[:, 0
], np.nan), scores=np.full_like(shuffled_results.skeletons[:, 0
], np.nan))
def _make_continuous_partitions(shuffled_results: ShuffledResults,
score_threshold: float, frame_rate: float) ->_PartitionedResults:
time_window = max(1, int(frame_rate * CONTINOUS_SEGMENT_TIME_WINDOW_SEC))
min_segment_size = max(1, int(frame_rate * MIN_SEGMENT_SIZE_SEC))
partitioned_results = _PartitionedResults(shuffled_results)
good_score_frames = np.where(ma.greater_equal(ma.max(shuffled_results.
scores, axis=1), score_threshold))[0]
for frame_index in good_score_frames:
prev_theta = partitioned_results.theta[frame_index - min(
time_window, frame_index):frame_index, 0]
if np.all(np.any(prev_theta.mask, axis=1)):
partitioned_results.set_partition(frame_index=frame_index,
partition=0, new_partition=True)
else:
last_valid_index = np.where(~np.any(prev_theta.mask, axis=1))[0][-1
]
dists = [angle_distance(shuffled_results.theta[frame_index, k,
:], prev_theta[last_valid_index]) for k in range(2)]
partition = int(np.argmin(dists))
if dists[partition] < CONTINUOUS_ANGLES_DIST_THRESHOLD:
partitioned_results.set_partition(frame_index=frame_index,
partition=partition)
for cur_partition_indices in partitioned_results.get_segments():
if len(cur_partition_indices) < min_segment_size:
partitioned_results.mask(cur_partition_indices)
return partitioned_results
def _align_segments_with_labels(segments, partitioned_skeletons,
labelled_skeletons, min_labelled=5):
"""
Match the head/tail alignment with the results of the classical tracking in each of the segments,
if there is enough labelled data in the segment
"""
segments_alignment = ma.masked_all((len(segments),), dtype=np.uint8)
for segment_index, segment in enumerate(segments):
segment_skeletons = labelled_skeletons[segment]
non_nan_labelled = np.any(~np.isnan(segment_skeletons), axis=(1, 2))
labels_count = np.sum(non_nan_labelled)
non_masked = ~np.any(partitioned_skeletons[segment].mask, axis=(1,
2, 3))
to_compare = np.logical_and(non_nan_labelled, non_masked)
similarity_scores = []
for label_skel, partitioned_skeleton in zip(segment_skeletons[
to_compare], partitioned_skeletons[segment][to_compare]):
dists = [skeleton_distance(label_skel, x) for x in
partitioned_skeleton]
similarity_scores.append(dists)
if len(similarity_scores) > 0:
mean_similarity_scores = np.mean(similarity_scores, axis=0)
if mean_similarity_scores[0] * mean_similarity_scores[1
] < 0 and labels_count > min_labelled:
segments_alignment[segment_index] = np.argmax(
mean_similarity_scores)
return segments_alignment
def _calculate_smallest_gap_to_adjacent(segment_index, segments,
segments_alignment):
score = np.nan
segment_offset = np.nan
if segment_index - 1 >= 0 and not segments_alignment.mask[segment_index - 1
]:
gap = segments[segment_index][0] - segments[segment_index - 1][-1]
score = gap
segment_offset = -1
if segment_index + 1 < len(segments_alignment
) and not segments_alignment.mask[segment_index + 1]:
gap = segments[segment_index + 1][0] - segments[segment_index][-1]
if np.isnan(score) or gap < score:
score = gap
segment_offset = 1
return score, segment_offset
def _align_unlabelled_segments_with_adjacents(segments, segments_alignment,
partitioned_skeletons, frame_rate: float):
"""
Resolve the unaligned segments by comparing with adjacent segments,
starting with the segments that have the least frames gap between an adjacent trusted segment
Don't align isolated segments which a big gap between trusted segments
"""
maximum_gap_allowed = max(1, int(frame_rate *
MAXIMUM_GAP_ALLOWED_WITH_ADJACENT_SEGMENT_SEC))
if np.all(segments_alignment.mask):
logger.info(
'There are no trusted segments with head decision to resolve the whole video, stopping analysis.'
)
return segments_alignment
unaligned = np.where(segments_alignment.mask)[0]
while len(unaligned) > 0:
all_gaps = [_calculate_smallest_gap_to_adjacent(segment_index=x,
segments=segments, segments_alignment=segments_alignment) for x in
unaligned]
segment_to_fix_index = np.nanargmin(all_gaps, axis=0)[0]
gap_to_adjacent_segment, adjacent_segment_offset = all_gaps[
segment_to_fix_index]
if gap_to_adjacent_segment > maximum_gap_allowed:
break
cur_segment_index = unaligned[segment_to_fix_index]
cur_segment_skeleton = partitioned_skeletons[segments[
cur_segment_index]]
adjacent_segment_index = cur_segment_index + adjacent_segment_offset
adjacent_alignment = segments_alignment[adjacent_segment_index]
adjacent_segment = segments[adjacent_segment_index]
adjacent_segment_skeleton = partitioned_skeletons[adjacent_segment][
:, adjacent_alignment]
if adjacent_segment_offset == -1:
closest_unaligned_skeleton = cur_segment_skeleton[0]
closest_known_skeleton = adjacent_segment_skeleton[-1]
elif adjacent_segment_offset == 1:
closest_unaligned_skeleton = cur_segment_skeleton[-1]
closest_known_skeleton = adjacent_segment_skeleton[0]
else:
raise ValueError()
dists = [skeleton_distance(closest_known_skeleton, skel) for skel in
closest_unaligned_skeleton]
segments_alignment[cur_segment_index] = int(np.argmax(dists))
unaligned = np.where(segments_alignment.mask)[0]
return segments_alignment
def _init_unified_series(mixed_series):
return ma.masked_all((mixed_series.shape[0],) + mixed_series.shape[2:],
dtype=mixed_series.dtype)
def resolve_head_tail(shuffled_results: ShuffledResults, original_results:
OriginalResults, frame_rate: float, score_threshold) ->BaseResults:
len_series = len(shuffled_results)
partitioned_results = _make_continuous_partitions(score_threshold=
score_threshold, frame_rate=frame_rate, shuffled_results=
shuffled_results)
segments = partitioned_results.get_segments()
if len(segments) == 0:
logger.error(
f"Couldn't find any continuous segments of predicted data above the threshold {score_threshold}, stopping analysis."
)
return _FinalResults.from_shuffled(shuffled_results)
segments_alignment = _align_segments_with_labels(segments,
partitioned_results.skeletons, original_results.skeletons)
segments_alignment = _align_unlabelled_segments_with_adjacents(segments,
segments_alignment, partitioned_results.skeletons, frame_rate)
resolved_results = _ResolvedResults(partitioned_results)
for segment, segment_alignment in zip(segments, segments_alignment):
if not ma.is_masked(segment_alignment):
resolved_results.resolve(segment, segment_alignment)
low_scores_indices = np.where(ma.masked_less(resolved_results.scores,
score_threshold).mask)[0]
resolved_results.mask(low_scores_indices)
num_success = resolved_results.num_valid()
original_num_success = np.any(~np.isnan(original_results.skeletons),
axis=(1, 2)).sum()
logger.info(
f'Resolved head/tail, {num_success} out of {len_series} frames analyzed successfully ({float(num_success) / len_series * 100:.1f}%) (original features : {original_num_success} or {float(original_num_success) / len_series * 100:.1f}% of total)'
)
if num_success < original_num_success:
logger.warning(
f'Original results had {original_num_success - num_success} more successfully analyzed frames!'
)
return _FinalResults.from_resolved(resolved_results)
| """
This module contains the logic to resolve the head-tail orientation of a predicted video time series.
"""
import logging
import numpy as np
import numpy.ma as ma
from wormpose.pose.distance_metrics import angle_distance, skeleton_distance
from wormpose.pose.results_datatypes import (
BaseResults,
ShuffledResults,
OriginalResults,
)
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# threshold to compare neighbor frames theta, to be considered continuous and belong to the same segment
CONTINUOUS_ANGLES_DIST_THRESHOLD = np.deg2rad(30)
# we consider frames to be part of the same segment if they are maximum this amount of seconds apart
# (and satisfy the distance threshold)
CONTINOUS_SEGMENT_TIME_WINDOW_SEC = 0.2
# discard too small segments less than this amount of seconds
MIN_SEGMENT_SIZE_SEC = 0.2
# don't align isolated segments that are more than this amount of seconds apart from aligned segments
MAXIMUM_GAP_ALLOWED_WITH_ADJACENT_SEGMENT_SEC = 1
def _init_partitioned_series(shuffled_series: np.ndarray):
return ma.masked_all_like(shuffled_series)
def _set_partition(partitioned_series, shuffled_series, frame_index: int, partition: int):
partitioned_series[frame_index][0] = shuffled_series[frame_index, partition]
partitioned_series[frame_index][1] = shuffled_series[frame_index, 1 - partition]
class _PartitionedResults(BaseResults):
def __init__(self, shuffled_results: ShuffledResults):
self.cur_partition = -1
self.partitions = ma.masked_all((len(shuffled_results),), dtype=int)
self._shuffled_results = shuffled_results
theta = _init_partitioned_series(shuffled_results.theta)
skeletons = _init_partitioned_series(shuffled_results.skeletons)
scores = _init_partitioned_series(shuffled_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
self.partitions.mask[indices] = True
def set_partition(self, frame_index: int, partition: int, new_partition: bool = False):
if new_partition:
self.cur_partition += 1
_set_partition(self.theta, self._shuffled_results.theta, frame_index, partition)
_set_partition(self.skeletons, self._shuffled_results.skeletons, frame_index, partition)
_set_partition(self.scores, self._shuffled_results.scores, frame_index, partition)
self.partitions[frame_index] = self.cur_partition
def _get_partition_indices(self, partition_index: int):
return np.where(self.partitions == partition_index)[0]
def get_segments(self):
all_partitions_indexes = np.unique(self.partitions.filled(-1))
return [
self._get_partition_indices(partition_index)
for partition_index in all_partitions_indexes
if partition_index >= 0
]
class _ResolvedResults(BaseResults):
def __init__(self, partitioned_results: _PartitionedResults):
self._partitioned_results = partitioned_results
theta = _init_unified_series(partitioned_results.theta)
skeletons = _init_unified_series(partitioned_results.skeletons)
scores = _init_unified_series(partitioned_results.scores)
super().__init__(theta=theta, skeletons=skeletons, scores=scores)
def resolve(self, segment, segment_alignment):
self.scores[segment] = self._partitioned_results.scores[segment][:, segment_alignment]
self.skeletons[segment] = self._partitioned_results.skeletons[segment][:, segment_alignment]
self.theta[segment] = self._partitioned_results.theta[segment][:, segment_alignment]
def mask(self, indices):
self.theta.mask[indices] = True
self.skeletons.mask[indices] = True
self.scores.mask[indices] = True
def num_valid(self):
return np.sum(~self.scores.mask)
class _FinalResults(BaseResults):
@classmethod
def from_resolved(cls, resolved_results: _ResolvedResults):
return _FinalResults(
theta=resolved_results.theta.filled(np.nan),
skeletons=resolved_results.skeletons.filled(np.nan),
scores=resolved_results.scores.filled(np.nan),
)
@classmethod
def from_shuffled(cls, shuffled_results: ShuffledResults):
return _FinalResults(
theta=np.full_like(shuffled_results.theta[:, 0], np.nan),
skeletons=np.full_like(shuffled_results.scores[:, 0], np.nan),
scores=np.full_like(shuffled_results.skeletons[:, 0], np.nan),
)
def _make_continuous_partitions(
shuffled_results: ShuffledResults, score_threshold: float, frame_rate: float
) -> _PartitionedResults:
time_window = max(1, int(frame_rate * CONTINOUS_SEGMENT_TIME_WINDOW_SEC))
min_segment_size = max(1, int(frame_rate * MIN_SEGMENT_SIZE_SEC))
partitioned_results = _PartitionedResults(shuffled_results)
# discard low score frames early (use the maximum value of both scores for now)
good_score_frames = np.where(ma.greater_equal(ma.max(shuffled_results.scores, axis=1), score_threshold))[0]
for frame_index in good_score_frames:
prev_theta = partitioned_results.theta[frame_index - min(time_window, frame_index) : frame_index, 0]
# if there is a big gap > time_window we start a new partition, with a random value (0)
if np.all(np.any(prev_theta.mask, axis=1)):
partitioned_results.set_partition(frame_index=frame_index, partition=0, new_partition=True)
# otherwise we look in the time_window close past the closest non nan frame see if we can continue the
# partition as long as the values stay continuous
else:
last_valid_index = np.where(~np.any(prev_theta.mask, axis=1))[0][-1]
dists = [
angle_distance(
shuffled_results.theta[frame_index, k, :],
prev_theta[last_valid_index],
)
for k in range(2)
]
partition = int(np.argmin(dists))
if dists[partition] < CONTINUOUS_ANGLES_DIST_THRESHOLD:
partitioned_results.set_partition(frame_index=frame_index, partition=partition)
# discard short segments
for cur_partition_indices in partitioned_results.get_segments():
if len(cur_partition_indices) < min_segment_size:
partitioned_results.mask(cur_partition_indices)
return partitioned_results
def _align_segments_with_labels(segments, partitioned_skeletons, labelled_skeletons, min_labelled=5):
"""
Match the head/tail alignment with the results of the classical tracking in each of the segments,
if there is enough labelled data in the segment
"""
segments_alignment = ma.masked_all((len(segments),), dtype=np.uint8)
for segment_index, segment in enumerate(segments):
segment_skeletons = labelled_skeletons[segment]
non_nan_labelled = np.any(~np.isnan(segment_skeletons), axis=(1, 2))
labels_count = np.sum(non_nan_labelled)
non_masked = ~np.any(partitioned_skeletons[segment].mask, axis=(1, 2, 3))
to_compare = np.logical_and(non_nan_labelled, non_masked)
similarity_scores = []
for label_skel, partitioned_skeleton in zip(
segment_skeletons[to_compare], partitioned_skeletons[segment][to_compare]
):
dists = [skeleton_distance(label_skel, x) for x in partitioned_skeleton]
similarity_scores.append(dists)
if len(similarity_scores) > 0:
mean_similarity_scores = np.mean(similarity_scores, axis=0)
if mean_similarity_scores[0] * mean_similarity_scores[1] < 0 and labels_count > min_labelled:
segments_alignment[segment_index] = np.argmax(mean_similarity_scores)
return segments_alignment
def _calculate_smallest_gap_to_adjacent(segment_index, segments, segments_alignment):
# evaluate how far away this segment is from known values
score = np.nan
segment_offset = np.nan
if segment_index - 1 >= 0 and not segments_alignment.mask[segment_index - 1]:
gap = segments[segment_index][0] - segments[segment_index - 1][-1]
score = gap
segment_offset = -1
if segment_index + 1 < len(segments_alignment) and not segments_alignment.mask[segment_index + 1]:
gap = segments[segment_index + 1][0] - segments[segment_index][-1]
if np.isnan(score) or gap < score:
score = gap
segment_offset = 1
return score, segment_offset
def _align_unlabelled_segments_with_adjacents(segments, segments_alignment, partitioned_skeletons, frame_rate: float):
"""
Resolve the unaligned segments by comparing with adjacent segments,
starting with the segments that have the least frames gap between an adjacent trusted segment
Don't align isolated segments which a big gap between trusted segments
"""
maximum_gap_allowed = max(1, int(frame_rate * MAXIMUM_GAP_ALLOWED_WITH_ADJACENT_SEGMENT_SEC))
# ensure that if no segments have been aligned at all, pick one solution randomly to start
if np.all(segments_alignment.mask):
logger.info("There are no trusted segments with head decision to resolve the whole video, stopping analysis.")
return segments_alignment
# fix in priority the segments with known adjacent frames with little gap
# until all segments are aligned except the isolated ones (further than maximum_gap_allowed)
unaligned = np.where(segments_alignment.mask)[0]
while len(unaligned) > 0:
# we first pick the best candidate segment to align (there are known frames nearby before or after or both)
all_gaps = [
_calculate_smallest_gap_to_adjacent(
segment_index=x,
segments=segments,
segments_alignment=segments_alignment,
)
for x in unaligned
]
segment_to_fix_index = np.nanargmin(all_gaps, axis=0)[0]
gap_to_adjacent_segment, adjacent_segment_offset = all_gaps[segment_to_fix_index]
# abort if only isolated segments are left
if gap_to_adjacent_segment > maximum_gap_allowed:
break
cur_segment_index = unaligned[segment_to_fix_index]
cur_segment_skeleton = partitioned_skeletons[segments[cur_segment_index]]
adjacent_segment_index = cur_segment_index + adjacent_segment_offset
adjacent_alignment = segments_alignment[adjacent_segment_index]
adjacent_segment = segments[adjacent_segment_index]
adjacent_segment_skeleton = partitioned_skeletons[adjacent_segment][:, adjacent_alignment]
if adjacent_segment_offset == -1:
closest_unaligned_skeleton = cur_segment_skeleton[0] # first frame of cur segment
closest_known_skeleton = adjacent_segment_skeleton[-1] # last frame of prev segment
elif adjacent_segment_offset == 1:
closest_unaligned_skeleton = cur_segment_skeleton[-1] # last frame of cur segment
closest_known_skeleton = adjacent_segment_skeleton[0] # first frame of next segment
else:
raise ValueError()
dists = [skeleton_distance(closest_known_skeleton, skel) for skel in closest_unaligned_skeleton]
segments_alignment[cur_segment_index] = int(np.argmax(dists))
unaligned = np.where(segments_alignment.mask)[0]
return segments_alignment
def _init_unified_series(mixed_series):
return ma.masked_all((mixed_series.shape[0],) + mixed_series.shape[2:], dtype=mixed_series.dtype)
def resolve_head_tail(
shuffled_results: ShuffledResults,
original_results: OriginalResults,
frame_rate: float,
score_threshold,
) -> BaseResults:
len_series = len(shuffled_results)
# Create continuous segments without jumps
partitioned_results = _make_continuous_partitions(
score_threshold=score_threshold,
frame_rate=frame_rate,
shuffled_results=shuffled_results,
)
segments = partitioned_results.get_segments()
if len(segments) == 0:
logger.error(
f"Couldn't find any continuous segments of predicted data above the threshold {score_threshold},"
f" stopping analysis."
)
return _FinalResults.from_shuffled(shuffled_results)
# Choose each segment global alignment by comparing with labelled data
segments_alignment = _align_segments_with_labels(
segments, partitioned_results.skeletons, original_results.skeletons
)
# Fix unaligned segments here by comparing skeletons with neighboring segments iteratively
segments_alignment = _align_unlabelled_segments_with_adjacents(
segments, segments_alignment, partitioned_results.skeletons, frame_rate
)
# Compile results
resolved_results = _ResolvedResults(partitioned_results)
for segment, segment_alignment in zip(segments, segments_alignment):
if not ma.is_masked(segment_alignment):
resolved_results.resolve(segment, segment_alignment)
# Filter the final results again by score threshold
low_scores_indices = np.where(ma.masked_less(resolved_results.scores, score_threshold).mask)[0]
resolved_results.mask(low_scores_indices)
num_success = resolved_results.num_valid()
original_num_success = np.any(~np.isnan(original_results.skeletons), axis=(1, 2)).sum()
logger.info(
f"Resolved head/tail, {num_success} out of {len_series} frames analyzed successfully "
f"({float(num_success) / len_series * 100:.1f}%) (original features : {original_num_success}"
f" or {(float(original_num_success) / len_series * 100):.1f}% of total)"
)
if num_success < original_num_success:
logger.warning(f"Original results had {original_num_success - num_success} more successfully analyzed frames!")
return _FinalResults.from_resolved(resolved_results)
 | [10, 19, 20, 24, 26] |
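
The row above carries the wormpose head/tail resolution module through its masking steps; the entry point is resolve_head_tail. A hedged invocation sketch follows. It assumes ShuffledResults and OriginalResults accept the same theta/skeletons/scores keywords as the BaseResults constructor visible in the code, which may not hold in the real wormpose package; the array shapes (two head/tail candidates on axis 1 of the shuffled series) are inferred from the indexing, and the sizes and 0.7 threshold are invented.

import numpy as np

# Assumed constructors and shapes; the real wormpose API may differ.
n_frames, n_points = 100, 49
shuffled = ShuffledResults(
    theta=np.zeros((n_frames, 2, n_points)),
    skeletons=np.zeros((n_frames, 2, n_points, 2)),
    scores=np.zeros((n_frames, 2)),
)
original = OriginalResults(
    theta=np.full((n_frames, n_points), np.nan),
    skeletons=np.full((n_frames, n_points, 2), np.nan),
    scores=np.full((n_frames,), np.nan),
)
final = resolve_head_tail(
    shuffled_results=shuffled,
    original_results=original,
    frame_rate=30.0,
    score_threshold=0.7,
)
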
518 | ec90c731a0e546d9d399cbb68c92be1acca8cbe0 | <mask token>
| <mask token>
class mysql(MakePackage):
<mask token>
<mask token>
<mask token>
| <mask token>
class mysql(MakePackage):
dependencies = ['cmake']
fetch = (
'http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/'
)
config = (
'cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'
)
| from package import *
class mysql(MakePackage):
dependencies = ['cmake']
fetch = (
'http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/'
)
config = (
'cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'
)
|
from package import *
class mysql(MakePackage):
dependencies = ["cmake"]
fetch="http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/"
config='cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'
 | [0, 1, 2, 3, 4] |
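
The config string in the recipe above uses Python %-style named interpolation for the install prefix. A small illustration follows; the '/usr/local' path is invented, and how MakePackage actually performs the substitution is not shown in this row.

config = 'cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'
print(config % {'prefix': '/usr/local'})
# -> cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=/usr/local -DWITH_READLINE=1
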
519 | 372d8c8cb9ec8f579db8588aff7799c73c5af255 | <mask token>
class Collector:
<mask token>
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
<mask token>
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
<mask token>
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
<mask token>
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
<mask token>
<mask token>
| <mask token>
class Collector:
<mask token>
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def new_f_check(api, c):
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
def mention_me(new_tweet_list, c, conn):
mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
c.execute(
"""INSERT INTO served
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id, since_id=
tweet_id, count=200)
except Exception:
continue
if len(get_tweets) != 0:
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=user_id, count=
200, since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if (tweet.user.screen_name != 'BonneNick' and tweet.id not in
tweet_ids):
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
<mask token>
| <mask token>
def main():
Collector.collect()
class Collector:
def collect():
api = Collector.get_api()
tweet_dump = Collector.all_tweet_db()
c = tweet_dump[0]
conn = tweet_dump[1]
last_list = Collector.last_tweets(c, conn)
new_friends = Collector.new_f_check(api, c)
Collector.download_to_limit(api, c, conn, new_friends)
Collector.download_recent(api, c, conn, last_list)
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def new_f_check(api, c):
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
def mention_me(new_tweet_list, c, conn):
mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
c.execute(
"""INSERT INTO served
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id, since_id=
tweet_id, count=200)
except Exception:
continue
if len(get_tweets) != 0:
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=user_id, count=
200, since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if (tweet.user.screen_name != 'BonneNick' and tweet.id not in
tweet_ids):
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
<mask token>
| <mask token>
def main():
Collector.collect()
class Collector:
def collect():
api = Collector.get_api()
tweet_dump = Collector.all_tweet_db()
c = tweet_dump[0]
conn = tweet_dump[1]
last_list = Collector.last_tweets(c, conn)
new_friends = Collector.new_f_check(api, c)
Collector.download_to_limit(api, c, conn, new_friends)
Collector.download_recent(api, c, conn, last_list)
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def new_f_check(api, c):
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
def mention_me(new_tweet_list, c, conn):
mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
c.execute(
"""INSERT INTO served
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id, since_id=
tweet_id, count=200)
except Exception:
continue
if len(get_tweets) != 0:
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=user_id, count=
200, since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if (tweet.user.screen_name != 'BonneNick' and tweet.id not in
tweet_ids):
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
if __name__ == '__main__':
main()
| #!/home/nick/.virtualenvs/twitterbots/bin/python3.5
# -*- coding: utf-8 -*-
import tweepy
import sqlite3
from configparser import ConfigParser
'''
A little OOP would be good later for
authenticated user data, c, conn, api
'''
def main():
Collector.collect()
class Collector:
# Main function
def collect():
api = Collector.get_api()
tweet_dump = Collector.all_tweet_db()
c = tweet_dump[0]
conn = tweet_dump[1]
last_list = Collector.last_tweets(c, conn)
# Look for new friends, add to db
new_friends = Collector.new_f_check(api, c)
Collector.download_to_limit(api, c, conn, new_friends)
# Checks timelines of everyone in db already
# adds anything new to db
Collector.download_recent(api, c, conn, last_list)
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys',
'consumer_key').strip("'")
consumer_secret = parser.get('Secrets',
'consumer_secret').strip("'")
access_token = parser.get('Tokens',
'access_token').strip("'")
access_token_secret = parser.get('Secrets',
'access_token_secret').strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
# connects to tweet_dump.db creates tdump if not exists
# tdump stores all tweets from anyone in list
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)''')
return c, conn
    # connects to tweet_dump.db, creates 'mentioned' if it does not exist
    # 'mentioned' stores tweets that mention the authenticated user
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)''')
return c, conn
# looks for new friends by comparing authenticated
# user's friend list with list of friends in tdump
def new_f_check(api, c):
# get list of user's ids
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
# get list of friends_ids from twitter
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
# downloads up to 3200 of a user's most
# recent tweets commits to tdump
def download_to_limit(api, c, conn, friend_list):
# List of tweet ids already in db
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
# try to get most recent 200 tweets from friend
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
# add to list of all of this friend's tweets
new_tweets.extend(get_tweets)
# find oldest retrieved tweet's id number less 1
oldest = new_tweets[-1].id - 1
# get tweets until 3200 limit hit
while len(get_tweets) > 0:
try:
# max_id arg looks for id's less than arg's value
get_tweets = api.user_timeline(id=friend,
count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute('''INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)''',
[tweet.text,
tweet.user.screen_name,
tweet.created_at,
tweet.id_str,
tweet.source,
tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
# simply check if tweet text contains my screen name
# change from hard code later
def mention_me(new_tweet_list, c, conn):
        mentioned = [x for x in new_tweet_list if '@BonneNick' in x.text]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
            c.execute('''INSERT INTO mentioned
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)''',
[tweet.text,
tweet.user.screen_name,
tweet.created_at,
tweet.id_str,
tweet.source,
tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
# returns list of user_id and created_at pairs
# date associated with user_id is date of last
# tweet in database
def last_tweets(c, conn):
# list of user ids and the date of the
# last tweet in db
user_last_tweets = []
# get list of user's ids
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute('''SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC''',
[user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
# downloads most recent posts in each users timelines
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id,
since_id=tweet_id,
count=200)
except Exception:
continue
if len(get_tweets) != 0:
# add to list of all of this friend's tweets
new_tweets.extend(get_tweets)
# find newest retrieved tweet's id number plus 1
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
# max_id arg looks for id's less than arg's value
get_tweets = api.user_timeline(id=user_id,
count=200,
since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if tweet.user.screen_name != 'BonneNick' \
and tweet.id not in tweet_ids:
c.execute('''INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)''',
[tweet.text,
tweet.user.screen_name,
tweet.created_at,
tweet.id_str,
tweet.source,
tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
if __name__ == '__main__':
main()
| [
5,
9,
11,
12,
14
] |
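The collector above walks each timeline by hand with max_id/since_id bookkeeping; tweepy's Cursor helper performs the same paging internally. A minimal sketch under the same v1.1 API object (fetch_timeline is a hypothetical helper name, not part of the record):

import tweepy

def fetch_timeline(api, user_id, since_id=None):
    """Collect a user's timeline; Cursor handles the max_id paging."""
    tweets = []
    # the user_timeline endpoint itself caps out around the 3200 newest tweets
    for status in tweepy.Cursor(api.user_timeline, id=user_id,
                                count=200, since_id=since_id).items():
        tweets.append(status)
    return tweets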
520 | 8dfef0a4525328be8dfb4723f0a168dc22eb5eb2 | <mask token>
class ProcessDisplay(commands.Cog):
<mask token>
<mask token>
@commands.Cog.listener()
async def on_ready(self):
"""
Ran when bot is starting up and ready
Deletes messages from the bot in the TEXTCHANNEL
starts up find_processes method
:return:
"""
if not self.started:
channel = self.client.get_channel(TEXT_CHANNEL)
await self.delete_bot_msg(channel)
msg = await channel.send(embed=DEFAULT_EMBED)
self.find_processes.start(msg)
            self.started = True
print('ProcessDisplay Running')
@commands.command()
@commands.has_permissions(administrator=True)
async def toggle_inline(self, ctx):
"""
Toggles inline for process controls
:param ctx: The command Context
:return:
"""
self.inline = not self.inline
@commands.command()
@commands.has_permissions(administrator=True)
async def move_process(self, direction, process_name):
"""
need to make
:param direction:
:param process_name:
:return:
"""
for i in range(len(PROCESSES)):
if PROCESSES[i] == process_name:
if direction.lower() == 'up':
pass
@commands.command()
@commands.has_permissions(administrator=True)
async def add_process(self, ctx, process, name):
"""
Adds a process to the process display.
Must be different from ones currently displayed.
:param ctx: Context of the command
:param process: The process (e.g. 'cmd.exe') to be added
:param name: The name to be displayed for the process (e.g. 'Command Prompt')
:return:
"""
name = self.fix_emoji_escapes(name)
if process in PROCESSES.keys():
await ctx.send(f'The process {process} is already being displayed')
elif name in PROCESSES.values():
await ctx.send(
f'The process name {name} is already being displayed')
else:
PROCESSES[process] = name
self.update_processes_config()
await ctx.send(f'The process {name} has been added')
@commands.command()
@commands.has_permissions(administrator=True)
async def remove_process(self, ctx, *name):
"""
Removes a process from the process display
:param ctx: Context of the command
:param name: Name displayed for the process (e.g. Command Prompt)
:return:
"""
print(name)
name = self.fix_emoji_escapes(' '.join(name))
complete = False
for process in PROCESSES.keys():
if PROCESSES.get(process) == name:
PROCESSES.pop(process)
self.update_processes_config()
await ctx.send(f'The process {name} has been removed')
complete = True
break
if not complete:
await ctx.send(f"The process {name} doesn't exist")
@commands.command()
@commands.has_permissions(administrator=True)
async def edit_process(self, ctx, old_name, new_name):
"""
Edits the name of a process
:param ctx: The context of the command
:param old_name: The old name of the process (to be changed)
:param new_name: The new name of the process (changed to)
:return:
"""
old_name = self.fix_emoji_escapes(old_name)
new_name = self.fix_emoji_escapes(new_name)
if old_name in PROCESSES.values():
for process in PROCESSES:
if PROCESSES.get(process) == old_name:
PROCESSES.update({process: new_name})
self.update_processes_config()
else:
await ctx.send(f"Process name {old_name} doesn't exist")
@tasks.loop(seconds=1)
async def find_processes(self, msg):
"""
The processes with statuses are attached to the msg given
:param msg: The message to be edited with the processes
:return:
"""
running_processes = []
new_embed = DEFAULT_EMBED.copy()
for proc in psutil.process_iter():
if proc.name() in PROCESSES.keys():
running_processes.append(proc.name())
elif proc.name() in ['java.exe', 'javaw.exe'] and proc.cwd(
) in PROCESSES.keys():
running_processes.append(proc.cwd())
for process in PROCESSES:
try:
if process in running_processes:
new_embed.add_field(name=PROCESSES.get(process), value=
'Online <:GreenTick:592083498534174721>', inline=
self.inline)
else:
new_embed.add_field(name=PROCESSES.get(process), value=
'Offline <:RedCross:592082557961633877>', inline=
self.inline)
except PermissionError:
new_embed.add_field(name=PROCESSES.get(process), value=
'Admin Required <:OrangeUnknown:592082676891123722>',
inline=self.inline)
await msg.edit(content='', embed=new_embed)
def is_me(self, m):
"""
        Checks if a message's author is the bot
        :param m: the message to check
:return:
"""
return m.author == self.client.user
async def delete_bot_msg(self, channel):
"""
Deletes up to the last 100 messages sent by the bot in the given channel
:param channel: The channel that will have the messages deleted
:return: the message that says how many messages were deleted
"""
await channel.purge(limit=100, check=self.is_me)
<mask token>
<mask token>
<mask token>
| <mask token>
class ProcessDisplay(commands.Cog):
<mask token>
def __init__(self, client):
"""
:param client: the bot client parsed in from the main program
"""
self.started = False
self.client = client
self.inline = False
@commands.Cog.listener()
async def on_ready(self):
"""
Ran when bot is starting up and ready
Deletes messages from the bot in the TEXTCHANNEL
starts up find_processes method
:return:
"""
if not self.started:
channel = self.client.get_channel(TEXT_CHANNEL)
await self.delete_bot_msg(channel)
msg = await channel.send(embed=DEFAULT_EMBED)
self.find_processes.start(msg)
            self.started = True
print('ProcessDisplay Running')
@commands.command()
@commands.has_permissions(administrator=True)
async def toggle_inline(self, ctx):
"""
Toggles inline for process controls
:param ctx: The command Context
:return:
"""
self.inline = not self.inline
@commands.command()
@commands.has_permissions(administrator=True)
async def move_process(self, direction, process_name):
"""
need to make
:param direction:
:param process_name:
:return:
"""
for i in range(len(PROCESSES)):
if PROCESSES[i] == process_name:
if direction.lower() == 'up':
pass
@commands.command()
@commands.has_permissions(administrator=True)
async def add_process(self, ctx, process, name):
"""
Adds a process to the process display.
Must be different from ones currently displayed.
:param ctx: Context of the command
:param process: The process (e.g. 'cmd.exe') to be added
:param name: The name to be displayed for the process (e.g. 'Command Prompt')
:return:
"""
name = self.fix_emoji_escapes(name)
if process in PROCESSES.keys():
await ctx.send(f'The process {process} is already being displayed')
elif name in PROCESSES.values():
await ctx.send(
f'The process name {name} is already being displayed')
else:
PROCESSES[process] = name
self.update_processes_config()
await ctx.send(f'The process {name} has been added')
@commands.command()
@commands.has_permissions(administrator=True)
async def remove_process(self, ctx, *name):
"""
Removes a process from the process display
:param ctx: Context of the command
:param name: Name displayed for the process (e.g. Command Prompt)
:return:
"""
print(name)
name = self.fix_emoji_escapes(' '.join(name))
complete = False
for process in PROCESSES.keys():
if PROCESSES.get(process) == name:
PROCESSES.pop(process)
self.update_processes_config()
await ctx.send(f'The process {name} has been removed')
complete = True
break
if not complete:
await ctx.send(f"The process {name} doesn't exist")
@commands.command()
@commands.has_permissions(administrator=True)
async def edit_process(self, ctx, old_name, new_name):
"""
Edits the name of a process
:param ctx: The context of the command
:param old_name: The old name of the process (to be changed)
:param new_name: The new name of the process (changed to)
:return:
"""
old_name = self.fix_emoji_escapes(old_name)
new_name = self.fix_emoji_escapes(new_name)
if old_name in PROCESSES.values():
for process in PROCESSES:
if PROCESSES.get(process) == old_name:
PROCESSES.update({process: new_name})
self.update_processes_config()
else:
await ctx.send(f"Process name {old_name} doesn't exist")
@tasks.loop(seconds=1)
async def find_processes(self, msg):
"""
The processes with statuses are attached to the msg given
:param msg: The message to be edited with the processes
:return:
"""
running_processes = []
new_embed = DEFAULT_EMBED.copy()
for proc in psutil.process_iter():
if proc.name() in PROCESSES.keys():
running_processes.append(proc.name())
elif proc.name() in ['java.exe', 'javaw.exe'] and proc.cwd(
) in PROCESSES.keys():
running_processes.append(proc.cwd())
for process in PROCESSES:
try:
if process in running_processes:
new_embed.add_field(name=PROCESSES.get(process), value=
'Online <:GreenTick:592083498534174721>', inline=
self.inline)
else:
new_embed.add_field(name=PROCESSES.get(process), value=
'Offline <:RedCross:592082557961633877>', inline=
self.inline)
except PermissionError:
new_embed.add_field(name=PROCESSES.get(process), value=
'Admin Required <:OrangeUnknown:592082676891123722>',
inline=self.inline)
await msg.edit(content='', embed=new_embed)
def is_me(self, m):
"""
        Checks if a message's author is the bot
        :param m: the message to check
:return:
"""
return m.author == self.client.user
async def delete_bot_msg(self, channel):
"""
Deletes up to the last 100 messages sent by the bot in the given channel
:param channel: The channel that will have the messages deleted
:return: the message that says how many messages were deleted
"""
await channel.purge(limit=100, check=self.is_me)
@staticmethod
def update_processes_config():
"""
Updates the processes line in the config with the current PROCESSES
:return:
"""
config.set('ProcessDisplay', 'processes', str(PROCESSES))
with open(CONFIG_PATH, 'w', encoding='utf-8') as configfile:
config.write(configfile)
<mask token>
<mask token>
| <mask token>
class ProcessDisplay(commands.Cog):
"""
The Cog for Process Display
"""
def __init__(self, client):
"""
:param client: the bot client parsed in from the main program
"""
self.started = False
self.client = client
self.inline = False
@commands.Cog.listener()
async def on_ready(self):
"""
Ran when bot is starting up and ready
Deletes messages from the bot in the TEXTCHANNEL
starts up find_processes method
:return:
"""
if not self.started:
channel = self.client.get_channel(TEXT_CHANNEL)
await self.delete_bot_msg(channel)
msg = await channel.send(embed=DEFAULT_EMBED)
self.find_processes.start(msg)
            self.started = True
print('ProcessDisplay Running')
@commands.command()
@commands.has_permissions(administrator=True)
async def toggle_inline(self, ctx):
"""
Toggles inline for process controls
:param ctx: The command Context
:return:
"""
self.inline = not self.inline
@commands.command()
@commands.has_permissions(administrator=True)
async def move_process(self, direction, process_name):
"""
need to make
:param direction:
:param process_name:
:return:
"""
for i in range(len(PROCESSES)):
if PROCESSES[i] == process_name:
if direction.lower() == 'up':
pass
@commands.command()
@commands.has_permissions(administrator=True)
async def add_process(self, ctx, process, name):
"""
Adds a process to the process display.
Must be different from ones currently displayed.
:param ctx: Context of the command
:param process: The process (e.g. 'cmd.exe') to be added
:param name: The name to be displayed for the process (e.g. 'Command Prompt')
:return:
"""
name = self.fix_emoji_escapes(name)
if process in PROCESSES.keys():
await ctx.send(f'The process {process} is already being displayed')
elif name in PROCESSES.values():
await ctx.send(
f'The process name {name} is already being displayed')
else:
PROCESSES[process] = name
self.update_processes_config()
await ctx.send(f'The process {name} has been added')
@commands.command()
@commands.has_permissions(administrator=True)
async def remove_process(self, ctx, *name):
"""
Removes a process from the process display
:param ctx: Context of the command
:param name: Name displayed for the process (e.g. Command Prompt)
:return:
"""
print(name)
name = self.fix_emoji_escapes(' '.join(name))
complete = False
for process in PROCESSES.keys():
if PROCESSES.get(process) == name:
PROCESSES.pop(process)
self.update_processes_config()
await ctx.send(f'The process {name} has been removed')
complete = True
break
if not complete:
await ctx.send(f"The process {name} doesn't exist")
@commands.command()
@commands.has_permissions(administrator=True)
async def edit_process(self, ctx, old_name, new_name):
"""
Edits the name of a process
:param ctx: The context of the command
:param old_name: The old name of the process (to be changed)
:param new_name: The new name of the process (changed to)
:return:
"""
old_name = self.fix_emoji_escapes(old_name)
new_name = self.fix_emoji_escapes(new_name)
if old_name in PROCESSES.values():
for process in PROCESSES:
if PROCESSES.get(process) == old_name:
PROCESSES.update({process: new_name})
self.update_processes_config()
else:
await ctx.send(f"Process name {old_name} doesn't exist")
@tasks.loop(seconds=1)
async def find_processes(self, msg):
"""
The processes with statuses are attached to the msg given
:param msg: The message to be edited with the processes
:return:
"""
running_processes = []
new_embed = DEFAULT_EMBED.copy()
for proc in psutil.process_iter():
if proc.name() in PROCESSES.keys():
running_processes.append(proc.name())
elif proc.name() in ['java.exe', 'javaw.exe'] and proc.cwd(
) in PROCESSES.keys():
running_processes.append(proc.cwd())
for process in PROCESSES:
try:
if process in running_processes:
new_embed.add_field(name=PROCESSES.get(process), value=
'Online <:GreenTick:592083498534174721>', inline=
self.inline)
else:
new_embed.add_field(name=PROCESSES.get(process), value=
'Offline <:RedCross:592082557961633877>', inline=
self.inline)
except PermissionError:
new_embed.add_field(name=PROCESSES.get(process), value=
'Admin Required <:OrangeUnknown:592082676891123722>',
inline=self.inline)
await msg.edit(content='', embed=new_embed)
def is_me(self, m):
"""
        Checks if a message's author is the bot
        :param m: the message to check
:return:
"""
return m.author == self.client.user
async def delete_bot_msg(self, channel):
"""
Deletes up to the last 100 messages sent by the bot in the given channel
:param channel: The channel that will have the messages deleted
:return: the message that says how many messages were deleted
"""
await channel.purge(limit=100, check=self.is_me)
@staticmethod
def update_processes_config():
"""
Updates the processes line in the config with the current PROCESSES
:return:
"""
config.set('ProcessDisplay', 'processes', str(PROCESSES))
with open(CONFIG_PATH, 'w', encoding='utf-8') as configfile:
config.write(configfile)
@staticmethod
def fix_emoji_escapes(text):
"""
Fixes emoji escapes to add the < back on
:param text: The text that needs to be checked for an escape
:return: the fixed text
"""
new_text = text.split(':')
for i in range(2, len(new_text)):
if '>' in new_text[i]:
new_text[i - 2] += '<'
return ':'.join(new_text)
<mask token>
| <mask token>
__author__ = 'Jack Draper'
__copyright__ = 'Unofficial Copyright 2019, CyclopsBot'
__credits__ = ['Jack Draper']
__license__ = 'Developer'
__version__ = '0.0.4'
__maintainer__ = 'Jack Draper'
__email__ = '[email protected]'
__status__ = 'Development'
CONFIG_PATH = './configs/config.ini'
DEFAULT_EMBED = discord.Embed(title=':desktop: Program Status', colour=
discord.Colour.blue())
if not os.path.exists('./configs/config.ini'):
print('No config file can be found in ./configs/.')
sys.exit('No config found.')
config = configparser.ConfigParser()
try:
config.read_file(codecs.open(CONFIG_PATH, 'r', 'utf-8-sig'))
except FileNotFoundError:
try:
print('You need to set up the config file correctly.')
except shutil.Error:
print(
'Something is wrong with the default config file or the config folder.'
)
time.sleep(4)
sys.exit()
ADMIN_ROLE = config['Credentials']['admin_role']
TEXT_CHANNEL = int(config['ProcessDisplay']['text_channel_id'])
PROCESSES = eval(config['ProcessDisplay']['processes'])
class ProcessDisplay(commands.Cog):
"""
The Cog for Process Display
"""
def __init__(self, client):
"""
:param client: the bot client parsed in from the main program
"""
self.started = False
self.client = client
self.inline = False
@commands.Cog.listener()
async def on_ready(self):
"""
Ran when bot is starting up and ready
Deletes messages from the bot in the TEXTCHANNEL
starts up find_processes method
:return:
"""
if not self.started:
channel = self.client.get_channel(TEXT_CHANNEL)
await self.delete_bot_msg(channel)
msg = await channel.send(embed=DEFAULT_EMBED)
self.find_processes.start(msg)
            self.started = True
print('ProcessDisplay Running')
@commands.command()
@commands.has_permissions(administrator=True)
async def toggle_inline(self, ctx):
"""
Toggles inline for process controls
:param ctx: The command Context
:return:
"""
self.inline = not self.inline
@commands.command()
@commands.has_permissions(administrator=True)
async def move_process(self, direction, process_name):
"""
need to make
:param direction:
:param process_name:
:return:
"""
for i in range(len(PROCESSES)):
if PROCESSES[i] == process_name:
if direction.lower() == 'up':
pass
@commands.command()
@commands.has_permissions(administrator=True)
async def add_process(self, ctx, process, name):
"""
Adds a process to the process display.
Must be different from ones currently displayed.
:param ctx: Context of the command
:param process: The process (e.g. 'cmd.exe') to be added
:param name: The name to be displayed for the process (e.g. 'Command Prompt')
:return:
"""
name = self.fix_emoji_escapes(name)
if process in PROCESSES.keys():
await ctx.send(f'The process {process} is already being displayed')
elif name in PROCESSES.values():
await ctx.send(
f'The process name {name} is already being displayed')
else:
PROCESSES[process] = name
self.update_processes_config()
await ctx.send(f'The process {name} has been added')
@commands.command()
@commands.has_permissions(administrator=True)
async def remove_process(self, ctx, *name):
"""
Removes a process from the process display
:param ctx: Context of the command
:param name: Name displayed for the process (e.g. Command Prompt)
:return:
"""
print(name)
name = self.fix_emoji_escapes(' '.join(name))
complete = False
for process in PROCESSES.keys():
if PROCESSES.get(process) == name:
PROCESSES.pop(process)
self.update_processes_config()
await ctx.send(f'The process {name} has been removed')
complete = True
break
if not complete:
await ctx.send(f"The process {name} doesn't exist")
@commands.command()
@commands.has_permissions(administrator=True)
async def edit_process(self, ctx, old_name, new_name):
"""
Edits the name of a process
:param ctx: The context of the command
:param old_name: The old name of the process (to be changed)
:param new_name: The new name of the process (changed to)
:return:
"""
old_name = self.fix_emoji_escapes(old_name)
new_name = self.fix_emoji_escapes(new_name)
if old_name in PROCESSES.values():
for process in PROCESSES:
if PROCESSES.get(process) == old_name:
PROCESSES.update({process: new_name})
self.update_processes_config()
else:
await ctx.send(f"Process name {old_name} doesn't exist")
@tasks.loop(seconds=1)
async def find_processes(self, msg):
"""
The processes with statuses are attached to the msg given
:param msg: The message to be edited with the processes
:return:
"""
running_processes = []
new_embed = DEFAULT_EMBED.copy()
for proc in psutil.process_iter():
if proc.name() in PROCESSES.keys():
running_processes.append(proc.name())
elif proc.name() in ['java.exe', 'javaw.exe'] and proc.cwd(
) in PROCESSES.keys():
running_processes.append(proc.cwd())
for process in PROCESSES:
try:
if process in running_processes:
new_embed.add_field(name=PROCESSES.get(process), value=
'Online <:GreenTick:592083498534174721>', inline=
self.inline)
else:
new_embed.add_field(name=PROCESSES.get(process), value=
'Offline <:RedCross:592082557961633877>', inline=
self.inline)
except PermissionError:
new_embed.add_field(name=PROCESSES.get(process), value=
'Admin Required <:OrangeUnknown:592082676891123722>',
inline=self.inline)
await msg.edit(content='', embed=new_embed)
def is_me(self, m):
"""
        Checks if a message's author is the bot
        :param m: the message to check
:return:
"""
return m.author == self.client.user
async def delete_bot_msg(self, channel):
"""
Deletes up to the last 100 messages sent by the bot in the given channel
:param channel: The channel that will have the messages deleted
:return: the message that says how many messages were deleted
"""
await channel.purge(limit=100, check=self.is_me)
@staticmethod
def update_processes_config():
"""
Updates the processes line in the config with the current PROCESSES
:return:
"""
config.set('ProcessDisplay', 'processes', str(PROCESSES))
with open(CONFIG_PATH, 'w', encoding='utf-8') as configfile:
config.write(configfile)
@staticmethod
def fix_emoji_escapes(text):
"""
Fixes emoji escapes to add the < back on
:param text: The text that needs to be checked for an escape
:return: the fixed text
"""
new_text = text.split(':')
for i in range(2, len(new_text)):
if '>' in new_text[i]:
new_text[i - 2] += '<'
return ':'.join(new_text)
def setup(client):
"""
Ran on setup of the Cog
:param client: The bot client
:return:
"""
client.add_cog(ProcessDisplay(client))
| #!/usr/bin/env python
"""
This is a Cog used to display processes/ programs running on the client to a discord text channel
Commented using reStructuredText (reST)
ToDo
create and use a database for multiple servers
"""
# Futures
# Built-in/Generic Imports
import os
import sys
import configparser
import shutil
import time
import codecs
# Libs
import discord
import psutil
from discord.ext import commands, tasks
# Own modules
__author__ = "Jack Draper"
__copyright__ = "Unofficial Copyright 2019, CyclopsBot"
__credits__ = ["Jack Draper"]
__license__ = "Developer"
__version__ = "0.0.4"
__maintainer__ = "Jack Draper"
__email__ = "[email protected]"
__status__ = "Development"
# "Prototype", "Development", or "Production"
# Constants
CONFIG_PATH = "./configs/config.ini"
DEFAULT_EMBED = discord.Embed(
title=":desktop: Program Status",
colour=discord.Colour.blue()
)
# Checks for config file
if not os.path.exists("./configs/config.ini"):
print("No config file can be found in ./configs/.")
sys.exit("No config found.")
# Runs config file
config = configparser.ConfigParser()
try:
# config.read(os.path.abspath("./configs/config.ini"))
config.read_file(codecs.open(CONFIG_PATH, "r", "utf-8-sig"))
except FileNotFoundError:
try:
# shutil.copyfile("./configs/default_config.ini", "./configs/config.ini")
print("You need to set up the config file correctly.")
except shutil.Error:
print("Something is wrong with the default config file or the config folder.")
time.sleep(4)
sys.exit()
# Config Constants
ADMIN_ROLE = config["Credentials"]["admin_role"]
TEXT_CHANNEL = int(config["ProcessDisplay"]["text_channel_id"])
PROCESSES = eval(config["ProcessDisplay"]["processes"])
class ProcessDisplay(commands.Cog):
"""
The Cog for Process Display
"""
def __init__(self, client):
"""
:param client: the bot client parsed in from the main program
"""
self.started = False
self.client = client
self.inline = False
# Events
@commands.Cog.listener()
async def on_ready(self):
"""
Ran when bot is starting up and ready
Deletes messages from the bot in the TEXTCHANNEL
starts up find_processes method
:return:
"""
if not self.started:
channel = self.client.get_channel(TEXT_CHANNEL)
await self.delete_bot_msg(channel)
msg = await channel.send(embed=DEFAULT_EMBED)
self.find_processes.start(msg)
            self.started = True
print("ProcessDisplay Running")
# Commands
@commands.command()
@commands.has_permissions(administrator=True)
    async def toggle_inline(self, ctx):
"""
Toggles inline for process controls
:param ctx: The command Context
:return:
"""
self.inline = not self.inline
@commands.command()
@commands.has_permissions(administrator=True)
async def move_process(self, direction, process_name):
"""
need to make
:param direction:
:param process_name:
:return:
"""
for i in range(len(PROCESSES)):
if PROCESSES[i] == process_name:
if direction.lower() == "up":
pass
@commands.command()
@commands.has_permissions(administrator=True)
async def add_process(self, ctx, process, name):
"""
Adds a process to the process display.
Must be different from ones currently displayed.
:param ctx: Context of the command
:param process: The process (e.g. 'cmd.exe') to be added
:param name: The name to be displayed for the process (e.g. 'Command Prompt')
:return:
"""
name = self.fix_emoji_escapes(name)
if process in PROCESSES.keys():
await ctx.send(f"The process {process} is already being displayed")
elif name in PROCESSES.values():
await ctx.send(f"The process name {name} is already being displayed")
else:
PROCESSES[process] = name
self.update_processes_config()
await ctx.send(f"The process {name} has been added")
@commands.command()
@commands.has_permissions(administrator=True)
async def remove_process(self, ctx, *name):
"""
Removes a process from the process display
:param ctx: Context of the command
:param name: Name displayed for the process (e.g. Command Prompt)
:return:
"""
print(name)
name = self.fix_emoji_escapes(" ".join(name))
complete = False
for process in PROCESSES.keys():
if PROCESSES.get(process) == name:
PROCESSES.pop(process)
self.update_processes_config()
await ctx.send(f"The process {name} has been removed")
complete = True
break
if not complete:
await ctx.send(f"The process {name} doesn't exist")
@commands.command()
@commands.has_permissions(administrator=True)
async def edit_process(self, ctx, old_name, new_name):
"""
Edits the name of a process
:param ctx: The context of the command
:param old_name: The old name of the process (to be changed)
:param new_name: The new name of the process (changed to)
:return:
"""
old_name = self.fix_emoji_escapes(old_name)
new_name = self.fix_emoji_escapes(new_name)
if old_name in PROCESSES.values():
for process in PROCESSES:
if PROCESSES.get(process) == old_name:
PROCESSES.update({process: new_name})
self.update_processes_config()
else:
await ctx.send(f"Process name {old_name} doesn't exist")
@tasks.loop(seconds=1)
async def find_processes(self, msg):
"""
The processes with statuses are attached to the msg given
:param msg: The message to be edited with the processes
:return:
"""
running_processes = []
new_embed = DEFAULT_EMBED.copy()
for proc in psutil.process_iter():
if proc.name() in PROCESSES.keys():
running_processes.append(proc.name())
elif proc.name() in ["java.exe", "javaw.exe"] and proc.cwd() in PROCESSES.keys():
running_processes.append(proc.cwd())
for process in PROCESSES:
try:
if process in running_processes:
new_embed.add_field(name=PROCESSES.get(process),
value="Online <:GreenTick:592083498534174721>", inline=self.inline)
else:
new_embed.add_field(name=PROCESSES.get(process),
value="Offline <:RedCross:592082557961633877>", inline=self.inline)
except PermissionError:
new_embed.add_field(name=PROCESSES.get(process),
value="Admin Required <:OrangeUnknown:592082676891123722>", inline=self.inline)
await msg.edit(content="", embed=new_embed)
def is_me(self, m):
"""
        Checks if a message's author is the bot
        :param m: the message to check
:return:
"""
return m.author == self.client.user
async def delete_bot_msg(self, channel):
"""
Deletes up to the last 100 messages sent by the bot in the given channel
:param channel: The channel that will have the messages deleted
:return: the message that says how many messages were deleted
"""
await channel.purge(limit=100, check=self.is_me)
@staticmethod
def update_processes_config():
"""
Updates the processes line in the config with the current PROCESSES
:return:
"""
config.set("ProcessDisplay", "processes", str(PROCESSES))
with open(CONFIG_PATH, 'w', encoding='utf-8') as configfile:
config.write(configfile)
@staticmethod
def fix_emoji_escapes(text):
"""
Fixes emoji escapes to add the < back on
:param text: The text that needs to be checked for an escape
:return: the fixed text
"""
new_text = text.split(":")
for i in range(2, len(new_text)):
if ">" in new_text[i]:
new_text[i-2] += "<"
return ":".join(new_text)
def setup(client):
"""
Ran on setup of the Cog
:param client: The bot client
:return:
"""
client.add_cog(ProcessDisplay(client))
| [
2,
4,
6,
9,
11
] |
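The find_processes loop above calls proc.name() per process and catches PermissionError case by case; psutil can instead prefetch attributes with process_iter(['name']), which silently skips processes that vanish mid-scan. A minimal sketch of that variant (running_names is a hypothetical helper):

import psutil

def running_names(watch):
    """Return the subset of watched process names currently running."""
    found = set()
    # process_iter(['name']) prefetches the name into proc.info and
    # skips processes that disappear during iteration
    for proc in psutil.process_iter(['name']):
        if proc.info['name'] in watch:
            found.add(proc.info['name'])
    return found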
521 | 9c09309d23510aee4409a6d9021c2991afd2d349 | <mask token>
def clearConsole():
os.system('cls' if os.name == 'nt' else 'clear')
def main():
checkArgs()
rfile = open(sys.argv[1], 'r')
wfile = open(output_name, 'w')
parseAndStrip(rfile, wfile)
rfile.close()
wfile.close()
def checkArgs():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print(
'Usage Error:\t\tThe program needs (at least) an input filename to run.'
)
print('Correct Usage:\t\tpython titleStrip.py [input filename]')
print(
'Alternate Usage:\t\tpython titleStrip.py [input filename] [output filename]'
)
sys.exit(1)
if len(sys.argv) == 3:
global output_name
output_name = sys.argv[2]
def parseAndStrip(rfile, wfile):
while True:
line = rfile.readline()
if not line:
return
skip = 0
for key in strip_target:
if key in line:
skip = 1
if skip == 0:
wfile.write(line)
<mask token>
| <mask token>
def clearConsole():
os.system('cls' if os.name == 'nt' else 'clear')
def main():
checkArgs()
rfile = open(sys.argv[1], 'r')
wfile = open(output_name, 'w')
parseAndStrip(rfile, wfile)
rfile.close()
wfile.close()
def checkArgs():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print(
'Usage Error:\t\tThe program needs (at least) an input filename to run.'
)
print('Correct Usage:\t\tpython titleStrip.py [input filename]')
print(
'Alternate Usage:\t\tpython titleStrip.py [input filename] [output filename]'
)
sys.exit(1)
if len(sys.argv) == 3:
global output_name
output_name = sys.argv[2]
def parseAndStrip(rfile, wfile):
while True:
line = rfile.readline()
if not line:
return
skip = 0
for key in strip_target:
if key in line:
skip = 1
if skip == 0:
wfile.write(line)
main()
| <mask token>
strip_target = ['Wizards of', 'Random Generator']
output_name = 'titleStrip_out.txt'
def clearConsole():
os.system('cls' if os.name == 'nt' else 'clear')
def main():
checkArgs()
rfile = open(sys.argv[1], 'r')
wfile = open(output_name, 'w')
parseAndStrip(rfile, wfile)
rfile.close()
wfile.close()
def checkArgs():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print(
'Usage Error:\t\tThe program needs (at least) an input filename to run.'
)
print('Correct Usage:\t\tpython titleStrip.py [input filename]')
print(
'Alternate Usage:\t\tpython titleStrip.py [input filename] [output filename]'
)
sys.exit(1)
if len(sys.argv) == 3:
global output_name
output_name = sys.argv[2]
def parseAndStrip(rfile, wfile):
while True:
line = rfile.readline()
if not line:
return
skip = 0
for key in strip_target:
if key in line:
skip = 1
if skip == 0:
wfile.write(line)
main()
| import os, sys
strip_target = ['Wizards of', 'Random Generator']
output_name = 'titleStrip_out.txt'
def clearConsole():
os.system('cls' if os.name == 'nt' else 'clear')
def main():
checkArgs()
rfile = open(sys.argv[1], 'r')
wfile = open(output_name, 'w')
parseAndStrip(rfile, wfile)
rfile.close()
wfile.close()
def checkArgs():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print(
'Usage Error:\t\tThe program needs (at least) an input filename to run.'
)
print('Correct Usage:\t\tpython titleStrip.py [input filename]')
print(
'Alternate Usage:\t\tpython titleStrip.py [input filename] [output filename]'
)
sys.exit(1)
if len(sys.argv) == 3:
global output_name
output_name = sys.argv[2]
def parseAndStrip(rfile, wfile):
while True:
line = rfile.readline()
if not line:
return
skip = 0
for key in strip_target:
if key in line:
skip = 1
if skip == 0:
wfile.write(line)
main()
| ################################################################################
#
# titleStrip.py
#
# Generates an output file with the titles of the input stripped
# Usage:
# python titleStrip.py [input filename] [output filename]
#
################################################################################
import os, sys
# Globals / Settings
strip_target = ['Wizards of', 'Random Generator'] # Keys for removal between input and output
output_name = 'titleStrip_out.txt' # default output filename is out.txt
# There will be a better home for this, mhm...
def clearConsole ():
os.system ('cls' if os.name == 'nt' else 'clear')
def main():
checkArgs()
# Open up the input / output files (read / write modes respectively)
rfile = open (sys.argv[1], 'r')
wfile = open (output_name, 'w')
parseAndStrip (rfile, wfile)
# Close the input / output files now that we are done
rfile.close()
wfile.close()
# checkArgs
# 1. Verifies that the number of arguments is acceptable
# 2. Reads in optional output filename
def checkArgs ():
# Verify number of input arguments
if len (sys.argv) < 2 or len (sys.argv) > 3:
print ("Usage Error:\t\tThe program needs (at least) an input filename to run.")
print ("Correct Usage:\t\tpython titleStrip.py [input filename]")
print ("Alternate Usage:\t\tpython titleStrip.py [input filename] [output filename]")
sys.exit(1)
# Read in optional output filename if any
if len (sys.argv) == 3:
global output_name # Use the global output_name
output_name = sys.argv [2] # Set the name
# parseAndStrip
# Reads through rfile and copies lines into wfile
# If we find a line to remove, we do not copy it into wfile
def parseAndStrip ( rfile, wfile ):
while True:
line = rfile.readline() # read in a line
if not line: return # leave this function if we are done
# Check to see if line has a key for removal
skip = 0
for key in strip_target:
if key in line:
skip = 1
# Only copy from rfile to wfile if skip == 0
if skip == 0:
wfile.write (line)
main()
| [
4,
5,
6,
7,
8
] |
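parseAndStrip above tracks a manual skip flag; the same filter reads naturally as a generator over the file object using any(). A minimal sketch ('input.txt' and strip_lines are placeholder names, not from the record):

def strip_lines(lines, targets):
    """Yield only the lines that contain none of the target keys."""
    for line in lines:
        if not any(key in line for key in targets):
            yield line

with open('input.txt') as rfile, open('titleStrip_out.txt', 'w') as wfile:
    wfile.writelines(strip_lines(rfile, ['Wizards of', 'Random Generator']))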
522 | 8f3abc5beaded94b6d7b93ac2cfcd12145d75fe8 | <mask token>
class MySubClass(MyClass):
<mask token>
<mask token>
<mask token>
class MyClass2:
stuff = 123
def __init_subclass__(cls):
super().__init_subclass__()
print(f'* Running {cls.__name__}.__init_subclass__')
print(cls.__dict__)
        print(cls.__base__.__dict__)
def foo(self):
pass
class MySubClass2(MyClass2):
ofther = 456
def bar(self):
pass
| <mask token>
class MyClass(metaclass=Meta):
<mask token>
def foo(self):
pass
class MySubClass(MyClass):
ofther = 456
def bar(self):
pass
<mask token>
class MyClass2:
stuff = 123
def __init_subclass__(cls):
super().__init_subclass__()
print(f'* Running {cls.__name__}.__init_subclass__')
print(cls.__dict__)
        print(cls.__base__.__dict__)
def foo(self):
pass
class MySubClass2(MyClass2):
ofther = 456
def bar(self):
pass
| class Meta(type):
<mask token>
class MyClass(metaclass=Meta):
stuff = 123
def foo(self):
pass
class MySubClass(MyClass):
ofther = 456
def bar(self):
pass
<mask token>
class MyClass2:
stuff = 123
def __init_subclass__(cls):
super().__init_subclass__()
print(f'* Running {cls.__name__}.__init_subclass__')
print(cls.__dict__)
        print(cls.__base__.__dict__)
def foo(self):
pass
class MySubClass2(MyClass2):
ofther = 456
def bar(self):
pass
| class Meta(type):
def __new__(meta, name, bases, class_dict):
print(f'* Running {meta}.__new__ for {name}')
print('Bases:', bases)
print(class_dict)
return type.__new__(meta, name, bases, class_dict)
class MyClass(metaclass=Meta):
stuff = 123
def foo(self):
pass
class MySubClass(MyClass):
ofther = 456
def bar(self):
pass
<mask token>
class MyClass2:
stuff = 123
def __init_subclass__(cls):
super().__init_subclass__()
print(f'* Running {cls.__name__}.__init_subclass__')
print(cls.__dict__)
        print(cls.__base__.__dict__)
def foo(self):
pass
class MySubClass2(MyClass2):
ofther = 456
def bar(self):
pass
| class Meta(type):
def __new__(meta, name, bases, class_dict):
print(f'* Running {meta}.__new__ for {name}')
print("Bases:", bases)
print(class_dict)
return type.__new__(meta, name, bases, class_dict)
class MyClass(metaclass=Meta):
stuff = 123
def foo(self):
pass
class MySubClass(MyClass):
ofther = 456
def bar(self):
pass
print("")
class MyClass2:
stuff = 123
def __init_subclass__(cls):
super().__init_subclass__()
print(f'* Running {cls.__name__}.__init_subclass__')
print(cls.__dict__)
        print(cls.__base__.__dict__)
def foo(self):
pass
class MySubClass2(MyClass2):
ofther = 456
def bar(self):
pass
| [
8,
12,
14,
15,
17
] |
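Inside __init_subclass__ the cls argument is the subclass being created, so the parent's namespace is reached through cls.__base__; a bare cls.super() attribute does not exist and would raise AttributeError. A small self-contained check:

class Base:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # cls is the subclass under construction; cls.__base__ is its parent
        print(f'{cls.__name__} derives from {cls.__base__.__name__}')

class Child(Base):  # defining this prints: Child derives from Base
    pass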
523 | 6801d68ebcc6ff52d9be92efeeb8727997a14bbd | <mask token>
| <mask token>
if __name__ == '__main__':
auth = HTTPBasicAuth('cisco', 'cisco')
headers = {'Accept': 'application/json', 'Content-Type': 'application/json'
}
url = 'https://asav/api/interfaces/physical/GigabitEthernet0_API_SLASH_0'
body = {'kind': 'object#GigabitInterface', 'interfaceDesc':
'Configured by Python'}
requests.packages.urllib3.disable_warnings()
response = requests.patch(url, data=json.dumps(body), auth=auth,
headers=headers, verify=False)
| import json
import requests
from requests.auth import HTTPBasicAuth
if __name__ == '__main__':
auth = HTTPBasicAuth('cisco', 'cisco')
headers = {'Accept': 'application/json', 'Content-Type': 'application/json'
}
url = 'https://asav/api/interfaces/physical/GigabitEthernet0_API_SLASH_0'
body = {'kind': 'object#GigabitInterface', 'interfaceDesc':
'Configured by Python'}
requests.packages.urllib3.disable_warnings()
response = requests.patch(url, data=json.dumps(body), auth=auth,
headers=headers, verify=False)
| #!/usr/bin/env python
import json
import requests
from requests.auth import HTTPBasicAuth
if __name__ == "__main__":
auth = HTTPBasicAuth('cisco', 'cisco')
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
url = "https://asav/api/interfaces/physical/GigabitEthernet0_API_SLASH_0"
body = {
"kind": "object#GigabitInterface",
"interfaceDesc": "Configured by Python"
}
requests.packages.urllib3.disable_warnings()
response = requests.patch(url, data=json.dumps(body), auth=auth, headers=headers, verify=False)
| null | [
0,
1,
2,
3
] |
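The record above discards the PATCH response; a short follow-up sketch that reports the outcome, assuming it runs right after the requests.patch call (response.ok is true for any 2xx status):

if response.ok:
    print('interface description updated:', response.status_code)
else:
    print('PATCH failed:', response.status_code, response.text)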
524 | 6e3bb17696953256af6d8194128427acebf1daac | <mask token>
| <mask token>
class Generator:
<mask token>
@staticmethod
def generate(level):
"""
根据 level 生成指定等级的算术题
0:小学;1:初中;2:高中
"""
"""
生成操作数序列以及二元运算符序列
"""
length = randint(0 if level else 1, 4)
op2Arr = [Generator.opset[randint(0, 3)] for i in range(length)]
numArr = [randint(1, 100) for i in range(length + 1)]
"""
生成二元运算符的位置
"""
remain = 1
position = []
for i in range(length):
position.append(randint(0, remain))
remain += 1 - position[i]
if remain > 1:
position[-1] += remain - 1
"""
生成一元运算符序列
"""
op1Arr = []
if level:
if level == 1:
op1Arr.append(Generator.opset[randint(4, 5)])
elif level == 2:
op1Arr.append(Generator.opset[randint(6, 8)])
for i in range(randint(0, level)):
op1Arr.append(Generator.opset[randint(4, 5 if level == 1 else
8)])
shuffle(op1Arr)
"""
生成后缀表达式
"""
expression = numArr
offset = 2
index = 0
for i in range(length):
for j in range(position[i]):
expression.insert(i + j + offset, op2Arr[index])
index += 1
offset += position[i]
for op in op1Arr:
expression.insert(randint(1, len(expression)), op)
def getPriority(item):
"""
返回运算符或操作数的优先级
操作数:0
一元运算符:1
'*'、'/':2
'+'、'-':3
"""
if isinstance(item, int):
return 0
elif item == '+' or item == '-':
return 3
elif item == '*' or item == '/':
return 2
else:
return 1
"""
转换成中缀表达式
stack 存储 (expression, priority)
"""
stack = []
for e in expression:
priority = getPriority(e)
if priority == 0:
"""
是一个操作数,直接入栈
"""
stack.append((e, 0))
elif priority == 3:
"""
是加/减运算,优先级最低,拼接后直接入栈
"""
item2 = stack.pop()[0]
item1 = stack.pop()[0]
stack.append(('%s%s%s' % (item1, e, item2), 3))
elif priority == 2:
"""
是乘/除运算,如果有加/减运算需要加括号
"""
item2, prio2 = stack.pop()
if prio2 > 2:
item2 = '(%s)' % item2
item1, prio1 = stack.pop()
if prio1 > 2:
item1 = '(%s)' % item1
stack.append(('%s%s%s' % (item1, e, item2), 2))
elif priority == 1:
"""
是一元运算,除了操作数都要加括号
"""
item, prio = stack.pop()
if prio:
item = '(%s)' % item
if e == '²':
stack.append(('%s%s' % (item, '²'), 1))
else:
stack.append(('%s%s' % (e, item), 1))
return stack[0][0]
| <mask token>
class Generator:
opset = ['+', '-', '*', '/', '²', '√', 'sin', 'cos', 'tan']
@staticmethod
def generate(level):
"""
根据 level 生成指定等级的算术题
0:小学;1:初中;2:高中
"""
"""
生成操作数序列以及二元运算符序列
"""
length = randint(0 if level else 1, 4)
op2Arr = [Generator.opset[randint(0, 3)] for i in range(length)]
numArr = [randint(1, 100) for i in range(length + 1)]
"""
生成二元运算符的位置
"""
remain = 1
position = []
for i in range(length):
position.append(randint(0, remain))
remain += 1 - position[i]
if remain > 1:
position[-1] += remain - 1
"""
生成一元运算符序列
"""
op1Arr = []
if level:
if level == 1:
op1Arr.append(Generator.opset[randint(4, 5)])
elif level == 2:
op1Arr.append(Generator.opset[randint(6, 8)])
for i in range(randint(0, level)):
op1Arr.append(Generator.opset[randint(4, 5 if level == 1 else
8)])
shuffle(op1Arr)
"""
生成后缀表达式
"""
expression = numArr
offset = 2
index = 0
for i in range(length):
for j in range(position[i]):
expression.insert(i + j + offset, op2Arr[index])
index += 1
offset += position[i]
for op in op1Arr:
expression.insert(randint(1, len(expression)), op)
def getPriority(item):
"""
返回运算符或操作数的优先级
操作数:0
一元运算符:1
'*'、'/':2
'+'、'-':3
"""
if isinstance(item, int):
return 0
elif item == '+' or item == '-':
return 3
elif item == '*' or item == '/':
return 2
else:
return 1
"""
转换成中缀表达式
stack 存储 (expression, priority)
"""
stack = []
for e in expression:
priority = getPriority(e)
if priority == 0:
"""
是一个操作数,直接入栈
"""
stack.append((e, 0))
elif priority == 3:
"""
是加/减运算,优先级最低,拼接后直接入栈
"""
item2 = stack.pop()[0]
item1 = stack.pop()[0]
stack.append(('%s%s%s' % (item1, e, item2), 3))
elif priority == 2:
"""
是乘/除运算,如果有加/减运算需要加括号
"""
item2, prio2 = stack.pop()
if prio2 > 2:
item2 = '(%s)' % item2
item1, prio1 = stack.pop()
if prio1 > 2:
item1 = '(%s)' % item1
stack.append(('%s%s%s' % (item1, e, item2), 2))
elif priority == 1:
"""
是一元运算,除了操作数都要加括号
"""
item, prio = stack.pop()
if prio:
item = '(%s)' % item
if e == '²':
stack.append(('%s%s' % (item, '²'), 1))
else:
stack.append(('%s%s' % (e, item), 1))
return stack[0][0]
| from random import randint, shuffle
class Generator:
opset = ['+', '-', '*', '/', '²', '√', 'sin', 'cos', 'tan']
@staticmethod
def generate(level):
"""
根据 level 生成指定等级的算术题
0:小学;1:初中;2:高中
"""
"""
生成操作数序列以及二元运算符序列
"""
length = randint(0 if level else 1, 4)
op2Arr = [Generator.opset[randint(0, 3)] for i in range(length)]
numArr = [randint(1, 100) for i in range(length + 1)]
"""
生成二元运算符的位置
"""
remain = 1
position = []
for i in range(length):
position.append(randint(0, remain))
remain += 1 - position[i]
if remain > 1:
position[-1] += remain - 1
"""
生成一元运算符序列
"""
op1Arr = []
if level:
if level == 1:
op1Arr.append(Generator.opset[randint(4, 5)])
elif level == 2:
op1Arr.append(Generator.opset[randint(6, 8)])
for i in range(randint(0, level)):
op1Arr.append(Generator.opset[randint(4, 5 if level == 1 else
8)])
shuffle(op1Arr)
"""
生成后缀表达式
"""
expression = numArr
offset = 2
index = 0
for i in range(length):
for j in range(position[i]):
expression.insert(i + j + offset, op2Arr[index])
index += 1
offset += position[i]
for op in op1Arr:
expression.insert(randint(1, len(expression)), op)
def getPriority(item):
"""
返回运算符或操作数的优先级
操作数:0
一元运算符:1
'*'、'/':2
'+'、'-':3
"""
if isinstance(item, int):
return 0
elif item == '+' or item == '-':
return 3
elif item == '*' or item == '/':
return 2
else:
return 1
"""
转换成中缀表达式
stack 存储 (expression, priority)
"""
stack = []
for e in expression:
priority = getPriority(e)
if priority == 0:
"""
是一个操作数,直接入栈
"""
stack.append((e, 0))
elif priority == 3:
"""
是加/减运算,优先级最低,拼接后直接入栈
"""
item2 = stack.pop()[0]
item1 = stack.pop()[0]
stack.append(('%s%s%s' % (item1, e, item2), 3))
elif priority == 2:
"""
是乘/除运算,如果有加/减运算需要加括号
"""
item2, prio2 = stack.pop()
if prio2 > 2:
item2 = '(%s)' % item2
item1, prio1 = stack.pop()
if prio1 > 2:
item1 = '(%s)' % item1
stack.append(('%s%s%s' % (item1, e, item2), 2))
elif priority == 1:
"""
是一元运算,除了操作数都要加括号
"""
item, prio = stack.pop()
if prio:
item = '(%s)' % item
if e == '²':
stack.append(('%s%s' % (item, '²'), 1))
else:
stack.append(('%s%s' % (e, item), 1))
return stack[0][0]
| null | [
0,
2,
3,
4
] |
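A quick usage sketch for the Generator class above; output varies per run because every choice is random, so the seed call below only makes the demo repeatable (a level-2 result might look like tan(37+5)*12):

from random import seed

seed(42)
for level in (0, 1, 2):
    print(level, Generator.generate(level))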
525 | 297b2ff6c6022bd8aac09c25537a132f67e05174 | from PIL import Image
source = Image.open("map4.png")
img = source.load()
map_data = {}
curr_x = 1
curr_y = 1
#Go over each chunk and get the pixel info
for x in range(0, 100, 10):
curr_x = x+1
for y in range(0, 100, 10):
curr_y = y+1
chunk = str(curr_x)+"X"+str(curr_y)
if chunk not in map_data:
map_data[chunk] = {}
for j in range(0, 10):
for k in range(0, 10):
loc = str(curr_x+j)+"x"+str(curr_y+k)
map_data[chunk][loc] = img[x+j, y+k]
#print map_data.keys()
#print map_data["1X1"]
print len(map_data.keys())
print len(map_data["1X1"].keys())
| null | null | null | null | [
0
] |
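The record above is Python 2 (bare print statements); under Python 3 the same 10x10 chunking collapses into a dict comprehension. A minimal sketch assuming the same map4.png input:

from PIL import Image

img = Image.open('map4.png').load()
map_data = {
    f'{x + 1}X{y + 1}': {
        f'{x + 1 + j}x{y + 1 + k}': img[x + j, y + k]
        for j in range(10) for k in range(10)
    }
    for x in range(0, 100, 10) for y in range(0, 100, 10)
}
print(len(map_data), len(map_data['1X1']))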
526 | 83117000f5f34490cb14580a9867b1e871ccc2ae | <mask token>
class BureauActifCalendarDataType(db.Model, BaseModel):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class BureauActifCalendarDataType(db.Model, BaseModel):
<mask token>
<mask token>
<mask token>
def to_json(self, ignore_fields=None, minimal=False):
if ignore_fields is None:
ignore_fields = []
return super().to_json(ignore_fields=ignore_fields)
@staticmethod
def create_defaults():
data = BureauActifCalendarDataType()
data.name = 'seating'
db.session.add(data)
data2 = BureauActifCalendarDataType()
data2.name = 'standing'
db.session.add(data2)
data3 = BureauActifCalendarDataType()
data3.name = 'positionChanges'
db.session.add(data3)
data4 = BureauActifCalendarDataType()
data4.name = 'absent'
db.session.add(data4)
db.session.commit()
| <mask token>
class BureauActifCalendarDataType(db.Model, BaseModel):
__tablename__ = 'ba_calendar_data_type'
id_calendar_data_type = db.Column(db.Integer, db.Sequence(
'id_calendar_data_type_sequence'), primary_key=True, autoincrement=True
)
name = db.Column(db.String, nullable=False)
def to_json(self, ignore_fields=None, minimal=False):
if ignore_fields is None:
ignore_fields = []
return super().to_json(ignore_fields=ignore_fields)
@staticmethod
def create_defaults():
data = BureauActifCalendarDataType()
data.name = 'seating'
db.session.add(data)
data2 = BureauActifCalendarDataType()
data2.name = 'standing'
db.session.add(data2)
data3 = BureauActifCalendarDataType()
data3.name = 'positionChanges'
db.session.add(data3)
data4 = BureauActifCalendarDataType()
data4.name = 'absent'
db.session.add(data4)
db.session.commit()
| from services.BureauActif.libbureauactif.db.Base import db, BaseModel
class BureauActifCalendarDataType(db.Model, BaseModel):
__tablename__ = 'ba_calendar_data_type'
id_calendar_data_type = db.Column(db.Integer, db.Sequence(
'id_calendar_data_type_sequence'), primary_key=True, autoincrement=True
)
name = db.Column(db.String, nullable=False)
def to_json(self, ignore_fields=None, minimal=False):
if ignore_fields is None:
ignore_fields = []
return super().to_json(ignore_fields=ignore_fields)
@staticmethod
def create_defaults():
data = BureauActifCalendarDataType()
data.name = 'seating'
db.session.add(data)
data2 = BureauActifCalendarDataType()
data2.name = 'standing'
db.session.add(data2)
data3 = BureauActifCalendarDataType()
data3.name = 'positionChanges'
db.session.add(data3)
data4 = BureauActifCalendarDataType()
data4.name = 'absent'
db.session.add(data4)
db.session.commit()
| from services.BureauActif.libbureauactif.db.Base import db, BaseModel
class BureauActifCalendarDataType(db.Model, BaseModel):
__tablename__ = "ba_calendar_data_type"
id_calendar_data_type = db.Column(db.Integer, db.Sequence('id_calendar_data_type_sequence'), primary_key=True,
autoincrement=True)
name = db.Column(db.String, nullable=False)
def to_json(self, ignore_fields=None, minimal=False):
if ignore_fields is None:
ignore_fields = []
return super().to_json(ignore_fields=ignore_fields)
@staticmethod
def create_defaults():
data = BureauActifCalendarDataType()
data.name = 'seating'
db.session.add(data)
data2 = BureauActifCalendarDataType()
data2.name = 'standing'
db.session.add(data2)
data3 = BureauActifCalendarDataType()
data3.name = 'positionChanges'
db.session.add(data3)
data4 = BureauActifCalendarDataType()
data4.name = 'absent'
db.session.add(data4)
db.session.commit()
| [
1,
3,
4,
5,
6
] |
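A usage sketch for the model above, assuming db is a Flask-SQLAlchemy instance with an active app context (the query property below is Flask-SQLAlchemy's, not defined in the record):

BureauActifCalendarDataType.create_defaults()  # seeds the four default rows
row = BureauActifCalendarDataType.query.filter_by(name='standing').first()
print(row.to_json(ignore_fields=['id_calendar_data_type']))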
527 | 639669174435492f43bf51680c2724863017e9d2 | import helpers
import os
import os.path
import json
import imp
import source.freesprints
from pygame.locals import *
class PluginLoader:
available_plugins = None
def __init__(self):
self.checkAvailablePlugins()
def checkAvailablePlugins(self):
print helpers.pluginsPath()
plugin_dirs = [plugin_path for plugin_path in os.listdir(helpers.pluginsPath())] #if os.path.isdir(f)]
self.available_plugins = []
for plugin_path in plugin_dirs:
print plugin_path
if plugin_path == ".DS_Store":
continue
plugin_path_absolute = os.path.join(helpers.pluginsPath(), plugin_path)
json_info_path = os.path.join(plugin_path_absolute, "info.json")
json_info_data = open(json_info_path)
jsonInfo = json.load(json_info_data)
self.available_plugins.append(Plugin(jsonInfo, plugin_path_absolute))
def getAvailablePlugins(self):
return self.available_plugins
class Plugin:
path = None
name = None
version = None
author = None
module = None
plugin_object = None
def __init__(self, info_json, path):
self.path = path
self.name = info_json.get("name")
self.version = info_json.get("version")
self.author = info_json.get("author")
print info_json
self.init_module()
def init_module(self):
#module = imp.find_module("pluginModule", [self.path])
self.module = imp.load_source("pluginModule", os.path.join(self.path, "__init__.py"))
print "FIND MODULE:"
print self.module
self.plugin_object = self.module.VisualisationPlugin(source.freesprints.get_app(), self)
#self.plugin_object.start()
#self.plugin_object.spinCount(123, 0)
def start(self, race_options):
source.freesprints.get_app().get_window_surface().fill(Color("black"))
self.plugin_object.start(race_options)
| null | null | null | null | [
0
] |
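imp.load_source above is Python 2 era and long deprecated; under Python 3 the same dynamic load goes through importlib. A minimal sketch (load_plugin_module is a hypothetical helper name):

import importlib.util
import os

def load_plugin_module(plugin_path):
    """Load a plugin package's __init__.py as a module named 'pluginModule'."""
    spec = importlib.util.spec_from_file_location(
        'pluginModule', os.path.join(plugin_path, '__init__.py'))
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module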
528 | 4462fec6e0edc25530c93ffeeae2372c86fef2cc | <mask token>
| <mask token>
cv2.imshow('Original', image)
cv2.waitKey(0)
<mask token>
cv2.imshow('Rotated by 45 degrees', rotated)
cv2.waitKey(0)
<mask token>
cv2.imshow('Rotated by -90 degrees', rotated)
cv2.waitKey(0)
<mask token>
cv2.imshow('Rotated by 180', rotated)
cv2.waitKey(0)
| <mask token>
image = cv2.imread('D:\\Github\\python-opencv\\images\\trex.png')
cv2.imshow('Original', image)
cv2.waitKey(0)
h, w = image.shape[:2]
center = w / 2, h / 2
M = cv2.getRotationMatrix2D(center, 45, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow('Rotated by 45 degrees', rotated)
cv2.waitKey(0)
M = cv2.getRotationMatrix2D(center, -90, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow('Rotated by -90 degrees', rotated)
cv2.waitKey(0)
rotated = imutils.rotate(image, 180)
cv2.imshow('Rotated by 180', rotated)
cv2.waitKey(0)
| import numpy as np
import imutils
import cv2
image = cv2.imread('D:\\Github\\python-opencv\\images\\trex.png')
cv2.imshow('Original', image)
cv2.waitKey(0)
h, w = image.shape[:2]
center = w / 2, h / 2
M = cv2.getRotationMatrix2D(center, 45, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow('Rotated by 45 degrees', rotated)
cv2.waitKey(0)
M = cv2.getRotationMatrix2D(center, -90, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow('Rotated by -90 degrees', rotated)
cv2.waitKey(0)
rotated = imutils.rotate(image, 180)
cv2.imshow('Rotated by 180', rotated)
cv2.waitKey(0)
| import numpy as np
import imutils
import cv2
image = cv2.imread("D:\\Github\\python-opencv\\images\\trex.png")
cv2.imshow("Original", image)
cv2.waitKey(0)
(h, w) = image.shape[:2] # get height and width of the image
center = (w/2, h/2) # which point to rotate around
M = cv2.getRotationMatrix2D(center, 45, 1.0) # rotation matrix
rotated = cv2.warpAffine(image, M, (w, h)) # apply the rotation
cv2.imshow("Rotated by 45 degrees", rotated)
cv2.waitKey(0)
M = cv2.getRotationMatrix2D(center, -90, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow("Rotated by -90 degrees", rotated)
cv2.waitKey(0)
rotated = imutils.rotate(image, 180)
cv2.imshow("Rotated by 180", rotated)
cv2.waitKey(0) | [
0,
1,
2,
3,
4
] |
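warpAffine above keeps the original canvas size, so rotated corners get clipped; imutils also provides rotate_bound, which grows the canvas instead. A short sketch, assuming image is still the trex frame loaded in the record:

rotated_full = imutils.rotate_bound(image, 45)  # canvas expands, nothing is clipped
cv2.imshow('Rotated by 45, unclipped', rotated_full)
cv2.waitKey(0)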
529 | 934921b22d036bd611134ce74f6eba3a2710018e | <mask token>
| <mask token>
while True:
ret, square = result.read()
area = square[100:200, 100:200]
cv2.imshow('video', square)
cv2.imshow('video2', area)
print(square)
if cv2.waitKey(25) & 255 == ord('q'):
break
result.release()
cv2.destroyAllWindows()
| <mask token>
result = cv2.VideoCapture(0)
while True:
ret, square = result.read()
area = square[100:200, 100:200]
cv2.imshow('video', square)
cv2.imshow('video2', area)
print(square)
if cv2.waitKey(25) & 255 == ord('q'):
break
result.release()
cv2.destroyAllWindows()
| import cv2
import numpy as np
result = cv2.VideoCapture(0)
while True:
ret, square = result.read()
area = square[100:200, 100:200]
cv2.imshow('video', square)
cv2.imshow('video2', area)
print(square)
if cv2.waitKey(25) & 255 == ord('q'):
break
result.release()
cv2.destroyAllWindows()
| import cv2
import numpy as np
result=cv2.VideoCapture(0)
while True:
ret,square=result.read()
area=square[100:200,100:200]
cv2.imshow("video",square)
cv2.imshow("video2",area)
print(square)
if cv2.waitKey(25) & 0xff == ord('q'):
break
result.release()
cv2.destroyAllWindows()
| [
0,
1,
2,
3,
4
] |
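Two small hardening steps for the capture loop above: check the ret flag before using the frame (read() can fail), and draw the ROI bounds on the preview. A sketch keeping the same 100:200 region; note that NumPy slicing is [y, x] while cv2.rectangle takes (x, y) points:
import cv2

cap = cv2.VideoCapture(0)
while True:
    ret, frame = cap.read()
    if not ret:  # camera unavailable or stream ended
        break
    roi = frame[100:200, 100:200].copy()  # copy so the rectangle stays out of it
    cv2.rectangle(frame, (100, 100), (200, 200), (0, 255, 0), 2)
    cv2.imshow("video", frame)
    cv2.imshow("roi", roi)
    if cv2.waitKey(25) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()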
530 | 76d166bc227986863db77aa784be3de8110437ff | <mask token>
class BaseStrategy(BaseConsumer):
<mask token>
<mask token>
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
<mask token>
<mask token>
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
<mask token>
<mask token>
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
<mask token>
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
<mask token>
<mask token>
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing position
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finished doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
<mask token>
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
<mask token>
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
because the warmup period is used for gathering necessary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
| <mask token>
class BaseStrategy(BaseConsumer):
<mask token>
<mask token>
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
<mask token>
<mask token>
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
<mask token>
@property
def nav(self):
"""Net Account Value / Net Liquidating Value"""
return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
<mask token>
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
<mask token>
<mask token>
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing position
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finished doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
<mask token>
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
<mask token>
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
because the warmup period is used for gathering necessary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
| <mask token>
class BaseStrategy(BaseConsumer):
<mask token>
def __init__(self, symbol_list, allocation, freq, positions, start, end,
warmup=0, fixed_allocation=True, batch_size=10000):
"""
Parameter:
----------
symbol_list (list): A list of Contract perm_tick (for data)
allocation (float): Dollar amount that this strategy is able to use
freq (conf.FREQ): Data Frequency type for this strategy (for data)
positions (dict of dict):
A dictionary with perm_tick and a dictionary of arguments
- pct_portfolio (float): percentage of the allocation
- rebalance (int): # of days to rebalance to pct_portfolio
- hard_stop (float): hard drawdown gate to close position
warmup (int): # of days to warmup the strategy
env_type (string): {'BACKTEST', 'PAPER', 'LIVE'}
which environment to run the strategy in
start, end (datetime):
Only for backtesting, to specify the range of data to test
"""
n = ceil(freq.one_day)
num_pos = len(positions)
self.symbol_list = symbol_list
self.freq = freq
self.warmup = warmup * n
if start:
self.start_dt = clean_timestamp(start)
if end:
self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1,
days=1)
self.allocation = allocation
self.cash = allocation
self.commission = 0
self.fixed_allocation = fixed_allocation
pos_dict = {}
for perm_tick, v in positions.items():
if perm_tick not in self.symbol_list:
self.symbol_list.append(perm_tick)
pos = Position(perm_tick, pct_portfolio=v.get('pct_portfolio',
1 / num_pos), rebalance=v.get('rebalance', 0) * n,
hard_stop=v.get('hard_stop', 0))
pos_dict[perm_tick] = pos
self.pos = pos_dict
self.t = 0
self._hist = []
self.batch_size = batch_size
super().__init__(comp_type='STGY', required=['feed', 'exe'])
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
def subscriptions(self):
return [('ack-reg-feed', self.id, self.on_ack_reg_feed), (
'ack-dereg-feed', self.id, self.on_ack_dereg_feed), (
'ack-reg-exe', self.id, self.on_ack_reg_exe), ('ack-dereg-exe',
self.id, self.on_ack_dereg_exe), ('eod', self.id, self.on_eod),
('tick', self.id, self.on_market), ('fill', self.id, self.on_fill)]
def update_data(self, ticks):
pass
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
def has_short(self, symbol):
return self.pos[symbol].has_short
@property
def nav(self):
"""Net Account Value / Net Liquidating Value"""
return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
@property
def avaliable_bp(self):
return self.total_bp - self.total_cost
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
def on_ack_dereg_feed(self, oid, body):
self.required['feed'] = False
def on_ack_dereg_exe(self, oid, body):
self.required['exe'] = False
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing position
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finished doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
def on_order(self, order, lvl, bp):
"""Handling new order
- Orders are generated from signals
- will have to check currently available buying power before publishing
Parameter:
---------
order (Order Event)
lvl (str): Level of urgency for the order
This flag will be used to call corresponding callback
bp (float): The amount of available buying power
Return:
-------
used buying power (float)
"""
S = order.symbol
need_bp = order.quantity * self.ticks[S].close
if need_bp <= bp:
used_bp = need_bp
if lvl == 'hard_stop':
self.on_hard_stop(S)
elif lvl == 'rebalance':
self.on_rebalance(S)
self.pos[order.symbol].confirm_order(order)
logger.info('Publish Order={} for Strategy={}'.format(order,
self.id))
self.basic_publish('order', sender=self.id, order=order)
else:
used_bp = 0
return used_bp
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
def _calculate_signals(self):
for pos in self.pos.values():
pos._calculate_signals()
self.calculate_signals()
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
because the warmup period is used for gathering necessary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
| <mask token>
logger = logging.getLogger('Strategy')
class BaseStrategy(BaseConsumer):
"""Strategy is an abstract base class providing an interface for
all subsequent (inherited) strategy handling objects.
Goal
----
The goal of a (derived) Strategy object
- based on the inbound 'Tick', calculate signals
- 'Signal' is at the symbol level which will be published
Note
----
This is designed to work both with historic and live data as
the Strategy object is agnostic to the data source,
since it obtains the 'Tick' object from MarketEvent message
"""
def __init__(self, symbol_list, allocation, freq, positions, start, end,
warmup=0, fixed_allocation=True, batch_size=10000):
"""
Parameter:
----------
symbol_list (list): A list of Contract perm_tick (for data)
allocation (float): Dollar amount that this strategy is able to use
freq (conf.FREQ): Data Frequency type for this strategy (for data)
positions (dict of dict):
A dictionary with perm_tick and a dictionary of arguments
- pct_portfolio (float): percentage of the allocation
- rebalance (int): # of days to rebalance to pct_portfolio
- hard_stop (float): hard drawdown gate to close position
warmup (int): # of days to warmup the strategy
env_type (string): {'BACKTEST', 'PAPER', 'LIVE'}
which environment to run the strategy in
start, end (datetime):
Only for backtesting, to specify the range of data to test
"""
n = ceil(freq.one_day)
num_pos = len(positions)
self.symbol_list = symbol_list
self.freq = freq
self.warmup = warmup * n
if start:
self.start_dt = clean_timestamp(start)
if end:
self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1,
days=1)
self.allocation = allocation
self.cash = allocation
self.commission = 0
self.fixed_allocation = fixed_allocation
pos_dict = {}
for perm_tick, v in positions.items():
if perm_tick not in self.symbol_list:
self.symbol_list.append(perm_tick)
pos = Position(perm_tick, pct_portfolio=v.get('pct_portfolio',
1 / num_pos), rebalance=v.get('rebalance', 0) * n,
hard_stop=v.get('hard_stop', 0))
pos_dict[perm_tick] = pos
self.pos = pos_dict
self.t = 0
self._hist = []
self.batch_size = batch_size
super().__init__(comp_type='STGY', required=['feed', 'exe'])
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
def subscriptions(self):
return [('ack-reg-feed', self.id, self.on_ack_reg_feed), (
'ack-dereg-feed', self.id, self.on_ack_dereg_feed), (
'ack-reg-exe', self.id, self.on_ack_reg_exe), ('ack-dereg-exe',
self.id, self.on_ack_dereg_exe), ('eod', self.id, self.on_eod),
('tick', self.id, self.on_market), ('fill', self.id, self.on_fill)]
def update_data(self, ticks):
pass
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
def has_short(self, symbol):
return self.pos[symbol].has_short
@property
def nav(self):
"""Net Account Value / Net Liquidating Value"""
return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
@property
def avaliable_bp(self):
return self.total_bp - self.total_cost
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
def on_ack_dereg_feed(self, oid, body):
self.required['feed'] = False
def on_ack_dereg_exe(self, oid, body):
self.required['exe'] = False
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing position
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finished doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
def on_order(self, order, lvl, bp):
"""Handling new order
- Orders are generated from signals
- will have to check currently available buying power before publishing
Parameter:
---------
order (Order Event)
lvl (str): Level of urgency for the order
This flag will be used to call corresponding callback
bp (float): The amount of available buying power
Return:
-------
used buying power (float)
"""
S = order.symbol
need_bp = order.quantity * self.ticks[S].close
if need_bp <= bp:
used_bp = need_bp
if lvl == 'hard_stop':
self.on_hard_stop(S)
elif lvl == 'rebalance':
self.on_rebalance(S)
self.pos[order.symbol].confirm_order(order)
logger.info('Publish Order={} for Strategy={}'.format(order,
self.id))
self.basic_publish('order', sender=self.id, order=order)
else:
used_bp = 0
return used_bp
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
def _calculate_signals(self):
for pos in self.pos.values():
pos._calculate_signals()
self.calculate_signals()
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
because the warmup period is used for gathering necessary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
| import logging, numpy as np, time, pandas as pd
from abc import abstractmethod
from kombu import binding
from tqdm import tqdm
from functools import lru_cache
from threading import Thread
from math import ceil
from copy import copy
from .pos import Position
from .base import BaseConsumer
from .event import SignalEventPct, OrderEvent
from .conf import LONG, SHORT, EXIT, MKT, BUY, SELL, LOCAL_TZ
from .util import clean_timestamp
from .errors import OverFilling
logger = logging.getLogger('Strategy')
class BaseStrategy(BaseConsumer):
"""Strategy is an abstract base class providing an interface for
all subsequent (inherited) strategy handling objects.
Goal
----
The goal of a (derived) Strategy object
- based on the inbound 'Tick', calculate signals
- 'Signal' is at the symbol level which will be published
Note
----
This is designed to work both with historic and live data as
the Strategy object is agnostic to the data source,
since it obtains the 'Tick' object from MarketEvent message
"""
def __init__(
self, symbol_list, allocation, freq, positions,
start, end, warmup=0, fixed_allocation=True,
batch_size=10000
):
"""
Parameter:
----------
symbol_list (list): A list of Contract perm_tick (for data)
allocation (float): Dollar amount that this strategy is able to use
freq (conf.FREQ): Data Frequency type for this strategy (for data)
positions (dict of dict):
A dictionary with perm_tick and a dictionary of arguments
- pct_portfolio (float): percentage of the allocation
- rebalance (int): # of days to rebalance to pct_portfolio
- hard_stop (float): hard drawdown gate to close position
warmup (int): # of days to warmup the strategy
env_type (string): {'BACKTEST', 'PAPER', 'LIVE'}
which environment to run the strategy in
start, end (datetime):
Only for backtesting, to specify the range of data to test
"""
n = ceil(freq.one_day)
num_pos = len(positions)
# getting necessary parameters
self.symbol_list = symbol_list
self.freq = freq
self.warmup = warmup * n
if start:
self.start_dt = clean_timestamp(start)
if end:
self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1, days=1)
# allocation parameters for tracking portfolio
self.allocation = allocation
self.cash = allocation
self.commission = 0
self.fixed_allocation = fixed_allocation
pos_dict = {}
for perm_tick, v in positions.items():
# want to have position, must know its market ticks for decision
if perm_tick not in self.symbol_list:
self.symbol_list.append(perm_tick)
pos = Position(
perm_tick,
pct_portfolio=v.get('pct_portfolio', 1/num_pos),
rebalance=v.get('rebalance', 0) * n,
hard_stop=v.get('hard_stop', 0),
)
pos_dict[perm_tick] = pos
self.pos = pos_dict
# starting is always 0, it will increment itself every market tick
self.t = 0
self._hist = []
self.batch_size = batch_size
super().__init__(comp_type='STGY', required=['feed', 'exe'])
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError(
"Should implement calculate_signals()\n" + \
"By calling this method to calculate 'Signal' Events"
)
def subscriptions(self):
return [
('ack-reg-feed', self.id, self.on_ack_reg_feed),
('ack-dereg-feed', self.id, self.on_ack_dereg_feed),
('ack-reg-exe', self.id, self.on_ack_reg_exe),
('ack-dereg-exe', self.id, self.on_ack_dereg_exe),
('eod', self.id, self.on_eod),
('tick', self.id, self.on_market),
('fill', self.id, self.on_fill),
]
def update_data(self, ticks):
pass
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
def has_short(self, symbol):
return self.pos[symbol].has_short
@property
def nav(self):
"""Net Account Value / Net Liquidating Value"""
return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
@property
def avaliable_bp(self):
return self.total_bp - self.total_cost
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
# setting up progress bar
self._pbar = tqdm(
total=int(np.ceil(
pd.bdate_range(self.start_dt, self.end_dt).size
* np.ceil(self.freq.one_day)
)),
miniters=int(np.ceil(self.freq.one_day)),
unit=' tick<{}>'.format(self.freq.value),
)
# publish event to get started
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
def on_ack_dereg_feed(self, oid, body):
self.required['feed'] = False
def on_ack_dereg_exe(self, oid, body):
self.required['exe'] = False
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
# update the position first
self.pos[fill.symbol].on_fill(fill)
# getting data from the fill event
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing position
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finished doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq: return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
# publish generated signals
equity = self.total_bp
bp = copy(self.avaliable_bp) # current snapshot of buying power
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
# save old strategy performance history
self._pbar.update(1)
# if ticks.timestamp >= self.start_dt:
# self.basic_publish('next', sender=self.id)
if self.t >= self.warmup:
self._save_positions()
def on_order(self, order, lvl, bp):
"""Handling new order
- Orders are generated from signals
- will have to check currently available buying power before publishing
Parameter:
---------
order (Order Event)
lvl (str): Level of urgency for the order
This flag will be used to call corresponding callback
bp (float): The amount of available buying power
Return:
-------
used buying power (float)
"""
S = order.symbol
need_bp = order.quantity * self.ticks[S].close
if need_bp <= bp: # have enough buying power to place order
used_bp = need_bp
if lvl == 'hard_stop':
self.on_hard_stop(S)
elif lvl == 'rebalance':
self.on_rebalance(S)
self.pos[order.symbol].confirm_order(order)
logger.info(
'Publish Order={} for Strategy={}'
.format(order, self.id)
)
self.basic_publish('order', sender=self.id, order=order)
else:
used_bp = 0
return used_bp
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
def _calculate_signals(self):
# update existing position information
for pos in self.pos.values():
pos._calculate_signals()
self.calculate_signals()
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
because the warmup period is used for gathering necessary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {
'timestamp': self.ticks.timestamp, 't': self.t,
'cash': self.cash, 'commission': self.commission,
'nav': self.nav,
}
for k, v in self.pos.items():
output[str(k)+'_quantity'] = v.quantity
output[str(k)+'_mv'] = v.mv
self._hist.append(output)
| [
18,
19,
28,
30,
32
] |
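For context, a minimal sketch of a concrete subclass of the BaseStrategy above. The symbol 'SPY', the window lengths and the crossover rule are illustrative assumptions; LONG and EXIT are the signal constants the module imports from .conf:
class MovingAverageCross(BaseStrategy):
    def update_data(self, ticks):
        # keep a rolling history of closes for our one symbol
        self.closes = getattr(self, 'closes', [])
        self.closes.append(ticks['SPY'].close)

    def calculate_signals(self):
        if len(self.closes) < 20:
            return
        fast = sum(self.closes[-5:]) / 5.0
        slow = sum(self.closes[-20:]) / 20.0
        if fast > slow and not self.has_long('SPY'):
            self.generate_signal('SPY', LONG)
        elif fast < slow and self.has_long('SPY'):
            self.generate_signal('SPY', EXIT)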
531 | 3ae0149af78216d6cc85313ebaa6f7cd99185c05 | <mask token>
| def postfix(expression):
operators, stack = '+-*/', []
for item in expression.split():
if item not in operators:
stack.append(item)
else:
operand_1, operand_2 = stack.pop(), stack.pop()
stack.append(str(eval(operand_2 + item + operand_1)))
return int(float(stack.pop()))
| null | null | null | [
0,
1
] |
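A usage sketch for postfix() above, plus an eval-free variant; the OPS dispatch table is an addition of ours, not part of the original:
print(postfix("3 4 +"))              # 7
print(postfix("5 1 2 + 4 * + 3 -"))  # 14

import operator
OPS = {'+': operator.add, '-': operator.sub,
       '*': operator.mul, '/': operator.truediv}

def postfix_safe(expression):
    stack = []
    for item in expression.split():
        if item in OPS:
            b, a = stack.pop(), stack.pop()  # right operand pops first
            stack.append(OPS[item](a, b))
        else:
            stack.append(float(item))
    return int(stack.pop())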
532 | d95d899c6eae5a90c90d3d920ee40b38bf304805 | <mask token>
| <mask token>
if __name__ == '__main__':
pass
| #coding: utf-8
"""
1) Encontre em um texto os nomes próprios e os retorne em uma lista. Utilize o Regex (‘import re’) e a função findall(). Na versão básica, retorne todas as palavras que iniciam com maiúscula.
2) Apresente um plot de alguns segundos dos dados de acelerômetro do dataset:
https://archive.ics.uci.edu/ml/datasets/Activity+Recognition+from+Single+Chest-Mounted+Accelerometer#
Use a função read_csv() para abrir os arquivos
"""
if __name__ == "__main__":
pass | null | null | [
0,
1,
2
] |
533 | dd7ade05ef912f7c094883507768cc21f95f31f6 | <mask token>
| <mask token>
KEYS = ['CM', 'GM']
NOTES_FOR_KEY = {'CM': [21, 23, 24, 26, 28, 29, 31, 33, 35, 36, 38, 40, 41,
43, 45, 47, 48, 50, 52, 53, 55, 57, 59, 60, 62, 64, 65, 67, 69, 71, 72,
74, 76, 77, 79, 81, 83, 84, 86, 88, 89, 91, 93, 95, 96, 98, 100, 101,
103, 105, 107, 108], 'GM': [21, 23, 24, 26, 28, 30, 31, 33, 35, 36, 38,
40, 42, 43, 45, 47, 48, 50, 52, 54, 55, 57, 59, 60, 62, 64, 66, 67, 69,
71, 72, 74, 76, 78, 79, 81, 83, 84, 86, 88, 90, 91, 93, 95, 96, 98, 100,
102, 103, 105, 107, 108], 'DM': [], 'AM': [], 'EM': [], 'BM': [], 'FSM':
[], 'CSM': [], 'Am': [], 'Em': [], 'Bm': [], 'FSm': [], 'CSm': [],
'GSm': [], 'DSm': [], 'ASm': []}
TONIC_NOTE_FOR_KEY = {'CM': 60, 'GM': 67, 'DM': None, 'AM': None, 'EM':
None, 'BM': None, 'FSM': None, 'CSM': None, 'Am': None, 'Em': None,
'Bm': None, 'FSm': None, 'CSm': None, 'GSm': None, 'DSm': None, 'ASm': None
}
STEPS_FOR_CHORD = {'major_triad': [0, 4, 7]}
NOTE_IN_KEY_REWARD = 1
NOTE_IN_CHORDS_REWARD = 1
SUPER_CONSONANT_INTERVAL_REWARD = 3
CONSONANT_INTERVAL_REWARD = 2
SOMEWHAT_CONSONANT_INTERVAL_REWARD = 1
DISSONANT_INTERVAL_REWARD = -2
SOMEWHAT_DISSONANT_INTERVAL_REWARD = -1
CENTRICITY_FACTOR = 1
| """
A module for constants.
"""
# finish adding notes for keys and uncomment
KEYS = [
"CM",
"GM"
# ,
# "DM",
# "AM",
# "EM",
# "BM",
# "FSM",
# "CSM",
# "Am",
# "Em",
# "Bm",
# "FSm",
# "CSm",
# "GSm",
# "DSm",
# "ASm",
]
NOTES_FOR_KEY = {
"CM": [
21,
23,
24,
26,
28,
29,
31,
33,
35,
36,
38,
40,
41,
43,
45,
47,
48,
50,
52,
53,
55,
57,
59,
60,
62,
64,
65,
67,
69,
71,
72,
74,
76,
77,
79,
81,
83,
84,
86,
88,
89,
91,
93,
95,
96,
98,
100,
101,
103,
105,
107,
108,
],
"GM": [
21,
23,
24,
26,
28,
30,
31,
33,
35,
36,
38,
40,
42,
43,
45,
47,
48,
50,
52,
54,
55,
57,
59,
60,
62,
64,
66,
67,
69,
71,
72,
74,
76,
78,
79,
81,
83,
84,
86,
88,
90,
91,
93,
95,
96,
98,
100,
102,
103,
105,
107,
108,
],
"DM": [],
"AM": [],
"EM": [],
"BM": [],
"FSM": [],
"CSM": [],
"Am": [],
"Em": [],
"Bm": [],
"FSm": [],
"CSm": [],
"GSm": [],
"DSm": [],
"ASm": [],
}
TONIC_NOTE_FOR_KEY = {
"CM": 60,
"GM": 67,
"DM": None,
"AM": None,
"EM": None,
"BM": None,
"FSM": None,
"CSM": None,
"Am": None,
"Em": None,
"Bm": None,
"FSm": None,
"CSm": None,
"GSm": None,
"DSm": None,
"ASm": None,
}
# add more chords later
STEPS_FOR_CHORD = {"major_triad": [0, 4, 7]}
# constants for value function
# add more complex rewards
NOTE_IN_KEY_REWARD = 1
NOTE_IN_CHORDS_REWARD = 1
SUPER_CONSONANT_INTERVAL_REWARD = 3
CONSONANT_INTERVAL_REWARD = 2
SOMEWHAT_CONSONANT_INTERVAL_REWARD = 1
DISSONANT_INTERVAL_REWARD = -2
SOMEWHAT_DISSONANT_INTERVAL_REWARD = -1
CENTRICITY_FACTOR = 1 # reward is number of times note occurred before * CENTRICITY_FACTOR | [
0,
1,
2
] |
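The empty NOTES_FOR_KEY entries above (DM, AM, ...) can be generated instead of typed out. A sketch using pitch-class arithmetic over the MIDI range 21-108 that the existing lists cover; the helper name is ours:
MAJOR_STEPS = {0, 2, 4, 5, 7, 9, 11}  # semitone offsets of a major scale

def notes_for_major_key(tonic_midi, low=21, high=108):
    tonic_pc = tonic_midi % 12
    return [n for n in range(low, high + 1)
            if (n - tonic_pc) % 12 in MAJOR_STEPS]

# sanity check against the hand-written tables
assert notes_for_major_key(60) == NOTES_FOR_KEY["CM"]
assert notes_for_major_key(67) == NOTES_FOR_KEY["GM"]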
534 | 0c68bd65cac3c8b9fd080900a00991b2d19260ee | <mask token>
class PairMatcherTestCase(TestCase):
<mask token>
<mask token>
| <mask token>
class PairMatcherTestCase(TestCase):
<mask token>
def test_simple(self):
employees = EmployeeFactory.create_batch(41, company=self.company)
matcher = MaximumWeightGraphMatcher()
groups = matcher.match(self.company, employees)
print('\n'.join([','.join(e.user.username for e in group) for group in
groups]))
| <mask token>
class PairMatcherTestCase(TestCase):
def setUp(self):
self.company = CompanyFactory.create()
def test_simple(self):
employees = EmployeeFactory.create_batch(41, company=self.company)
matcher = MaximumWeightGraphMatcher()
groups = matcher.match(self.company, employees)
print('\n'.join([','.join(e.user.username for e in group) for group in
groups]))
| from django.test import TestCase
from core.factories import CompanyFactory, EmployeeFactory
from core.pair_matcher import MaximumWeightGraphMatcher
class PairMatcherTestCase(TestCase):
def setUp(self):
self.company = CompanyFactory.create()
def test_simple(self):
employees = EmployeeFactory.create_batch(41, company=self.company)
matcher = MaximumWeightGraphMatcher()
groups = matcher.match(self.company, employees)
print('\n'.join([','.join(e.user.username for e in group) for group in
groups]))
| null | [
1,
2,
3,
4
] |
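The test above only prints the grouping; a sketch of an actual assertion — that every employee lands in exactly one group (any group-size invariants would be matcher-specific assumptions):
def test_everyone_matched_once(self):
    employees = EmployeeFactory.create_batch(41, company=self.company)
    groups = MaximumWeightGraphMatcher().match(self.company, employees)
    matched = [e for group in groups for e in group]
    self.assertCountEqual(matched, employees)  # no one missing or duplicated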
535 | 32ed07a89a6f929a6c4b78fd79e687b85e01015b | <mask token>
class ForgotForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Change your Password')
class changepassword(FlaskForm):
password = PasswordField('Enter Password', validators=[DataRequired()])
submit = SubmitField('Change Password')
class ComplaintForm(FlaskForm):
fname = StringField('Full Name *', validators=[DataRequired()])
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
date = DateField('Date', validators=[DataRequired()])
degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (
'masters', 'Masters')], validators=[DataRequired()])
semester = SelectField(u'Semester*', choices=[('first', 'First'), (
'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (
'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (
'eighth', 'Eighth')], validators=[DataRequired()])
complaintcategory = SelectField(u'Complaint Category*', choices=[(
'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (
'academics', 'Academics'), ('management', 'Management'), ('faculty',
'Faculty'), ('library', 'Library')], validators=[DataRequired()])
message = TextAreaField('Enter Complaint Details', validators=[
DataRequired(), Length(max=100)])
submit = SubmitField('Submit')
class complaint_status(FlaskForm):
status = SelectField(u'Complaint Status', choices=[('Pending',
'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]
)
submit = SubmitField('Update')
| <mask token>
class LoginForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
password = PasswordField('Password*', validators=[DataRequired()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Login >>')
class ForgotForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Change your Password')
class changepassword(FlaskForm):
password = PasswordField('Enter Password', validators=[DataRequired()])
submit = SubmitField('Change Password')
class ComplaintForm(FlaskForm):
fname = StringField('Full Name *', validators=[DataRequired()])
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
date = DateField('Date', validators=[DataRequired()])
degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (
'masters', 'Masters')], validators=[DataRequired()])
semester = SelectField(u'Semester*', choices=[('first', 'First'), (
'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (
'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (
'eighth', 'Eighth')], validators=[DataRequired()])
complaintcategory = SelectField(u'Complaint Category*', choices=[(
'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (
'academics', 'Academics'), ('management', 'Management'), ('faculty',
'Faculty'), ('library', 'Library')], validators=[DataRequired()])
message = TextAreaField('Enter Complaint Details', validators=[
DataRequired(), Length(max=100)])
submit = SubmitField('Submit')
class complaint_status(FlaskForm):
status = SelectField(u'Complaint Status', choices=[('Pending',
'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]
)
submit = SubmitField('Update')
| <mask token>
class SignUpForm(FlaskForm):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
class LoginForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
password = PasswordField('Password*', validators=[DataRequired()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Login >>')
class ForgotForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Change your Password')
class changepassword(FlaskForm):
password = PasswordField('Enter Password', validators=[DataRequired()])
submit = SubmitField('Change Password')
class ComplaintForm(FlaskForm):
fname = StringField('Full Name *', validators=[DataRequired()])
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
date = DateField('Date', validators=[DataRequired()])
degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (
'masters', 'Masters')], validators=[DataRequired()])
semester = SelectField(u'Semester*', choices=[('first', 'First'), (
'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (
'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (
'eighth', 'Eighth')], validators=[DataRequired()])
complaintcategory = SelectField(u'Complaint Category*', choices=[(
'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (
'academics', 'Academics'), ('management', 'Management'), ('faculty',
'Faculty'), ('library', 'Library')], validators=[DataRequired()])
message = TextAreaField('Enter Complaint Details', validators=[
DataRequired(), Length(max=100)])
submit = SubmitField('Submit')
class complaint_status(FlaskForm):
status = SelectField(u'Complaint Status', choices=[('Pending',
'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]
)
submit = SubmitField('Update')
| <mask token>
class SignUpForm(FlaskForm):
id = StringField('ID*', validators=[DataRequired()])
fname = StringField('Full Name*', validators=[DataRequired()])
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
password = PasswordField('Password*', validators=[DataRequired()])
contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(
min=10, max=10)])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Sign Up >>')
class LoginForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
password = PasswordField('Password*', validators=[DataRequired()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Login >>')
class ForgotForm(FlaskForm):
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (
'stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Change your Password')
class changepassword(FlaskForm):
password = PasswordField('Enter Password', validators=[DataRequired()])
submit = SubmitField('Change Password')
class ComplaintForm(FlaskForm):
fname = StringField('Full Name *', validators=[DataRequired()])
email = EmailField('Email Id*', validators=[DataRequired(), Email()])
date = DateField('Date', validators=[DataRequired()])
degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (
'masters', 'Masters')], validators=[DataRequired()])
semester = SelectField(u'Semester*', choices=[('first', 'First'), (
'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (
'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (
'eighth', 'Eighth')], validators=[DataRequired()])
complaintcategory = SelectField(u'Complaint Category*', choices=[(
'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (
'academics', 'Academics'), ('management', 'Management'), ('faculty',
'Faculty'), ('library', 'Library')], validators=[DataRequired()])
message = TextAreaField('Enter Complaint Details', validators=[
DataRequired(), Length(max=100)])
submit = SubmitField('Submit')
class complaint_status(FlaskForm):
status = SelectField(u'Complaint Status', choices=[('Pending',
'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]
)
submit = SubmitField('Update')
| from flask_wtf import FlaskForm
from wtforms import StringField, SelectField,SubmitField, PasswordField, RadioField, MultipleFileField, SubmitField, TextAreaField
from wtforms.fields.html5 import EmailField, TelField, DateField
from wtforms.validators import DataRequired, Email, Length, InputRequired
class SignUpForm(FlaskForm):
id = StringField('ID*', validators=[DataRequired()])
fname = StringField('Full Name*', validators=[DataRequired()])
email = EmailField('Email Id*',validators=[DataRequired(), Email()])
password = PasswordField('Password*', validators=[DataRequired()])
contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(min=10, max=10)])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Sign Up >>')
class LoginForm(FlaskForm):
email = EmailField('Email Id*',validators=[DataRequired(), Email()])
password = PasswordField('Password*', validators=[DataRequired()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Login >>')
class ForgotForm(FlaskForm):
email = EmailField('Email Id*',validators=[DataRequired(), Email()])
design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])
submit = SubmitField('Change your Password')
class changepassword(FlaskForm):
password = PasswordField('Enter Password', validators=[DataRequired()])
submit = SubmitField('Change Password')
class ComplaintForm(FlaskForm):
fname = StringField('Full Name *', validators=[DataRequired()])
email = EmailField('Email Id*',validators=[DataRequired(), Email()])
date = DateField('Date', validators=[DataRequired()])
degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ('masters', 'Masters')], validators=[DataRequired()])
semester = SelectField(u'Semester*', choices=[('first', 'First'), ('second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ('fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ('eighth', 'Eighth')], validators=[DataRequired()])
complaintcategory = SelectField(u'Complaint Category*', choices=[('infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ('academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()])
message = TextAreaField('Enter Complaint Details', validators=[DataRequired(), Length(max=100)])
#file = MultipleFileField(u'Upload File')
submit = SubmitField('Submit')
class complaint_status(FlaskForm):
status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')])
submit = SubmitField('Update')
| [
8,
10,
11,
12,
14
] |
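A sketch of wiring the ComplaintForm above into a Flask view. The route, template name and secret key are illustrative; validate_on_submit() is the Flask-WTF call that checks both the POST method and the validators declared on the form:
from flask import Flask, render_template, redirect, url_for

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # Flask-WTF needs this for CSRF

@app.route('/complaint', methods=['GET', 'POST'])
def complaint():
    form = ComplaintForm()
    if form.validate_on_submit():
        # persist form.fname.data, form.email.data, form.message.data, ...
        return redirect(url_for('complaint'))
    return render_template('complaint.html', form=form)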
536 | 257f18db95e069c037341d2af372269e988b0a80 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('asset', '0001_initial')]
operations = [migrations.RemoveField(model_name='balance', name='title')]
| from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('asset', '0001_initial')]
operations = [migrations.RemoveField(model_name='balance', name='title')]
| # Generated by Django 3.1.2 on 2021-07-02 05:38
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('asset', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='balance',
name='title',
),
]
| [
0,
1,
2,
3,
4
] |
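For reference, this RemoveField migration is what manage.py makemigrations emits once the field is deleted from the model — roughly the change below (the remaining Balance field is hypothetical, only the removed title comes from the migration):
from django.db import models

class Balance(models.Model):
    # title = models.CharField(max_length=100)  # removed -> RemoveField
    amount = models.DecimalField(max_digits=12, decimal_places=2)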
537 | 74cb06ffa41748af431b46c9ff98eb91771a5015 | <mask token>
| <mask token>
for i in range(count):
print('Enter details for student', i + 1, 'below:')
rollNo = int(input('Rollno: '))
name = input('Name: ')
marks = float(input('Marks: '))
records = str(rollNo) + ',' + name + ',' + str(marks) + '\n'
fileObj.write(records)
fileObj.close()
| count = int(input('How many students are there in class? '))
fileObj = open('marks.txt', 'w')
for i in range(count):
print('Enter details for student', i + 1, 'below:')
rollNo = int(input('Rollno: '))
name = input('Name: ')
marks = float(input('Marks: '))
records = str(rollNo) + ',' + name + ',' + str(marks) + '\n'
fileObj.write(records)
fileObj.close()
| #Get roll numbers, name & marks of the students of a class(get from user) and store these details in a file- marks.txt
count = int(input("How many students are there in class? "))
fileObj = open('marks.txt',"w")
for i in range(count):
print("Enter details for student",(i+1),"below:")
rollNo = int(input("Rollno: "))
name = input("Name: ")
marks = float(input("Marks: "))
records = str(rollNo) + "," + name + "," + str(marks) + '\n'
fileObj.write(records)
fileObj.close() | null | [
0,
1,
2,
3
] |
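A companion sketch that reads marks.txt back, since the records written above are plain comma-separated lines (rollno,name,marks):
import csv

with open('marks.txt', newline='') as fh:
    for rollno, name, marks in csv.reader(fh):
        print(int(rollno), name, float(marks))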
538 | 955cf040aaf882328e31e6a943bce04cf721cb11 | <mask token>
def get_repo_url(repo):
url = repo.replace('upstream:', 'git://git.baserock.org/delta/')
url = url.replace('baserock:baserock/',
'git://git.baserock.org/baserock/baserock/')
url = url.replace('freedesktop:', 'git://anongit.freedesktop.org/')
url = url.replace('github:', 'git://github.com/')
url = url.replace('gnome:', 'git://git.gnome.org')
if url.endswith('.git'):
url = url[:-4]
return url
def get_repo_name(repo):
""" Convert URIs to strings that only contain digits, letters, _ and %.
NOTE: When changing the code of this function, make sure to also apply
the same to the quote_url() function of lorry. Otherwise the git tarballs
generated by lorry may no longer be found by morph.
"""
valid_chars = string.digits + string.ascii_letters + '%_'
transl = lambda x: x if x in valid_chars else '_'
return ''.join([transl(x) for x in get_repo_url(repo)])
def get_upstream_version(repo, ref):
try:
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
last_tag = check_output(['git', 'describe', '--abbrev=0',
'--tags', ref], stderr=fnull)[0:-1]
commits = check_output(['git', 'rev-list', last_tag + '..' +
ref, '--count'])
result = '%s (%s + %s commits)' % (ref[:8], last_tag, commits[0:-1])
except:
result = ref[:8] + ' ' + '(No tag found)'
return result
<mask token>
def copy_repo(repo, destdir):
"""Copies a cached repository into a directory using cp.
This also fixes up the repository afterwards, so that it can contain
code etc. It does not leave any given branch ready for use.
"""
call(['cp', '-a', repo, os.path.join(destdir, '.git')])
call(['git', 'config', 'core.bare', 'false'])
call(['git', 'config', '--unset', 'remote.origin.mirror'])
with open(os.devnull, 'w') as fnull:
call(['git', 'config', 'remote.origin.fetch',
'+refs/heads/*:refs/remotes/origin/*'], stdout=fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.url', repo])
call(['git', 'pack-refs', '--all', '--prune'])
with open(os.path.join(destdir, '.git', 'packed-refs'), 'r') as ref_fh:
pack_lines = ref_fh.read().split('\n')
with open(os.path.join(destdir, '.git', 'packed-refs'), 'w') as ref_fh:
ref_fh.write(pack_lines.pop(0) + '\n')
for refline in pack_lines:
if ' refs/remotes/' in refline:
continue
if ' refs/heads/' in refline:
sha, ref = refline[:40], refline[41:]
if ref.startswith('refs/heads/'):
ref = 'refs/remotes/origin/' + ref[11:]
refline = '%s %s' % (sha, ref)
ref_fh.write('%s\n' % refline)
with open(os.devnull, 'w') as fnull:
call(['git', 'remote', 'update', 'origin', '--prune'], stdout=fnull,
stderr=fnull)
<mask token>
def checkout_submodules(name, ref):
app.log(name, 'Git submodules')
with open('.gitmodules', 'r') as gitfile:
content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
io = StringIO.StringIO(content)
parser = ConfigParser.RawConfigParser()
parser.readfp(io)
for section in parser.sections():
submodule = re.sub('submodule "(.*)"', '\\1', section)
url = parser.get(section, 'url')
path = parser.get(section, 'path')
try:
commit = check_output(['git', 'ls-tree', ref, path])
fields = commit.split()
if len(fields) >= 2 and fields[1] == 'commit':
submodule_commit = commit.split()[2]
if len(submodule_commit) != 40:
raise Exception
fulldir = os.path.join(os.getcwd(), path)
checkout(submodule, url, submodule_commit, fulldir)
else:
app.log(name,
'Skipping submodule "%s" as %s:%s has a non-commit object for it'
% (submodule, url, path))
except:
app.log(name, 'ERROR: Git submodules problem')
raise SystemExit
| <mask token>
def get_repo_url(repo):
url = repo.replace('upstream:', 'git://git.baserock.org/delta/')
url = url.replace('baserock:baserock/',
'git://git.baserock.org/baserock/baserock/')
url = url.replace('freedesktop:', 'git://anongit.freedesktop.org/')
url = url.replace('github:', 'git://github.com/')
url = url.replace('gnome:', 'git://git.gnome.org')
if url.endswith('.git'):
url = url[:-4]
return url
def get_repo_name(repo):
""" Convert URIs to strings that only contain digits, letters, _ and %.
NOTE: When changing the code of this function, make sure to also apply
the same to the quote_url() function of lorry. Otherwise the git tarballs
generated by lorry may no longer be found by morph.
"""
valid_chars = string.digits + string.ascii_letters + '%_'
transl = lambda x: x if x in valid_chars else '_'
return ''.join([transl(x) for x in get_repo_url(repo)])
def get_upstream_version(repo, ref):
try:
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
last_tag = check_output(['git', 'describe', '--abbrev=0',
'--tags', ref], stderr=fnull)[0:-1]
commits = check_output(['git', 'rev-list', last_tag + '..' +
ref, '--count'])
result = '%s (%s + %s commits)' % (ref[:8], last_tag, commits[0:-1])
except:
result = ref[:8] + ' ' + '(No tag found)'
return result
def get_tree(this):
ref = this['ref']
gitdir = os.path.join(app.settings['gits'], get_repo_name(this['repo']))
if not os.path.exists(gitdir):
try:
url = app.settings['cache-server-url'] + 'repo=' + get_repo_url(
this['repo']) + '&ref=' + ref
response = urllib2.urlopen(url)
tree = json.loads(response.read().decode())['tree']
return tree
except:
app.log(this, 'WARNING: no tree from cache-server', ref)
mirror(this['name'], this['repo'])
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
stderr=fnull):
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
try:
tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
universal_newlines=True)[0:-1]
return tree
except:
app.log(this, 'ERROR: could not find tree for ref', ref)
raise SystemExit
def copy_repo(repo, destdir):
"""Copies a cached repository into a directory using cp.
This also fixes up the repository afterwards, so that it can contain
code etc. It does not leave any given branch ready for use.
"""
call(['cp', '-a', repo, os.path.join(destdir, '.git')])
call(['git', 'config', 'core.bare', 'false'])
call(['git', 'config', '--unset', 'remote.origin.mirror'])
with open(os.devnull, 'w') as fnull:
call(['git', 'config', 'remote.origin.fetch',
'+refs/heads/*:refs/remotes/origin/*'], stdout=fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.url', repo])
call(['git', 'pack-refs', '--all', '--prune'])
with open(os.path.join(destdir, '.git', 'packed-refs'), 'r') as ref_fh:
pack_lines = ref_fh.read().split('\n')
with open(os.path.join(destdir, '.git', 'packed-refs'), 'w') as ref_fh:
ref_fh.write(pack_lines.pop(0) + '\n')
for refline in pack_lines:
if ' refs/remotes/' in refline:
continue
if ' refs/heads/' in refline:
sha, ref = refline[:40], refline[41:]
if ref.startswith('refs/heads/'):
ref = 'refs/remotes/origin/' + ref[11:]
refline = '%s %s' % (sha, ref)
ref_fh.write('%s\n' % refline)
with open(os.devnull, 'w') as fnull:
call(['git', 'remote', 'update', 'origin', '--prune'], stdout=fnull,
stderr=fnull)
<mask token>
def checkout(name, repo, ref, checkoutdir):
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
if not os.path.exists(gitdir):
mirror(name, repo)
app.log(name, 'Upstream version:', get_upstream_version(repo, ref))
app.log(name, 'Git checkout %s in %s' % (repo, checkoutdir))
with app.chdir(checkoutdir), open(os.devnull, 'w') as fnull:
copy_repo(gitdir, checkoutdir)
if call(['git', 'checkout', ref], stdout=fnull, stderr=fnull):
app.log(name, 'ERROR: git checkout failed for', ref)
raise SystemExit
if os.path.exists('.gitmodules'):
checkout_submodules(name, ref)
utils.set_mtime_recursively(checkoutdir)
def checkout_submodules(name, ref):
app.log(name, 'Git submodules')
with open('.gitmodules', 'r') as gitfile:
content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
io = StringIO.StringIO(content)
parser = ConfigParser.RawConfigParser()
parser.readfp(io)
for section in parser.sections():
submodule = re.sub('submodule "(.*)"', '\\1', section)
url = parser.get(section, 'url')
path = parser.get(section, 'path')
try:
commit = check_output(['git', 'ls-tree', ref, path])
fields = commit.split()
if len(fields) >= 2 and fields[1] == 'commit':
submodule_commit = commit.split()[2]
if len(submodule_commit) != 40:
raise Exception
fulldir = os.path.join(os.getcwd(), path)
checkout(submodule, url, submodule_commit, fulldir)
else:
                app.log(name,
                    'Skipping submodule "%s" as %s:%s has a non-commit object for it'
                    % (submodule, url, path))
except:
app.log(name, 'ERROR: Git submodules problem')
raise SystemExit
| <mask token>
def get_repo_url(repo):
url = repo.replace('upstream:', 'git://git.baserock.org/delta/')
url = url.replace('baserock:baserock/',
'git://git.baserock.org/baserock/baserock/')
url = url.replace('freedesktop:', 'git://anongit.freedesktop.org/')
url = url.replace('github:', 'git://github.com/')
url = url.replace('gnome:', 'git://git.gnome.org')
if url.endswith('.git'):
url = url[:-4]
return url
def get_repo_name(repo):
""" Convert URIs to strings that only contain digits, letters, _ and %.
NOTE: When changing the code of this function, make sure to also apply
the same to the quote_url() function of lorry. Otherwise the git tarballs
generated by lorry may no longer be found by morph.
"""
valid_chars = string.digits + string.ascii_letters + '%_'
transl = lambda x: x if x in valid_chars else '_'
return ''.join([transl(x) for x in get_repo_url(repo)])
def get_upstream_version(repo, ref):
try:
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
last_tag = check_output(['git', 'describe', '--abbrev=0',
'--tags', ref], stderr=fnull)[0:-1]
commits = check_output(['git', 'rev-list', last_tag + '..' +
ref, '--count'])
result = '%s (%s + %s commits)' % (ref[:8], last_tag, commits[0:-1])
except:
result = ref[:8] + ' ' + '(No tag found)'
return result
def get_tree(this):
ref = this['ref']
gitdir = os.path.join(app.settings['gits'], get_repo_name(this['repo']))
if not os.path.exists(gitdir):
try:
url = app.settings['cache-server-url'] + 'repo=' + get_repo_url(
this['repo']) + '&ref=' + ref
            response = urllib2.urlopen(url)  # urllib2 responses lack context-manager support in Python 2
            tree = json.loads(response.read().decode())['tree']
return tree
except:
app.log(this, 'WARNING: no tree from cache-server', ref)
mirror(this['name'], this['repo'])
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
stderr=fnull):
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
try:
tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
universal_newlines=True)[0:-1]
return tree
except:
app.log(this, 'ERROR: could not find tree for ref', ref)
raise SystemExit
def copy_repo(repo, destdir):
"""Copies a cached repository into a directory using cp.
This also fixes up the repository afterwards, so that it can contain
code etc. It does not leave any given branch ready for use.
"""
call(['cp', '-a', repo, os.path.join(destdir, '.git')])
call(['git', 'config', 'core.bare', 'false'])
call(['git', 'config', '--unset', 'remote.origin.mirror'])
with open(os.devnull, 'w') as fnull:
call(['git', 'config', 'remote.origin.fetch',
'+refs/heads/*:refs/remotes/origin/*'], stdout=fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.url', repo])
call(['git', 'pack-refs', '--all', '--prune'])
with open(os.path.join(destdir, '.git', 'packed-refs'), 'r') as ref_fh:
pack_lines = ref_fh.read().split('\n')
with open(os.path.join(destdir, '.git', 'packed-refs'), 'w') as ref_fh:
ref_fh.write(pack_lines.pop(0) + '\n')
for refline in pack_lines:
if ' refs/remotes/' in refline:
continue
if ' refs/heads/' in refline:
sha, ref = refline[:40], refline[41:]
if ref.startswith('refs/heads/'):
ref = 'refs/remotes/origin/' + ref[11:]
refline = '%s %s' % (sha, ref)
ref_fh.write('%s\n' % refline)
with open(os.devnull, 'w') as fnull:
call(['git', 'remote', 'update', 'origin', '--prune'], stdout=fnull,
stderr=fnull)
def mirror(name, repo):
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
repo_url = get_repo_url(repo)
try:
os.makedirs(gitdir)
tar_file = get_repo_name(repo_url) + '.tar'
app.log(name, 'Try fetching tarball %s' % tar_file)
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
            call(['wget', os.path.join(app.settings['tar-url'], tar_file)],
                 stdout=fnull, stderr=fnull)  # tarball base URL assumed to live in app.settings like the other keys
call(['tar', 'xf', tar_file], stdout=fnull, stderr=fnull)
os.remove(tar_file)
call(['git', 'config', 'remote.origin.url', repo_url], stdout=
fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.mirror', 'true'], stdout=
fnull, stderr=fnull)
if call(['git', 'config', 'remote.origin.fetch',
'+refs/*:refs/*'], stdout=fnull, stderr=fnull) != 0:
raise BaseException('Did not get a valid git repo')
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
except:
app.log(name, 'Using git clone from ', repo_url)
try:
with open(os.devnull, 'w') as fnull:
call(['git', 'clone', '--mirror', '-n', repo_url, gitdir],
stdout=fnull, stderr=fnull)
except:
app.log(name, 'ERROR: failed to clone', repo)
raise SystemExit
app.log(name, 'Git repo is mirrored at', gitdir)
<mask token>
def checkout(name, repo, ref, checkoutdir):
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
if not os.path.exists(gitdir):
mirror(name, repo)
app.log(name, 'Upstream version:', get_upstream_version(repo, ref))
app.log(name, 'Git checkout %s in %s' % (repo, checkoutdir))
with app.chdir(checkoutdir), open(os.devnull, 'w') as fnull:
copy_repo(gitdir, checkoutdir)
if call(['git', 'checkout', ref], stdout=fnull, stderr=fnull):
app.log(name, 'ERROR: git checkout failed for', ref)
raise SystemExit
if os.path.exists('.gitmodules'):
checkout_submodules(name, ref)
utils.set_mtime_recursively(checkoutdir)
def checkout_submodules(name, ref):
app.log(name, 'Git submodules')
with open('.gitmodules', 'r') as gitfile:
content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
io = StringIO.StringIO(content)
parser = ConfigParser.RawConfigParser()
parser.readfp(io)
for section in parser.sections():
submodule = re.sub('submodule "(.*)"', '\\1', section)
url = parser.get(section, 'url')
path = parser.get(section, 'path')
try:
commit = check_output(['git', 'ls-tree', ref, path])
fields = commit.split()
if len(fields) >= 2 and fields[1] == 'commit':
submodule_commit = commit.split()[2]
if len(submodule_commit) != 40:
raise Exception
fulldir = os.path.join(os.getcwd(), path)
checkout(submodule, url, submodule_commit, fulldir)
else:
                app.log(name,
                    'Skipping submodule "%s" as %s:%s has a non-commit object for it'
                    % (submodule, url, path))
except:
app.log(name, 'ERROR: Git submodules problem')
raise SystemExit
| import os
import app
import re
from subprocess import call
from subprocess import check_output
import string
import definitions
import urllib2
import json
import utils
import ConfigParser
import StringIO
def get_repo_url(repo):
url = repo.replace('upstream:', 'git://git.baserock.org/delta/')
url = url.replace('baserock:baserock/',
'git://git.baserock.org/baserock/baserock/')
url = url.replace('freedesktop:', 'git://anongit.freedesktop.org/')
url = url.replace('github:', 'git://github.com/')
url = url.replace('gnome:', 'git://git.gnome.org')
if url.endswith('.git'):
url = url[:-4]
return url
def get_repo_name(repo):
""" Convert URIs to strings that only contain digits, letters, _ and %.
NOTE: When changing the code of this function, make sure to also apply
the same to the quote_url() function of lorry. Otherwise the git tarballs
generated by lorry may no longer be found by morph.
"""
valid_chars = string.digits + string.ascii_letters + '%_'
transl = lambda x: x if x in valid_chars else '_'
return ''.join([transl(x) for x in get_repo_url(repo)])
def get_upstream_version(repo, ref):
try:
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
last_tag = check_output(['git', 'describe', '--abbrev=0',
'--tags', ref], stderr=fnull)[0:-1]
commits = check_output(['git', 'rev-list', last_tag + '..' +
ref, '--count'])
result = '%s (%s + %s commits)' % (ref[:8], last_tag, commits[0:-1])
except:
result = ref[:8] + ' ' + '(No tag found)'
return result
def get_tree(this):
ref = this['ref']
gitdir = os.path.join(app.settings['gits'], get_repo_name(this['repo']))
if not os.path.exists(gitdir):
try:
url = app.settings['cache-server-url'] + 'repo=' + get_repo_url(
this['repo']) + '&ref=' + ref
            response = urllib2.urlopen(url)  # urllib2 responses lack context-manager support in Python 2
            tree = json.loads(response.read().decode())['tree']
return tree
except:
app.log(this, 'WARNING: no tree from cache-server', ref)
mirror(this['name'], this['repo'])
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
stderr=fnull):
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
try:
tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
universal_newlines=True)[0:-1]
return tree
except:
app.log(this, 'ERROR: could not find tree for ref', ref)
raise SystemExit
def copy_repo(repo, destdir):
"""Copies a cached repository into a directory using cp.
This also fixes up the repository afterwards, so that it can contain
code etc. It does not leave any given branch ready for use.
"""
call(['cp', '-a', repo, os.path.join(destdir, '.git')])
call(['git', 'config', 'core.bare', 'false'])
call(['git', 'config', '--unset', 'remote.origin.mirror'])
with open(os.devnull, 'w') as fnull:
call(['git', 'config', 'remote.origin.fetch',
'+refs/heads/*:refs/remotes/origin/*'], stdout=fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.url', repo])
call(['git', 'pack-refs', '--all', '--prune'])
with open(os.path.join(destdir, '.git', 'packed-refs'), 'r') as ref_fh:
pack_lines = ref_fh.read().split('\n')
with open(os.path.join(destdir, '.git', 'packed-refs'), 'w') as ref_fh:
ref_fh.write(pack_lines.pop(0) + '\n')
for refline in pack_lines:
if ' refs/remotes/' in refline:
continue
if ' refs/heads/' in refline:
sha, ref = refline[:40], refline[41:]
if ref.startswith('refs/heads/'):
ref = 'refs/remotes/origin/' + ref[11:]
refline = '%s %s' % (sha, ref)
ref_fh.write('%s\n' % refline)
with open(os.devnull, 'w') as fnull:
call(['git', 'remote', 'update', 'origin', '--prune'], stdout=fnull,
stderr=fnull)
def mirror(name, repo):
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
repo_url = get_repo_url(repo)
try:
os.makedirs(gitdir)
tar_file = get_repo_name(repo_url) + '.tar'
app.log(name, 'Try fetching tarball %s' % tar_file)
with app.chdir(gitdir), open(os.devnull, 'w') as fnull:
            call(['wget', os.path.join(app.settings['tar-url'], tar_file)],
                 stdout=fnull, stderr=fnull)  # tarball base URL assumed to live in app.settings like the other keys
call(['tar', 'xf', tar_file], stdout=fnull, stderr=fnull)
os.remove(tar_file)
call(['git', 'config', 'remote.origin.url', repo_url], stdout=
fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.mirror', 'true'], stdout=
fnull, stderr=fnull)
if call(['git', 'config', 'remote.origin.fetch',
'+refs/*:refs/*'], stdout=fnull, stderr=fnull) != 0:
raise BaseException('Did not get a valid git repo')
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
except:
app.log(name, 'Using git clone from ', repo_url)
try:
with open(os.devnull, 'w') as fnull:
call(['git', 'clone', '--mirror', '-n', repo_url, gitdir],
stdout=fnull, stderr=fnull)
except:
app.log(name, 'ERROR: failed to clone', repo)
raise SystemExit
app.log(name, 'Git repo is mirrored at', gitdir)
def fetch(repo):
with app.chdir(repo), open(os.devnull, 'w') as fnull:
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
def checkout(name, repo, ref, checkoutdir):
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
if not os.path.exists(gitdir):
mirror(name, repo)
app.log(name, 'Upstream version:', get_upstream_version(repo, ref))
app.log(name, 'Git checkout %s in %s' % (repo, checkoutdir))
with app.chdir(checkoutdir), open(os.devnull, 'w') as fnull:
copy_repo(gitdir, checkoutdir)
if call(['git', 'checkout', ref], stdout=fnull, stderr=fnull):
app.log(name, 'ERROR: git checkout failed for', ref)
raise SystemExit
if os.path.exists('.gitmodules'):
checkout_submodules(name, ref)
utils.set_mtime_recursively(checkoutdir)
def checkout_submodules(name, ref):
app.log(name, 'Git submodules')
with open('.gitmodules', 'r') as gitfile:
content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
io = StringIO.StringIO(content)
parser = ConfigParser.RawConfigParser()
parser.readfp(io)
for section in parser.sections():
submodule = re.sub('submodule "(.*)"', '\\1', section)
url = parser.get(section, 'url')
path = parser.get(section, 'path')
try:
commit = check_output(['git', 'ls-tree', ref, path])
fields = commit.split()
if len(fields) >= 2 and fields[1] == 'commit':
submodule_commit = commit.split()[2]
if len(submodule_commit) != 40:
raise Exception
fulldir = os.path.join(os.getcwd(), path)
checkout(submodule, url, submodule_commit, fulldir)
else:
                app.log(name,
                    'Skipping submodule "%s" as %s:%s has a non-commit object for it'
                    % (submodule, url, path))
except:
app.log(name, 'ERROR: Git submodules problem')
raise SystemExit
| #!/usr/bin/env python3
#
# Copyright (C) 2011-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# =*= License: GPL-2 =*=
import os
import app
import re
from subprocess import call
from subprocess import check_output
import string
import definitions
import urllib2
import json
import utils
import ConfigParser
import StringIO
def get_repo_url(repo):
url = repo.replace('upstream:', 'git://git.baserock.org/delta/')
url = url.replace('baserock:baserock/',
'git://git.baserock.org/baserock/baserock/')
url = url.replace('freedesktop:', 'git://anongit.freedesktop.org/')
url = url.replace('github:', 'git://github.com/')
url = url.replace('gnome:', 'git://git.gnome.org')
if url.endswith('.git'):
url = url[:-4]
return url
def get_repo_name(repo):
''' Convert URIs to strings that only contain digits, letters, _ and %.
NOTE: When changing the code of this function, make sure to also apply
the same to the quote_url() function of lorry. Otherwise the git tarballs
generated by lorry may no longer be found by morph.
'''
valid_chars = string.digits + string.ascii_letters + '%_'
transl = lambda x: x if x in valid_chars else '_'
return ''.join([transl(x) for x in get_repo_url(repo)])
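# Illustrative example (editor's note, not part of the original module): every
# character of the resolved URL outside [0-9A-Za-z%_] becomes '_', so
#     get_repo_name('upstream:linux')
# returns 'git___git_baserock_org_delta_linux'.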
def get_upstream_version(repo, ref):
try:
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
with app.chdir(gitdir), open(os.devnull, "w") as fnull:
last_tag = check_output(['git', 'describe', '--abbrev=0',
'--tags', ref], stderr=fnull)[0:-1]
commits = check_output(['git', 'rev-list', last_tag + '..' + ref,
'--count'])
result = "%s (%s + %s commits)" % (ref[:8], last_tag, commits[0:-1])
except:
result = ref[:8] + " " + "(No tag found)"
return result
def get_tree(this):
ref = this['ref']
gitdir = os.path.join(app.settings['gits'], get_repo_name(this['repo']))
if not os.path.exists(gitdir):
try:
url = (app.settings['cache-server-url'] + 'repo='
+ get_repo_url(this['repo']) + '&ref=' + ref)
            response = urllib2.urlopen(url)  # urllib2 responses lack context-manager support in Python 2
            tree = json.loads(response.read().decode())['tree']
return tree
except:
app.log(this, 'WARNING: no tree from cache-server', ref)
mirror(this['name'], this['repo'])
with app.chdir(gitdir), open(os.devnull, "w") as fnull:
if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
stderr=fnull):
# can't resolve this ref. is it upstream?
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
try:
tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
universal_newlines=True)[0:-1]
return tree
except:
# either we don't have a git dir, or ref is not unique
# or ref does not exist
app.log(this, 'ERROR: could not find tree for ref', ref)
raise SystemExit
def copy_repo(repo, destdir):
'''Copies a cached repository into a directory using cp.
This also fixes up the repository afterwards, so that it can contain
code etc. It does not leave any given branch ready for use.
'''
# core.bare should be false so that git believes work trees are possible
# we do not want the origin remote to behave as a mirror for pulls
# we want a traditional refs/heads -> refs/remotes/origin ref mapping
# set the origin url to the cached repo so that we can quickly clean up
# by packing the refs, we can then edit then en-masse easily
call(['cp', '-a', repo, os.path.join(destdir, '.git')])
call(['git', 'config', 'core.bare', 'false'])
call(['git', 'config', '--unset', 'remote.origin.mirror'])
with open(os.devnull, "w") as fnull:
call(['git', 'config', 'remote.origin.fetch',
'+refs/heads/*:refs/remotes/origin/*'],
stdout=fnull,
stderr=fnull)
call(['git', 'config', 'remote.origin.url', repo])
call(['git', 'pack-refs', '--all', '--prune'])
# turn refs/heads/* into refs/remotes/origin/* in the packed refs
# so that the new copy behaves more like a traditional clone.
with open(os.path.join(destdir, ".git", "packed-refs"), "r") as ref_fh:
pack_lines = ref_fh.read().split("\n")
with open(os.path.join(destdir, ".git", "packed-refs"), "w") as ref_fh:
ref_fh.write(pack_lines.pop(0) + "\n")
for refline in pack_lines:
if ' refs/remotes/' in refline:
continue
if ' refs/heads/' in refline:
sha, ref = refline[:40], refline[41:]
if ref.startswith("refs/heads/"):
ref = "refs/remotes/origin/" + ref[11:]
refline = "%s %s" % (sha, ref)
ref_fh.write("%s\n" % (refline))
# Finally run a remote update to clear up the refs ready for use.
with open(os.devnull, "w") as fnull:
call(['git', 'remote', 'update', 'origin', '--prune'], stdout=fnull,
stderr=fnull)
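# Illustrative example (editor's note): the packed-refs rewrite above turns a
# cached head such as
#     3f786850e387550fdab836ed7e6dc881de23001b refs/heads/master
# into
#     3f786850e387550fdab836ed7e6dc881de23001b refs/remotes/origin/master
# so the copied repository behaves like a conventional clone of the cache.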
def mirror(name, repo):
# try tarball first
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
repo_url = get_repo_url(repo)
try:
os.makedirs(gitdir)
tar_file = get_repo_name(repo_url) + '.tar'
app.log(name, 'Try fetching tarball %s' % tar_file)
with app.chdir(gitdir), open(os.devnull, "w") as fnull:
            call(['wget', os.path.join(app.settings['tar-url'], tar_file)],
                 stdout=fnull, stderr=fnull)  # tarball base URL assumed to live in app.settings like the other keys
call(['tar', 'xf', tar_file], stdout=fnull, stderr=fnull)
os.remove(tar_file)
call(['git', 'config', 'remote.origin.url', repo_url],
stdout=fnull, stderr=fnull)
call(['git', 'config', 'remote.origin.mirror', 'true'],
stdout=fnull, stderr=fnull)
if call(['git', 'config', 'remote.origin.fetch',
'+refs/*:refs/*'],
stdout=fnull, stderr=fnull) != 0:
raise BaseException('Did not get a valid git repo')
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
except:
app.log(name, 'Using git clone from ', repo_url)
try:
with open(os.devnull, "w") as fnull:
call(['git', 'clone', '--mirror', '-n', repo_url, gitdir],
stdout=fnull, stderr=fnull)
except:
app.log(name, 'ERROR: failed to clone', repo)
raise SystemExit
app.log(name, 'Git repo is mirrored at', gitdir)
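# Illustrative example (editor's note): the tarball name is derived via
# get_repo_name(), so for 'git://git.baserock.org/delta/linux' mirror() first
# looks for 'git___git_baserock_org_delta_linux.tar' and only falls back to a
# plain `git clone --mirror` if the tarball fetch fails.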
def fetch(repo):
with app.chdir(repo), open(os.devnull, "w") as fnull:
call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)
def checkout(name, repo, ref, checkoutdir):
gitdir = os.path.join(app.settings['gits'], get_repo_name(repo))
if not os.path.exists(gitdir):
mirror(name, repo)
app.log(name, 'Upstream version:', get_upstream_version(repo, ref))
app.log(name, 'Git checkout %s in %s' % (repo, checkoutdir))
# checkout the required version of this from git
with app.chdir(checkoutdir), open(os.devnull, "w") as fnull:
copy_repo(gitdir, checkoutdir)
if call(['git', 'checkout', ref], stdout=fnull, stderr=fnull):
app.log(name, 'ERROR: git checkout failed for', ref)
raise SystemExit
if os.path.exists('.gitmodules'):
checkout_submodules(name, ref)
utils.set_mtime_recursively(checkoutdir)
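# Editor's note: checkout() ties the helpers above together -- ensure a local
# mirror exists, copy it into checkoutdir, check out `ref`, then recurse into
# any submodules recorded in .gitmodules.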
def checkout_submodules(name, ref):
app.log(name, 'Git submodules')
with open('.gitmodules', "r") as gitfile:
# drop indentation in sections, as RawConfigParser cannot handle it
content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
io = StringIO.StringIO(content)
parser = ConfigParser.RawConfigParser()
parser.readfp(io)
for section in parser.sections():
# validate section name against the 'submodule "foo"' pattern
submodule = re.sub(r'submodule "(.*)"', r'\1', section)
url = parser.get(section, 'url')
path = parser.get(section, 'path')
try:
# list objects in the parent repo tree to find the commit
# object that corresponds to the submodule
commit = check_output(['git', 'ls-tree', ref, path])
# read the commit hash from the output
fields = commit.split()
if len(fields) >= 2 and fields[1] == 'commit':
submodule_commit = commit.split()[2]
# fail if the commit hash is invalid
if len(submodule_commit) != 40:
raise Exception
fulldir = os.path.join(os.getcwd(), path)
checkout(submodule, url, submodule_commit, fulldir)
else:
                app.log(name, 'Skipping submodule "%s" as %s:%s has '
                        'a non-commit object for it' % (submodule, url, path))
except:
app.log(name, "ERROR: Git submodules problem")
raise SystemExit
| [
5,
7,
8,
10,
11
] |
539 | 68a503b2a94304530e20d79baf9fb094024ba67e | <mask token>
| <mask token>
admin.autodiscover()
<mask token>
dajaxice_autodiscover()
<mask token>
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| <mask token>
admin.autodiscover()
<mask token>
dajaxice_autodiscover()
<mask token>
urlpatterns = patterns('', url('^$', views.IndexView.as_view(), name=
'index'), url('^play/$', views.index, name='play'), url('^compose/$',
views.compose, name='compose'), url('^random/$', views.random, name=
'random'), url('^play/(?P<pk>\\d+)/$', views.DetailView.as_view(), name
='quiz'), url('^compose/(?P<pk>\\d+)/$', views.UpdateView.as_view()),
url('^clip/(?P<clip_id>\\d+)/$', views.clip, name='clip'), url(
'^accounts/login/$', 'django.contrib.auth.views.login', {
'template_name': 'login.html', 'extra_context': {'next': '/'}}, name=
'login'), url('^accounts/logout/$', 'django.contrib.auth.views.logout',
{'next_page': '/'}, name='logout'), url(dajaxice_config.dajaxice_url,
include('dajaxice.urls')), url('^admin/doc/', include(
'django.contrib.admindocs.urls')), url('^admin/', include(admin.site.urls))
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from dajaxice.core import dajaxice_autodiscover, dajaxice_config
dajaxice_autodiscover()
from spoticle import views
urlpatterns = patterns('', url('^$', views.IndexView.as_view(), name=
'index'), url('^play/$', views.index, name='play'), url('^compose/$',
views.compose, name='compose'), url('^random/$', views.random, name=
'random'), url('^play/(?P<pk>\\d+)/$', views.DetailView.as_view(), name
='quiz'), url('^compose/(?P<pk>\\d+)/$', views.UpdateView.as_view()),
url('^clip/(?P<clip_id>\\d+)/$', views.clip, name='clip'), url(
'^accounts/login/$', 'django.contrib.auth.views.login', {
'template_name': 'login.html', 'extra_context': {'next': '/'}}, name=
'login'), url('^accounts/logout/$', 'django.contrib.auth.views.logout',
{'next_page': '/'}, name='logout'), url(dajaxice_config.dajaxice_url,
include('dajaxice.urls')), url('^admin/doc/', include(
'django.contrib.admindocs.urls')), url('^admin/', include(admin.site.urls))
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from dajaxice.core import dajaxice_autodiscover, dajaxice_config
dajaxice_autodiscover()
from spoticle import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'spoticle.views.home', name='home'),
# url(r'^spoticle/', include('spoticle.foo.urls')),
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^play/$', views.index, name='play'),
url(r'^compose/$', views.compose, name='compose'),
url(r'^random/$', views.random, name='random'),
url(r'^play/(?P<pk>\d+)/$', views.DetailView.as_view(), name='quiz'),
url(r'^compose/(?P<pk>\d+)/$', views.UpdateView.as_view()),
url(r'^clip/(?P<clip_id>\d+)/$', views.clip, name='clip'),
# Auth
url(r'^accounts/login/$', 'django.contrib.auth.views.login', { 'template_name': 'login.html', 'extra_context': { 'next': '/' }}, name='login'),
url(r'^accounts/logout/$', 'django.contrib.auth.views.logout', { 'next_page': '/' }, name='logout'),
url(dajaxice_config.dajaxice_url, include('dajaxice.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
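# Illustrative usage (editor's note, not part of the original urls.py): the
# named patterns above can be reversed elsewhere in the project, e.g.
#     from django.core.urlresolvers import reverse
#     reverse('quiz', args=[42])   # -> '/play/42/'
#     reverse('clip', args=[7])    # -> '/clip/7/'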
| [
0,
1,
2,
3,
4
] |
540 | 81dec10686b521dc9400a209caabc1601efd2a88 | <mask token>
@six.add_metaclass(abc.ABCMeta)
class Hal:
def __init__(self, configpath):
self.configpath = configpath
dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
path.join(dir_path, f)) and f.lower().endswith('.py')]
self.responses = []
self.libraries = []
for f in lib_files:
try:
module_name = 'hal.libraries.' + f[:-3]
module = importlib.import_module(module_name)
for name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and issubclass(obj, HalLibrary
) and name != 'HalLibrary' and not inspect.isabstract(
obj):
self.libraries.append(obj)
except:
self.add_response('Error loading library {}'.format(f))
raise
<mask token>
<mask token>
@abc.abstractmethod
    def display_help(self, help_content):
""" Present some information to the user """
pass
<mask token>
<mask token>
| <mask token>
@six.add_metaclass(abc.ABCMeta)
class Hal:
def __init__(self, configpath):
self.configpath = configpath
dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
path.join(dir_path, f)) and f.lower().endswith('.py')]
self.responses = []
self.libraries = []
for f in lib_files:
try:
module_name = 'hal.libraries.' + f[:-3]
module = importlib.import_module(module_name)
for name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and issubclass(obj, HalLibrary
) and name != 'HalLibrary' and not inspect.isabstract(
obj):
self.libraries.append(obj)
except:
self.add_response('Error loading library {}'.format(f))
raise
<mask token>
def say_all(self):
response = '\n'.join(self.responses)
return response
@abc.abstractmethod
    def display_help(self, help_content):
""" Present some information to the user """
pass
def greet(self):
hour = datetime.datetime.now().hour
        greeting = 'Good evening'
if hour < 12:
greeting = 'Good morning'
elif 12 <= hour < 18:
greeting = 'Good afternoon'
self.add_response('{}. What can I help you with?'.format(greeting))
<mask token>
| <mask token>
@six.add_metaclass(abc.ABCMeta)
class Hal:
def __init__(self, configpath):
self.configpath = configpath
dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
path.join(dir_path, f)) and f.lower().endswith('.py')]
self.responses = []
self.libraries = []
for f in lib_files:
try:
module_name = 'hal.libraries.' + f[:-3]
module = importlib.import_module(module_name)
for name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and issubclass(obj, HalLibrary
) and name != 'HalLibrary' and not inspect.isabstract(
obj):
self.libraries.append(obj)
except:
self.add_response('Error loading library {}'.format(f))
raise
def add_response(self, text):
self.responses.append(text)
def say_all(self):
response = '\n'.join(self.responses)
return response
@abc.abstractmethod
    def display_help(self, help_content):
""" Present some information to the user """
pass
def greet(self):
hour = datetime.datetime.now().hour
        greeting = 'Good evening'
if hour < 12:
greeting = 'Good morning'
elif 12 <= hour < 18:
greeting = 'Good afternoon'
self.add_response('{}. What can I help you with?'.format(greeting))
<mask token>
| import abc
import datetime
import importlib
import inspect
import os
import re
import six
from .library import HalLibrary
@six.add_metaclass(abc.ABCMeta)
class Hal:
def __init__(self, configpath):
self.configpath = configpath
dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
path.join(dir_path, f)) and f.lower().endswith('.py')]
self.responses = []
self.libraries = []
for f in lib_files:
try:
module_name = 'hal.libraries.' + f[:-3]
module = importlib.import_module(module_name)
for name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and issubclass(obj, HalLibrary
) and name != 'HalLibrary' and not inspect.isabstract(
obj):
self.libraries.append(obj)
except:
self.add_response('Error loading library {}'.format(f))
raise
def add_response(self, text):
self.responses.append(text)
def say_all(self):
response = '\n'.join(self.responses)
return response
@abc.abstractmethod
    def display_help(self, help_content):
""" Present some information to the user """
pass
def greet(self):
hour = datetime.datetime.now().hour
        greeting = 'Good evening'
if hour < 12:
greeting = 'Good morning'
elif 12 <= hour < 18:
greeting = 'Good afternoon'
self.add_response('{}. What can I help you with?'.format(greeting))
def process(self, command):
"""
Process the command and get response by querying each plugin if required.
"""
self.responses = []
if len(command) == 0:
self.greet()
return self.say_all()
command = command.strip()
help_regex = re.compile('help\\s+([^\\s]+)')
help_match = help_regex.match(command)
if help_match:
keyword = help_match.group(1).lower()
for lib in self.libraries:
if keyword in lib.keywords:
help_content = lib.help()
self.display_help(help_content)
return
matched = False
for lib in self.libraries:
lib_obj = lib(command)
lib_obj.process_input()
if (lib_obj.status == HalLibrary.SUCCESS or lib_obj.status ==
HalLibrary.INCOMPLETE):
matched = True
lib_obj.process()
resp = lib_obj.get_response()
for r in resp:
self.add_response(r)
elif lib_obj.status == HalLibrary.ERROR:
matched = True
self.add_response('ERROR: ' + lib_obj.get_error())
else:
pass
if not matched:
self.add_response("I don't understand what you're saying.")
return self.say_all()
| # -*- coding: utf-8 -*-
import abc
import datetime
import importlib
import inspect
import os
import re
import six
from .library import HalLibrary
@six.add_metaclass(abc.ABCMeta)
class Hal():
def __init__(self, configpath):
self.configpath = configpath
# Find libraries inside the lib directory
dir_path = os.path.join(os.path.dirname(__file__), "libraries")
lib_files = [f for f in os.listdir(dir_path) if
os.path.isfile(os.path.join(dir_path, f)) and
f.lower().endswith(".py")
]
self.responses = []
self.libraries = []
for f in lib_files:
# Try to load the module
try:
module_name = "hal.libraries." + f[:-3]
module = importlib.import_module(module_name)
for name, obj in inspect.getmembers(module):
# Find classes that inherit from HalLibrary
if inspect.isclass(obj) and issubclass(obj, HalLibrary) and \
name != "HalLibrary" and not inspect.isabstract(obj):
self.libraries.append(obj)
except:
self.add_response("Error loading library {}".format(f))
raise
def add_response(self, text):
self.responses.append(text)
def say_all(self):
response = "\n".join(self.responses)
return response
@abc.abstractmethod
    def display_help(self, help_content):
""" Present some information to the user """
pass
def greet(self):
hour = datetime.datetime.now().hour
greeting = "Good Evening"
if hour < 12:
greeting = 'Good morning'
elif 12 <= hour < 18:
greeting = 'Good afternoon'
self.add_response("{}. What can I help you with?".format(greeting))
def process(self, command):
"""
Process the command and get response by querying each plugin if required.
"""
self.responses = []
if(len(command) == 0):
self.greet()
return self.say_all()
# prepare the command
command = command.strip()
# Some hard coded patterns: If first word is help, activate help
# moudule
help_regex = re.compile("help\s+([^\s]+)")
help_match = help_regex.match(command)
if help_match:
keyword = help_match.group(1).lower()
# Try to find libraries with the keyword and print their help
for lib in self.libraries:
if keyword in lib.keywords:
# Print the help text
help_content = lib.help()
self.display_help(help_content)
return
matched = False
for lib in self.libraries:
lib_obj = lib(command)
# try to match the command with the library
lib_obj.process_input()
if lib_obj.status == HalLibrary.SUCCESS or lib_obj.status == HalLibrary.INCOMPLETE:
matched = True
lib_obj.process()
resp = lib_obj.get_response()
for r in resp:
self.add_response(r)
elif lib_obj.status == HalLibrary.ERROR:
matched = True
self.add_response("ERROR: " + lib_obj.get_error())
else:
# Failure to match
pass
if not matched:
self.add_response("I don't understand what you're saying.")
return self.say_all()
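    # Illustrative example (editor's note): the help regex above extracts the
    # library keyword from a command, e.g.
    #     re.compile(r"help\s+([^\s]+)").match("help weather").group(1)  # -> 'weather'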
| [
3,
5,
6,
8,
9
] |
541 | 66edf0d2f7e25e166563bdb1063a1ed45ecda0e6 | <mask token>
| Easy = [['4 + 12 = ?', 16], ['45 -34 = ?', 11], ['27 + 12 -18 = ?', 21], [
'25 - 5 * 4 = ?', 5], ['18 + 45 / 5 - 3 * 2 = ?', 21], ['5! = ?', 120],
['3! + 2! = ?', 8], ['7 + 5! / 4! - 6 / 3 = ?', 10], [
'(25 + 5) / 6 * 4 = ?', 20], ['4(3+c)+c=c+4; c=?', -2], [
    '√121 = ?', 11], ['x = √81 - √64; x= ?', 1], [
'x + y = 20; x - y = 4; y = ?', 8]]
Normal = [['8(10−k)=2k; k = ?', 8], ['−4n−8=4(−3n+2); n=?', 2], [
    '4(3+c)+c=c+4; c=?', -2], ['√121 = ?', 11], [
    'x = √81 - √64; x= ?', 1], [
    'y = √16 * √4 / √9; y=?', 2], [
'y−3=2(x+1); x= -2, y=?', 1], [' y*y = 4x/5 − 11; y= 5, x = ?', 45], [
'How many unique ways are there to arrange the letters in the word CANNON?'
, 120], [
'How many numbers between 1 and 100(inclusive) are divisible by 10 or 7',
    23], ['y=−4x+6; 3x+4y=-2 ; x=?', 2], [
    '−x+4y=−9; y=−2x-9; y=?', -3]]
Hard = [[
'Emily is packing her bags for her vacation. She has 6 unique Fabergé eggs, but only 3 fit in her bag. How many different groups of 3 Fabergé eggs can she take?'
, 20], [
'You just got a free ticket for a boat ride, and you can bring along 2 friends! Unfortunately, you have 5 friends who want to come along. How many different groups of friends could you take with you?'
, 10], [
    'Omar is packing his bags for his vacation. He has 9 unique shirts, but only 5 fit in his bag. How many different groups of 5 shirts can he take?'
, 126], [
'How many numbers between 1 and 100(inclusive) are divisible by 3 or 2?',
67], [
"You need to put your reindeer, Gloopin, Quentin, Ezekiel, and Lancer, in a single-file line to pull your sleigh. However, Quentin and Gloopin are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?"
, 12], [
"You need to put your reindeer, Gloopin, Balthazar, Bloopin, Prancer, and Quentin, in a single-file line to pull your sleigh. However, Prancer and Balthazar are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?"
    , 48], ['y−3=2(x+1); x= -2, y=?', 1], [
'How many unique ways are there to arrange the letters in the word CANNON?'
, 120], [
'How many numbers between 1 and 100(inclusive) are divisible by 10 or 7',
    23], ['−x+4y=−9; y=−2x-9; y=?', -3], [
    'x = √81 - √64; x= ?', 1], [
    'y = √16 * √4 / √9; y=?', 2], [
'y−3=2(x+1); x= -2, y=?', 1], [' y*y = 4x/5 − 11; y= 5, x = ?', 45], [
    'y=−4x+6; 3x+4y=-2 ; x=?', 2], ['−x+4y=−9; y=−2x-9; y=?', -3]]
| Easy = [["4 + 12 = ?", 16],
["45 -34 = ?", 11],
["27 + 12 -18 = ?", 21],
['25 - 5 * 4 = ?', 5],
["18 + 45 / 5 - 3 * 2 = ?", 21],
["5! = ?", 120],
["3! + 2! = ?", 8],
["7 + 5! / 4! - 6 / 3 = ?", 10],
["(25 + 5) / 6 * 4 = ?", 20],
["4(3+c)+c=c+4; c=?", -2],
["√121 = ?" ,11],
["x = √81 - √64; x= ?", 1],
["x + y = 20; x - y = 4; y = ?", 8]]
Normal = [["8(10−k)=2k; k = ?", 8],
["−4n−8=4(−3n+2); n=?", 2],
["4(3+c)+c=c+4; c=?", -2],
["√121 = ?" ,11],
["x = √81 - √64; x= ?", 1],
["y = √16 * √4 / √9; y=?", 2],
["y−3=2(x+1); x= -2, y=?", 1],
[" y*y = 4x/5 − 11; y= 5, x = ?", 45],
["How many unique ways are there to arrange the letters in the word CANNON?", 120],
["How many numbers between 1 and 100(inclusive) are divisible by 10 or 7", 23],
["y=−4x+6; 3x+4y=-2 ; x=?", 2],
["−x+4y=−9; y=−2x-9; y=?", -3]]
Hard = [["Emily is packing her bags for her vacation. She has 6 unique Fabergé eggs, but only 3 fit in her bag. How many different groups of 3 Fabergé eggs can she take?", 20],
["You just got a free ticket for a boat ride, and you can bring along 2 friends! Unfortunately, you have 5 friends who want to come along. How many different groups of friends could you take with you?", 10],
["Omar is packing his bags for his vacation. He has 999 unique shirts, but only 5 fit in his bag. How many different groups of 5 shirts can he take?", 126],
["How many numbers between 1 and 100(inclusive) are divisible by 3 or 2?" ,67],
["You need to put your reindeer, Gloopin, Quentin, Ezekiel, and Lancer, in a single-file line to pull your sleigh. However, Quentin and Gloopin are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?", 12],
["You need to put your reindeer, Gloopin, Balthazar, Bloopin, Prancer, and Quentin, in a single-file line to pull your sleigh. However, Prancer and Balthazar are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?", 2],
["y−3=2(x+1); x= -2, y=?", 48],
["How many unique ways are there to arrange the letters in the word CANNON?", 120],
["How many numbers between 1 and 100(inclusive) are divisible by 10 or 7", 23],
["−x+4y=−9; y=−2x-9; y=?", -3],
["x = √81 - √64; x= ?", 1],
["y = √16 * √4 / √9; y=?", 2],
["y−3=2(x+1); x= -2, y=?", 1],
[" y*y = 4x/5 − 11; y= 5, x = ?", 45],
["y=−4x+6; 3x+4y=-2 ; x=?", 2],
["−x+4y=−9; y=−2x-9; y=?", -3]]
| null | null | [
0,
1,
2
] |
542 | 7d099012584b84e9767bf0ce9d9df1596ca3bbab | <mask token>
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<mask token>
| <mask token>
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, 'utils'))
<mask token>
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<mask token>
if __name__ == '__main__':
app.run(debug=True)
| <mask token>
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.
currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, 'utils'))
<mask token>
app = Flask(__name__)
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<mask token>
if __name__ == '__main__':
app.run(debug=True)
| import os, sys, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.
currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, 'utils'))
from flask import Flask, render_template
import numpy as np
import plotly
import plotly.graph_objs as pgo
import json
from utils import model
app = Flask(__name__)
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<mask token>
if __name__ == '__main__':
app.run(debug=True)
| # Set up path references and dependencies.
import os, sys, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, "utils"))
# Import important helper libraries.
from flask import Flask, render_template
import numpy as np
import plotly
import plotly.graph_objs as pgo
import json
# Import modules created to serve the project.
#from utils import DB_interface as DBI
#from utils import path_config as pc
from utils import model
app = Flask(__name__)
# Global variable
#DAYS = 500
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template("index.html", graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5 #np.random.normal(2.5, 1.0)
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps,n_pop, init_inf, t_inc, t_inf, r_t, rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return (graphJSON)
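# Editor's note (illustrative; index.html is not shown in this file): the JSON
# string returned above is typically decoded client-side, e.g. in the template:
#     Plotly.newPlot('chart', JSON.parse('{{ graphJSON | safe }}'));
# where 'chart' is an assumed div id.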
"""
@app.callback(
Output('test','children')
[Input('val_num_steps', 'num_steps')]
)
@app.route('/start_bckgrnd_update')
def start_bckgrnd_update():
p = Process(target=bckgrnd_update, name="background_update")
p.start()
#p.join()
now = datetime.now()
user = {'username': 'MSE!'}
posts = [
{
'author': {'username': 'Paul'},
'body': 'Henrik has the update just been started?'
},
{
'author': {'username': 'Henrik'},
'body': 'You bet your sweet ass it has!'
},
{
'author': {'username': 'Paul'},
'body': 'So what time was is when it started?'
},
{
'author': {'username': 'Henrik'},
'body': 'It was exactly %s !' % now
}
]
return render_template("start_bckgrnd_update.html", title="home", user = user, posts=posts)
def bckgrnd_update():
global updating
updating = True
while updating:
print(datetime.now())
print("updating RKI DBs now")
DB = DBI.DB_interface()
DB.update_RKI_csv()
DB.update_RKI_landkreise_csv()
day = 24 * 3600
time.sleep(day)
"""
if __name__ == "__main__":
app.run(debug=True)
| [
2,
3,
4,
5,
6
] |
543 | 169ad888e7629faff9509399ac7ead7a149a9602 | <mask token>
| <mask token>
print('\n\n\n\n')
<mask token>
admin1.describe_user()
print('\n')
admin1.set_user_name('Reven10')
print('\n')
admin1.describe_user()
admin1.privileges.show_privileges()
| <mask token>
print('\n\n\n\n')
admin1 = userObj.Admin('john', 'deer', 30)
admin1.describe_user()
print('\n')
admin1.set_user_name('Reven10')
print('\n')
admin1.describe_user()
admin1.privileges.show_privileges()
| import TryItYourSelf_9_8 as userObj
print('\n\n\n\n')
admin1 = userObj.Admin('john', 'deer', 30)
admin1.describe_user()
print('\n')
admin1.set_user_name('Reven10')
print('\n')
admin1.describe_user()
admin1.privileges.show_privileges()
| null | [
0,
1,
2,
3
] |
544 | eb246beb05249f5dfde019b773698ba3bb1b1118 | <mask token>
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
<mask token>
| <mask token>
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
<mask token>
print(mycoin.sideup)
print(mycoin.get_sideup())
mycoin.toss()
print(mycoin.get_sideup())
| <mask token>
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
mycoin = Coin()
print(mycoin.sideup)
print(mycoin.get_sideup())
mycoin.toss()
print(mycoin.get_sideup())
| import random
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
mycoin = Coin()
print(mycoin.sideup)
print(mycoin.get_sideup())
mycoin.toss()
print(mycoin.get_sideup())
| #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Nirvana
#
# Created: 07/06/2014
# Copyright: (c) Nirvana 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
import random
class Coin(object):
def __init__(self):
self.sideup = "Heads"
def toss(self):
if random.randint(0,1)==0:
self.sideup = "Heads"
else:
self.sideup = "Tails"
def get_sideup(self):
return self.sideup
mycoin=Coin()
print (mycoin.sideup)
print (mycoin.get_sideup())
mycoin.toss()
print (mycoin.get_sideup())
| [
4,
5,
6,
7,
8
] |
545 | bcc4276ea240247519cabbf5fc5646a9147ee3be | <mask token>
class SurroundByCommand(sublime_plugin.TextCommand):
def run(self, edit, tag):
for region in self.view.sel():
text = self.view.substr(region)
self.view.replace(edit, region, '<' + tag + '>' + text + '</' +
tag.split()[0] + '>')
| <mask token>
class PromptSurrounderCommand(sublime_plugin.WindowCommand):
<mask token>
<mask token>
class SurroundByCommand(sublime_plugin.TextCommand):
def run(self, edit, tag):
for region in self.view.sel():
text = self.view.substr(region)
self.view.replace(edit, region, '<' + tag + '>' + text + '</' +
tag.split()[0] + '>')
| <mask token>
class PromptSurrounderCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.show_input_panel('Surround by:', '', self.on_done, None,
None)
def on_done(self, tag):
try:
if self.window.active_view():
self.window.active_view().run_command('surround_by', {'tag':
tag})
except ValueError:
print('hi')
class SurroundByCommand(sublime_plugin.TextCommand):
def run(self, edit, tag):
for region in self.view.sel():
text = self.view.substr(region)
self.view.replace(edit, region, '<' + tag + '>' + text + '</' +
tag.split()[0] + '>')
| import sublime
import sublime_plugin
class PromptSurrounderCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.show_input_panel('Surround by:', '', self.on_done, None,
None)
def on_done(self, tag):
try:
if self.window.active_view():
self.window.active_view().run_command('surround_by', {'tag':
tag})
except ValueError:
print('hi')
class SurroundByCommand(sublime_plugin.TextCommand):
def run(self, edit, tag):
for region in self.view.sel():
text = self.view.substr(region)
self.view.replace(edit, region, '<' + tag + '>' + text + '</' +
tag.split()[0] + '>')
| import sublime
import sublime_plugin
class PromptSurrounderCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.show_input_panel("Surround by:", "", self.on_done, None, None)
def on_done(self, tag):
try:
if self.window.active_view():
self.window.active_view().run_command("surround_by", {"tag": tag})
except ValueError:
print('hi')
class SurroundByCommand(sublime_plugin.TextCommand):
def run(self, edit, tag):
for region in self.view.sel():
text = self.view.substr(region)
self.view.replace(edit,region,"<"+tag+">"+text+"</"+tag.split()[0]+">")
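    # Illustrative example (editor's note): with "hello" selected, entering the
    # tag `div class="note"` produces <div class="note">hello</div> --
    # tag.split()[0] keeps only "div" for the closing tag.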
| [
2,
3,
5,
6,
7
] |
546 | 07783921da2fb4ae9452324f833b08b3f92ba294 | <mask token>
| <mask token>
class Map(BaseCommand):
<mask token>
| <mask token>
class Map(BaseCommand):
def run(self):
from lib.models import Mapping
from lib.models import Migration
migration = Migration.load(self.options['MIGRATION_FILE'])
mapping = Mapping(self.options)
migration.mappings.append(mapping)
migration.write()
| <mask token>
from json import dumps
from .base_command import BaseCommand
class Map(BaseCommand):
def run(self):
from lib.models import Mapping
from lib.models import Migration
migration = Migration.load(self.options['MIGRATION_FILE'])
mapping = Mapping(self.options)
migration.mappings.append(mapping)
migration.write()
| """
commands/map.py
description:
Generates a blank configuration file in the current directory
"""
from json import dumps
from .base_command import BaseCommand
class Map(BaseCommand):
def run(self):
from lib.models import Mapping
from lib.models import Migration
migration = Migration.load(self.options['MIGRATION_FILE'])
mapping = Mapping(self.options)
migration.mappings.append(mapping)
migration.write() | [
0,
1,
2,
3,
4
] |
547 | 2f8dff78f5bc5ed18df97e2574b47f0a7711d372 | <mask token>
| <mask token>
def main():
"""
公共参数:
store: 商城或书店名称(小米|文泉), browser: 浏览器(目前只支持Chrome),
version: 浏览器版本号, quit: 运行完后是否退出浏览器(默认不退出),
hidden: 是否启用界面(默认启用),
商城抢购:
url: 抢购商城地址, addr_nth: 收货地址(选择第几个收货地址,默认第一个),
书店扒书(quit默认退出, hidden默认不启用):
books: {'书名': '电子书链接地址'}, path: 电子书图片保存地址(保存地址文件不存在需要先创建),
account: 账号, password: 密码,
"""
books = {'书名': '电子书链接地址'}
xm = Panic(browser='Chrome', version='78.0.0', store='文泉', books=books,
path='路径', account='账号', password='密码')
xm.start()
<mask token>
| <mask token>
sys.path.append('../')
try:
from panicbuying.panic import Panic
except:
from panicbuying.panicbuying.panic import Panic
def main():
"""
公共参数:
store: 商城或书店名称(小米|文泉), browser: 浏览器(目前只支持Chrome),
version: 浏览器版本号, quit: 运行完后是否退出浏览器(默认不退出),
hidden: 是否启用界面(默认启用),
商城抢购:
url: 抢购商城地址, addr_nth: 收货地址(选择第几个收货地址,默认第一个),
书店扒书(quit默认退出, hidden默认不启用):
books: {'书名': '电子书链接地址'}, path: 电子书图片保存地址(保存地址文件不存在需要先创建),
account: 账号, password: 密码,
"""
books = {'书名': '电子书链接地址'}
xm = Panic(browser='Chrome', version='78.0.0', store='文泉', books=books,
path='路径', account='账号', password='密码')
xm.start()
if __name__ == '__main__':
main()
| <mask token>
import sys
sys.path.append('../')
try:
from panicbuying.panic import Panic
except:
from panicbuying.panicbuying.panic import Panic
def main():
"""
公共参数:
store: 商城或书店名称(小米|文泉), browser: 浏览器(目前只支持Chrome),
version: 浏览器版本号, quit: 运行完后是否退出浏览器(默认不退出),
hidden: 是否启用界面(默认启用),
商城抢购:
url: 抢购商城地址, addr_nth: 收货地址(选择第几个收货地址,默认第一个),
书店扒书(quit默认退出, hidden默认不启用):
books: {'书名': '电子书链接地址'}, path: 电子书图片保存地址(保存地址文件不存在需要先创建),
account: 账号, password: 密码,
"""
books = {'书名': '电子书链接地址'}
xm = Panic(browser='Chrome', version='78.0.0', store='文泉', books=books,
path='路径', account='账号', password='密码')
xm.start()
if __name__ == '__main__':
main()
| # encoding: utf-8
"""
File: demo.py
Author: Rock Johnson
Description: This file is a usage example
"""
import sys
sys.path.append('../')
try:
from panicbuying.panic import Panic
except:
from panicbuying.panicbuying.panic import Panic
def main():
    '''
    Common parameters:
        store: mall or bookstore name (小米|文泉), browser: browser (only Chrome is supported for now),
        version: browser version number, quit: whether to quit the browser when finished (default: keep it open),
        hidden: whether to show the browser UI (default: shown),
    Mall flash-sale:
        url: URL of the flash-sale page, addr_nth: shipping address (which saved address to pick; default: the first),
    Bookstore e-book scraping (quit defaults to True, hidden defaults to False):
        books: {'book title': 'e-book URL'}, path: directory where the e-book page images are saved (create it first if it does not exist),
        account: account name, password: password,
    '''
    books = {
        'book title': 'e-book URL',  # placeholder entry
    }
    xm = Panic(browser='Chrome', version='78.0.0', store='文泉',
               books=books, path='path-to-save-dir', account='your-account',
               password='your-password',
               )  # placeholders: fill in a real save path, account and password
xm.start()
if __name__ == '__main__':
main()
| [
0,
1,
2,
3,
4
] |
548 | 03dd37346ed12bbd66cbebc46fadc37be319b986 | <mask token>
class TestSwitchMapIndex(unittest.TestCase):
def test_switch_map_indexed_uses_index(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'a'), on_next(400,
'b'), on_next(500, 'c'))
def create_inner(x: str, i: int):
def create_changing(j: int):
return i, j, x
return interval(20).pipe(ops.map(create_changing))
def create():
return xs.pipe(ops.switch_map_indexed(project=create_inner))
results = scheduler.start(create, disposed=580)
assert results.messages == [on_next(320, (0, 0, 'a')), on_next(340,
(0, 1, 'a')), on_next(360, (0, 2, 'a')), on_next(380, (0, 3,
'a')), on_next(420, (1, 0, 'b')), on_next(440, (1, 1, 'b')),
on_next(460, (1, 2, 'b')), on_next(480, (1, 3, 'b')), on_next(
520, (2, 0, 'c')), on_next(540, (2, 1, 'c')), on_next(560, (2,
2, 'c'))]
assert xs.subscriptions == [Subscription(200, 580)]
def test_switch_map_indexed_inner_throws(self):
"""Inner throwing causes outer to throw"""
ex = 'ex'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_next(550, 2))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_next(450, 'b'),
on_error(520, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 520)]
assert sources[2].subscriptions == []
<mask token>
<mask token>
def test_switch_map_indexed_inner_completes(self):
"""Inner completions do not affect outer"""
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'd'), on_next(330,
'f'), on_completed(540))
def create_inner(x: str, i: int):
"""An observable which will complete after 40 ticks"""
return interval(20).pipe(ops.map(lambda j: (i, j, x)), ops.take(2))
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(320, (0, 0, 'd')), on_next(350,
(1, 0, 'f')), on_next(370, (1, 1, 'f')), on_completed(540)]
def test_switch_map_default_mapper(self):
with marbles_testing(timespan=10) as (start, cold, hot, exp):
xs = hot(' ---a---b------c-----', {'a': cold(
' --1--2', None, None), 'b': cold(' --1-2-3-4-5|',
None, None), 'c': cold(' --1--2', None, None)
}, None)
expected = exp(' -----1---1-2-3--1--2', None, None)
result = start(xs.pipe(ops.switch_map_indexed()))
assert result == expected
| <mask token>
class TestSwitchMapIndex(unittest.TestCase):
def test_switch_map_indexed_uses_index(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'a'), on_next(400,
'b'), on_next(500, 'c'))
def create_inner(x: str, i: int):
def create_changing(j: int):
return i, j, x
return interval(20).pipe(ops.map(create_changing))
def create():
return xs.pipe(ops.switch_map_indexed(project=create_inner))
results = scheduler.start(create, disposed=580)
assert results.messages == [on_next(320, (0, 0, 'a')), on_next(340,
(0, 1, 'a')), on_next(360, (0, 2, 'a')), on_next(380, (0, 3,
'a')), on_next(420, (1, 0, 'b')), on_next(440, (1, 1, 'b')),
on_next(460, (1, 2, 'b')), on_next(480, (1, 3, 'b')), on_next(
520, (2, 0, 'c')), on_next(540, (2, 1, 'c')), on_next(560, (2,
2, 'c'))]
assert xs.subscriptions == [Subscription(200, 580)]
def test_switch_map_indexed_inner_throws(self):
"""Inner throwing causes outer to throw"""
ex = 'ex'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_next(550, 2))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_next(450, 'b'),
on_error(520, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 520)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_outer_throws(self):
"""Outer throwing unsubscribes from all"""
ex = 'ABC'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_error(430, ex))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_error(430, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 430)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_no_inner(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_completed(500))
sources = [scheduler.create_cold_observable(on_next(20, 2))]
def create_inner(_x: int, i: int):
return sources[i]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_completed(500)]
assert xs.subscriptions == [Subscription(200, 500)]
assert sources[0].subscriptions == []
def test_switch_map_indexed_inner_completes(self):
"""Inner completions do not affect outer"""
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'd'), on_next(330,
'f'), on_completed(540))
def create_inner(x: str, i: int):
"""An observable which will complete after 40 ticks"""
return interval(20).pipe(ops.map(lambda j: (i, j, x)), ops.take(2))
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(320, (0, 0, 'd')), on_next(350,
(1, 0, 'f')), on_next(370, (1, 1, 'f')), on_completed(540)]
def test_switch_map_default_mapper(self):
with marbles_testing(timespan=10) as (start, cold, hot, exp):
xs = hot(' ---a---b------c-----', {'a': cold(
' --1--2', None, None), 'b': cold(' --1-2-3-4-5|',
None, None), 'c': cold(' --1--2', None, None)
}, None)
expected = exp(' -----1---1-2-3--1--2', None, None)
result = start(xs.pipe(ops.switch_map_indexed()))
assert result == expected
| <mask token>
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestSwitchMapIndex(unittest.TestCase):
def test_switch_map_indexed_uses_index(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'a'), on_next(400,
'b'), on_next(500, 'c'))
def create_inner(x: str, i: int):
def create_changing(j: int):
return i, j, x
return interval(20).pipe(ops.map(create_changing))
def create():
return xs.pipe(ops.switch_map_indexed(project=create_inner))
results = scheduler.start(create, disposed=580)
assert results.messages == [on_next(320, (0, 0, 'a')), on_next(340,
(0, 1, 'a')), on_next(360, (0, 2, 'a')), on_next(380, (0, 3,
'a')), on_next(420, (1, 0, 'b')), on_next(440, (1, 1, 'b')),
on_next(460, (1, 2, 'b')), on_next(480, (1, 3, 'b')), on_next(
520, (2, 0, 'c')), on_next(540, (2, 1, 'c')), on_next(560, (2,
2, 'c'))]
assert xs.subscriptions == [Subscription(200, 580)]
def test_switch_map_indexed_inner_throws(self):
"""Inner throwing causes outer to throw"""
ex = 'ex'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_next(550, 2))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_next(450, 'b'),
on_error(520, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 520)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_outer_throws(self):
"""Outer throwing unsubscribes from all"""
ex = 'ABC'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_error(430, ex))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_error(430, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 430)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_no_inner(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_completed(500))
sources = [scheduler.create_cold_observable(on_next(20, 2))]
def create_inner(_x: int, i: int):
return sources[i]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_completed(500)]
assert xs.subscriptions == [Subscription(200, 500)]
assert sources[0].subscriptions == []
def test_switch_map_indexed_inner_completes(self):
"""Inner completions do not affect outer"""
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'd'), on_next(330,
'f'), on_completed(540))
def create_inner(x: str, i: int):
"""An observable which will complete after 40 ticks"""
return interval(20).pipe(ops.map(lambda j: (i, j, x)), ops.take(2))
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(320, (0, 0, 'd')), on_next(350,
(1, 0, 'f')), on_next(370, (1, 1, 'f')), on_completed(540)]
def test_switch_map_default_mapper(self):
with marbles_testing(timespan=10) as (start, cold, hot, exp):
xs = hot(' ---a---b------c-----', {'a': cold(
' --1--2', None, None), 'b': cold(' --1-2-3-4-5|',
None, None), 'c': cold(' --1--2', None, None)
}, None)
expected = exp(' -----1---1-2-3--1--2', None, None)
result = start(xs.pipe(ops.switch_map_indexed()))
assert result == expected
| import unittest
from reactivex import interval
from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler
from reactivex.testing.marbles import marbles_testing
from reactivex.testing.subscription import Subscription
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestSwitchMapIndex(unittest.TestCase):
def test_switch_map_indexed_uses_index(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'a'), on_next(400,
'b'), on_next(500, 'c'))
def create_inner(x: str, i: int):
def create_changing(j: int):
return i, j, x
return interval(20).pipe(ops.map(create_changing))
def create():
return xs.pipe(ops.switch_map_indexed(project=create_inner))
results = scheduler.start(create, disposed=580)
assert results.messages == [on_next(320, (0, 0, 'a')), on_next(340,
(0, 1, 'a')), on_next(360, (0, 2, 'a')), on_next(380, (0, 3,
'a')), on_next(420, (1, 0, 'b')), on_next(440, (1, 1, 'b')),
on_next(460, (1, 2, 'b')), on_next(480, (1, 3, 'b')), on_next(
520, (2, 0, 'c')), on_next(540, (2, 1, 'c')), on_next(560, (2,
2, 'c'))]
assert xs.subscriptions == [Subscription(200, 580)]
def test_switch_map_indexed_inner_throws(self):
"""Inner throwing causes outer to throw"""
ex = 'ex'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_next(550, 2))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_next(450, 'b'),
on_error(520, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 520)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_outer_throws(self):
"""Outer throwing unsubscribes from all"""
ex = 'ABC'
scheduler = TestScheduler()
sources = [scheduler.create_cold_observable(on_next(100, 'a'),
on_next(300, 'aa')), scheduler.create_cold_observable(on_next(
50, 'b'), on_error(120, ex)), scheduler.create_cold_observable(
on_next(50, 'wont happen'), on_error(120, 'no'))]
xs = scheduler.create_hot_observable(on_next(250, 0), on_next(400,
1), on_error(430, ex))
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(350, 'a'), on_error(430, ex)]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 430)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_no_inner(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_completed(500))
sources = [scheduler.create_cold_observable(on_next(20, 2))]
def create_inner(_x: int, i: int):
return sources[i]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_completed(500)]
assert xs.subscriptions == [Subscription(200, 500)]
assert sources[0].subscriptions == []
def test_switch_map_indexed_inner_completes(self):
"""Inner completions do not affect outer"""
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(300, 'd'), on_next(330,
'f'), on_completed(540))
def create_inner(x: str, i: int):
"""An observable which will complete after 40 ticks"""
return interval(20).pipe(ops.map(lambda j: (i, j, x)), ops.take(2))
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_next(320, (0, 0, 'd')), on_next(350,
(1, 0, 'f')), on_next(370, (1, 1, 'f')), on_completed(540)]
def test_switch_map_default_mapper(self):
with marbles_testing(timespan=10) as (start, cold, hot, exp):
xs = hot(' ---a---b------c-----', {'a': cold(
' --1--2', None, None), 'b': cold(' --1-2-3-4-5|',
None, None), 'c': cold(' --1--2', None, None)
}, None)
expected = exp(' -----1---1-2-3--1--2', None, None)
result = start(xs.pipe(ops.switch_map_indexed()))
assert result == expected
| import unittest
from reactivex import interval
from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler
from reactivex.testing.marbles import marbles_testing
from reactivex.testing.subscription import Subscription
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestSwitchMapIndex(unittest.TestCase):
def test_switch_map_indexed_uses_index(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(300, "a"),
on_next(400, "b"),
on_next(500, "c"),
)
def create_inner(x: str, i: int):
def create_changing(j: int):
return (i, j, x)
return interval(20).pipe(ops.map(create_changing))
def create():
return xs.pipe(ops.switch_map_indexed(project=create_inner))
results = scheduler.start(create, disposed=580)
# (i, j, x): i is the index of the outer emit;
# j is the value of the inner interval;
# x is the value of the outer emission
assert results.messages == [
on_next(320, (0, 0, "a")),
on_next(340, (0, 1, "a")),
on_next(360, (0, 2, "a")),
on_next(380, (0, 3, "a")),
on_next(420, (1, 0, "b")),
on_next(440, (1, 1, "b")),
on_next(460, (1, 2, "b")),
on_next(480, (1, 3, "b")),
on_next(520, (2, 0, "c")),
on_next(540, (2, 1, "c")),
on_next(560, (2, 2, "c")),
]
assert xs.subscriptions == [Subscription(200, 580)]
def test_switch_map_indexed_inner_throws(self):
"""Inner throwing causes outer to throw"""
ex = "ex"
scheduler = TestScheduler()
sources = [
scheduler.create_cold_observable(on_next(100, "a"), on_next(300, "aa")),
scheduler.create_cold_observable(on_next(50, "b"), on_error(120, ex)),
scheduler.create_cold_observable(
on_next(50, "wont happen"), on_error(120, "no")
),
]
xs = scheduler.create_hot_observable(
on_next(
250,
0,
),
on_next(400, 1),
on_next(
550,
2,
),
)
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [
on_next(350, "a"),
on_next(450, "b"),
on_error(520, ex),
]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 520)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_outer_throws(self):
"""Outer throwing unsubscribes from all"""
ex = "ABC"
scheduler = TestScheduler()
sources = [
scheduler.create_cold_observable(on_next(100, "a"), on_next(300, "aa")),
scheduler.create_cold_observable(on_next(50, "b"), on_error(120, ex)),
scheduler.create_cold_observable(
on_next(50, "wont happen"), on_error(120, "no")
),
]
xs = scheduler.create_hot_observable(
on_next(
250,
0,
),
on_next(400, 1),
on_error(430, ex),
)
def create_inner(x: int, _i: int):
return sources[x]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [
on_next(350, "a"),
on_error(430, ex),
]
assert sources[0].subscriptions == [Subscription(250, 400)]
assert sources[1].subscriptions == [Subscription(400, 430)]
assert sources[2].subscriptions == []
def test_switch_map_indexed_no_inner(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_completed(500))
# Fake inner which should never be subscribed to
sources = [scheduler.create_cold_observable(on_next(20, 2))]
def create_inner(_x: int, i: int):
return sources[i]
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [on_completed(500)]
assert xs.subscriptions == [Subscription(200, 500)]
assert sources[0].subscriptions == []
def test_switch_map_indexed_inner_completes(self):
"""Inner completions do not affect outer"""
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(300, "d"),
on_next(330, "f"),
on_completed(540),
)
def create_inner(x: str, i: int):
"""An observable which will complete after 40 ticks"""
return interval(20).pipe(ops.map(lambda j: (i, j, x)), ops.take(2))
def create():
return xs.pipe(ops.switch_map_indexed(create_inner))
results = scheduler.start(create)
assert results.messages == [
on_next(320, (0, 0, "d")),
on_next(350, (1, 0, "f")),
on_next(
370, (1, 1, "f")
), # here the current inner is unsubscribed but not the outer
on_completed(540), # only outer completion affects
]
def test_switch_map_default_mapper(self):
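        # no projection supplied: each emitted value is itself an observable,
        # so switch_map_indexed switches into it as-is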
with marbles_testing(timespan=10) as (start, cold, hot, exp):
xs = hot(
" ---a---b------c-----",
{
"a": cold(" --1--2", None, None),
"b": cold(" --1-2-3-4-5|", None, None),
"c": cold(" --1--2", None, None),
},
None,
)
expected = exp(" -----1---1-2-3--1--2", None, None)
result = start(xs.pipe(ops.switch_map_indexed()))
assert result == expected
| [
5,
7,
8,
9,
10
] |
549 | e8a36bd7826c5d71cf8012ea82df6c127dd858fc | <mask token>
def load_yaml_config(config_path: str) ->Dict:
with open(config_path, 'r') as stream:
return yaml.load(stream)
def get_optimizer(model: nn.Module, optim_config: Dict) ->optim.Optimizer:
return optim.Adam(model.parameters(), **optim_config)
def save_checkpoint(model: nn.Module, path: str):
torch.save(model.state_dict(), path)
def load_state_dict(path: str) ->OrderedDict:
return torch.load(path)
<mask token>
| <mask token>
def get_device() ->torch.device:
if torch.cuda.is_available():
return torch.device('cuda')
return torch.device('cpu')
def load_yaml_config(config_path: str) ->Dict:
with open(config_path, 'r') as stream:
return yaml.load(stream)
def get_optimizer(model: nn.Module, optim_config: Dict) ->optim.Optimizer:
return optim.Adam(model.parameters(), **optim_config)
def save_checkpoint(model: nn.Module, path: str):
torch.save(model.state_dict(), path)
def load_state_dict(path: str) ->OrderedDict:
return torch.load(path)
<mask token>
| <mask token>
def get_device() ->torch.device:
if torch.cuda.is_available():
return torch.device('cuda')
return torch.device('cpu')
def load_yaml_config(config_path: str) ->Dict:
with open(config_path, 'r') as stream:
return yaml.load(stream)
def get_optimizer(model: nn.Module, optim_config: Dict) ->optim.Optimizer:
return optim.Adam(model.parameters(), **optim_config)
def save_checkpoint(model: nn.Module, path: str):
torch.save(model.state_dict(), path)
def load_state_dict(path: str) ->OrderedDict:
return torch.load(path)
def load_checkpoint(model: nn.Module, checkpoint_path: Optional[str]):
if checkpoint_path:
model.load_state_dict(load_state_dict(checkpoint_path))
| from typing import Dict, Optional
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.optim as optim
import yaml
def get_device() ->torch.device:
if torch.cuda.is_available():
return torch.device('cuda')
return torch.device('cpu')
def load_yaml_config(config_path: str) ->Dict:
with open(config_path, 'r') as stream:
return yaml.load(stream)
def get_optimizer(model: nn.Module, optim_config: Dict) ->optim.Optimizer:
return optim.Adam(model.parameters(), **optim_config)
def save_checkpoint(model: nn.Module, path: str):
torch.save(model.state_dict(), path)
def load_state_dict(path: str) ->OrderedDict:
return torch.load(path)
def load_checkpoint(model: nn.Module, checkpoint_path: Optional[str]):
if checkpoint_path:
model.load_state_dict(load_state_dict(checkpoint_path))
| from typing import Dict, Optional
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.optim as optim
import yaml
def get_device() -> torch.device:
if torch.cuda.is_available():
return torch.device("cuda")
return torch.device("cpu")
def load_yaml_config(config_path: str) -> Dict:
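    # note: PyYAML >= 5.1 warns on yaml.load() without an explicit Loader;
    # yaml.safe_load is the safer choice for untrusted input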
with open(config_path, "r") as stream:
return yaml.load(stream)
def get_optimizer(model: nn.Module, optim_config: Dict) -> optim.Optimizer:
return optim.Adam(model.parameters(), **optim_config)
def save_checkpoint(model: nn.Module, path: str):
torch.save(model.state_dict(), path)
def load_state_dict(path: str) -> OrderedDict:
return torch.load(path)
def load_checkpoint(model: nn.Module, checkpoint_path: Optional[str]):
if checkpoint_path:
model.load_state_dict(load_state_dict(checkpoint_path))
| [
4,
5,
6,
7,
8
] |
550 | 63c214d9e831356345ba2eee68634af36964dcff | # Overview file
#import python classes
import numpy as np
import random as rn
import math
import matplotlib.pyplot as plt
import pylab
from mpl_toolkits.mplot3d import Axes3D
#import self produced classes
import forcemodule as fm
import init_sys
# independent parameters
dt = 0.004
N=2048
lpnum = 1000
density = 0.85
temp = 0.8
# Loading initial conditions
mom = init_sys.init_mom(N, temp)
pos, l = init_sys.init_pos(N, density)
forces = init_sys.init_forc(N)
pot = init_sys.init_pot(N)
print(N, 'N')
# Iteration Verlet method
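# velocity-Verlet scheme: half momentum kick, full position drift (wrapped into
# the periodic box via modulo l), then a second half kick with the new forces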
forces, pot = fm.calc_forces(pos,forces,pot,l,[N])
formersummom = 0
for lp in range(lpnum):
mom = mom + forces*0.5*dt
pos = (pos + mom*dt) % l # % l means modulo of l, hence it adds/subtracts n*l untill 0<pos<l
forces, pot = fm.calc_forces(pos,forces,pot,l,[N])
mom = mom + forces*0.5*dt
Ken = np.sum(mom*mom*0.5, axis=1)
toten = sum(Ken) - sum(pot)
   print(toten, np.sum(mom))
'''
fig = pylab.figure()
ax = Axes3D(fig)
ax.scatter(pos[:,0],pos[:,1],pos[:,2],c='b')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show()
'''
# Plotting the positions
| null | null | null | null | [
0
] |
551 | 8bb39149a5b7f4f4b1d3d62a002ab97421905ea1 | <mask token>
def get_product(symbol):
"""
    Extract the product name from a contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\D{1,2})(\\d{0,1})(\\d{3})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<1>')
else:
return symbol
def get_exchange(symbol):
"""
    Extract the exchange from a dotted contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\.)(\\D{1,4})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<2>')
else:
return symbol
<mask token>
| <mask token>
def product_to_exchange(product):
"""
    Convert a contract product code to its exchange
:param product:
:return:
"""
PRODUCT_ = product.upper()
if PRODUCT_ in PRODUCTS_CFFEX:
return 'CFFEX'
if PRODUCT_ in PRODUCTS_CZCE:
return 'CZCE'
product_ = product.lower()
if product_ in PRODUCTS_SHFE:
return 'SHFE'
if product_ in PRODUCTS_DCE:
return 'DCE'
return 'Unknown'
def is_shfe(product):
"""
    Whether the product belongs to SHFE
    This extra function mainly exists because SHFE distinguishes closing today's positions from closing yesterday's
:param product:
:return:
"""
product_ = product.lower()
return product_ in PRODUCTS_SHFE
def get_product(symbol):
"""
    Extract the product name from a contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\D{1,2})(\\d{0,1})(\\d{3})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<1>')
else:
return symbol
def get_exchange(symbol):
"""
    Extract the exchange from a dotted contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\.)(\\D{1,4})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<2>')
else:
return symbol
if __name__ == '__main__':
import pandas as pd
df = pd.DataFrame({'Symbol': ['IF1603', 'rb1708', '600000']})
df['IsSHFE'] = list(map(is_shfe, map(get_product, df['Symbol'])))
df['product'] = list(map(get_product, df['Symbol']))
df['IsSHFE'] = list(map(is_shfe, df['product']))
print(df)
print(get_product('IF1406'))
print(EXCHANGES_wind_code_xapi.get('SH'))
| <mask token>
PRODUCTS_SHFE = {'cu', 'al', 'zn', 'pb', 'ni', 'sn', 'au', 'ag', 'rb', 'wr',
'hc', 'fu', 'bu', 'ru'}
PRODUCTS_CFFEX = {'IF', 'IC', 'IH', 'T', 'TF'}
PRODUCTS_CZCE = {'SR', 'CF', 'ZC', 'FG', 'TA', 'WH', 'PM', 'RI', 'LR', 'JR',
'RS', 'OI', 'RM', 'SF', 'SM', 'MA', 'WT', 'WS', 'RO', 'ER', 'ME', 'TC'}
PRODUCTS_DCE = {'m', 'y', 'a', 'b', 'p', 'c', 'cs', 'jd', 'fb', 'bb', 'l',
'v', 'pp', 'j', 'jm', 'i'}
EXCHANGES_wind_code_xapi = {'CFE': 'CFFEX', 'SHF': 'SHFE', 'CZC': 'CZCE',
'DCE': 'DCE', 'SH': 'SSE', 'SZ': 'SZSE'}
EXCHANGES_xapi_wind_code = dict((v, k) for k, v in EXCHANGES_wind_code_xapi
.items())
def product_to_exchange(product):
"""
    Convert a contract product code to its exchange
:param product:
:return:
"""
PRODUCT_ = product.upper()
if PRODUCT_ in PRODUCTS_CFFEX:
return 'CFFEX'
if PRODUCT_ in PRODUCTS_CZCE:
return 'CZCE'
product_ = product.lower()
if product_ in PRODUCTS_SHFE:
return 'SHFE'
if product_ in PRODUCTS_DCE:
return 'DCE'
return 'Unknown'
def is_shfe(product):
"""
    Whether the product belongs to SHFE
    This extra function mainly exists because SHFE distinguishes closing today's positions from closing yesterday's
:param product:
:return:
"""
product_ = product.lower()
return product_ in PRODUCTS_SHFE
def get_product(symbol):
"""
    Extract the product name from a contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\D{1,2})(\\d{0,1})(\\d{3})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<1>')
else:
return symbol
def get_exchange(symbol):
"""
    Extract the exchange from a dotted contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\.)(\\D{1,4})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<2>')
else:
return symbol
if __name__ == '__main__':
import pandas as pd
df = pd.DataFrame({'Symbol': ['IF1603', 'rb1708', '600000']})
df['IsSHFE'] = list(map(is_shfe, map(get_product, df['Symbol'])))
df['product'] = list(map(get_product, df['Symbol']))
df['IsSHFE'] = list(map(is_shfe, df['product']))
print(df)
print(get_product('IF1406'))
print(EXCHANGES_wind_code_xapi.get('SH'))
| <mask token>
import re
PRODUCTS_SHFE = {'cu', 'al', 'zn', 'pb', 'ni', 'sn', 'au', 'ag', 'rb', 'wr',
'hc', 'fu', 'bu', 'ru'}
PRODUCTS_CFFEX = {'IF', 'IC', 'IH', 'T', 'TF'}
PRODUCTS_CZCE = {'SR', 'CF', 'ZC', 'FG', 'TA', 'WH', 'PM', 'RI', 'LR', 'JR',
'RS', 'OI', 'RM', 'SF', 'SM', 'MA', 'WT', 'WS', 'RO', 'ER', 'ME', 'TC'}
PRODUCTS_DCE = {'m', 'y', 'a', 'b', 'p', 'c', 'cs', 'jd', 'fb', 'bb', 'l',
'v', 'pp', 'j', 'jm', 'i'}
EXCHANGES_wind_code_xapi = {'CFE': 'CFFEX', 'SHF': 'SHFE', 'CZC': 'CZCE',
'DCE': 'DCE', 'SH': 'SSE', 'SZ': 'SZSE'}
EXCHANGES_xapi_wind_code = dict((v, k) for k, v in EXCHANGES_wind_code_xapi
.items())
def product_to_exchange(product):
"""
    Convert a contract product code to its exchange
:param product:
:return:
"""
PRODUCT_ = product.upper()
if PRODUCT_ in PRODUCTS_CFFEX:
return 'CFFEX'
if PRODUCT_ in PRODUCTS_CZCE:
return 'CZCE'
product_ = product.lower()
if product_ in PRODUCTS_SHFE:
return 'SHFE'
if product_ in PRODUCTS_DCE:
return 'DCE'
return 'Unknown'
def is_shfe(product):
"""
    Whether the product belongs to SHFE
    This extra function mainly exists because SHFE distinguishes closing today's positions from closing yesterday's
:param product:
:return:
"""
product_ = product.lower()
return product_ in PRODUCTS_SHFE
def get_product(symbol):
"""
    Extract the product name from a contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\D{1,2})(\\d{0,1})(\\d{3})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<1>')
else:
return symbol
def get_exchange(symbol):
"""
    Extract the exchange from a dotted contract symbol
:param symbol:
:return:
"""
pattern = re.compile('(\\.)(\\D{1,4})')
match = pattern.match(symbol)
if match:
return match.expand('\\g<2>')
else:
return symbol
if __name__ == '__main__':
import pandas as pd
df = pd.DataFrame({'Symbol': ['IF1603', 'rb1708', '600000']})
df['IsSHFE'] = list(map(is_shfe, map(get_product, df['Symbol'])))
df['product'] = list(map(get_product, df['Symbol']))
df['IsSHFE'] = list(map(is_shfe, df['product']))
print(df)
print(get_product('IF1406'))
print(EXCHANGES_wind_code_xapi.get('SH'))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Handle variables related to contract symbol names
"""
import re
# SHFE (Shanghai Futures Exchange)
PRODUCTS_SHFE = {'cu', 'al', 'zn', 'pb', 'ni', 'sn', 'au', 'ag', 'rb', 'wr', 'hc', 'fu', 'bu', 'ru'}
# CFFEX (China Financial Futures Exchange)
PRODUCTS_CFFEX = {'IF', 'IC', 'IH', 'T', 'TF'}
# CZCE (Zhengzhou Commodity Exchange)
PRODUCTS_CZCE = {'SR', 'CF', 'ZC', 'FG', 'TA', 'WH', 'PM', 'RI', 'LR', 'JR', 'RS', 'OI', 'RM', 'SF', 'SM', 'MA', 'WT',
'WS', 'RO', 'ER', 'ME', 'TC'}
# DCE (Dalian Commodity Exchange)
PRODUCTS_DCE = {'m', 'y', 'a', 'b', 'p', 'c', 'cs', 'jd', 'fb', 'bb', 'l', 'v', 'pp', 'j', 'jm', 'i'}
EXCHANGES_wind_code_xapi = {
'CFE': 'CFFEX',
'SHF': 'SHFE',
'CZC': 'CZCE',
'DCE': 'DCE',
'SH': 'SSE',
'SZ': 'SZSE',
}
EXCHANGES_xapi_wind_code = dict((v, k) for k, v in EXCHANGES_wind_code_xapi.items())
def product_to_exchange(product):
"""
    Convert a contract product code to its exchange
:param product:
:return:
"""
PRODUCT_ = product.upper()
if PRODUCT_ in PRODUCTS_CFFEX:
return 'CFFEX'
if PRODUCT_ in PRODUCTS_CZCE:
return 'CZCE'
product_ = product.lower()
if product_ in PRODUCTS_SHFE:
return 'SHFE'
if product_ in PRODUCTS_DCE:
return 'DCE'
return 'Unknown'
def is_shfe(product):
"""
    Whether the product belongs to SHFE
    This extra function mainly exists because SHFE distinguishes closing today's positions from closing yesterday's
:param product:
:return:
"""
product_ = product.lower()
return product_ in PRODUCTS_SHFE
def get_product(symbol):
"""
    Extract the product name from a contract symbol
:param symbol:
:return:
"""
pattern = re.compile(r'(\D{1,2})(\d{0,1})(\d{3})')
match = pattern.match(symbol)
if match:
return match.expand(r'\g<1>')
else:
return symbol
def get_exchange(symbol):
"""
    Extract the exchange from a dotted contract symbol
:param symbol:
:return:
"""
pattern = re.compile(r'(\.)(\D{1,4})')
match = pattern.match(symbol)
if match:
return match.expand(r'\g<2>')
else:
return symbol
if __name__ == '__main__':
import pandas as pd
df = pd.DataFrame({'Symbol': ['IF1603', 'rb1708','600000']})
df['IsSHFE'] = list(map(is_shfe, map(get_product, df['Symbol'])))
df['product'] = list(map(get_product, df['Symbol']))
df['IsSHFE'] = list(map(is_shfe, df['product']))
print(df)
print(get_product('IF1406'))
print(EXCHANGES_wind_code_xapi.get('SH'))
| [
2,
5,
6,
7,
8
] |
552 | 58aa72588357b18ab42391dfffbf2a1b66589edd | <mask token>
class KV11Z7(Kinetis):
<mask token>
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
| <mask token>
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=
1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=
Flash_Kinetis), RamRegion(start=536866816, length=16384))
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
| <mask token>
FLASH_ALGO = {'load_address': 536870912, 'instructions': [3758800384,
103643149, 604520552, 3539992640, 509886552, 474599930, 704650834,
1198576114, 151200256, 3547546251, 1116408323, 184800088, 3543941771,
1116408835, 3759330081, 1124812291, 570479743, 1116407875, 151245684,
3546235531, 1116408323, 184800068, 3542631051, 1116408835, 587191053,
3121742345, 1116408835, 303223554, 3496280585, 1116408579, 3758150425,
197331465, 3540075147, 448791499, 193151314, 3540075147, 448791435,
188957010, 3540075147, 448791371, 184762706, 3540075147, 448791307,
180568402, 3540075147, 448791243, 176374098, 3540075147, 448791179,
172179794, 3540075147, 448791115, 167985490, 3540075147, 448791051,
3536666962, 1116408259, 30135041, 1095899840, 1116408195, 25940737,
1095899840, 1116408131, 21746433, 1095899840, 1116408067, 17552129,
1095899840, 1116408003, 13357825, 1095899840, 1116407939, 9163521,
1095899840, 1116407875, 4969217, 1095899840, 3523222081, 1095910913,
1198540304, 264953949, 1112133632, 3539996675, 1079198272, 1184637440,
1116408067, 168022829, 3541189259, 25764604, 168016402, 3540796043,
294781321, 3540533899, 294781321, 3540271755, 3493462409, 3758100882,
163776905, 3540075147, 448790987, 159596882, 3540075147, 448790923,
155402578, 3540075147, 448790859, 151208274, 3540075147, 448790795,
147013970, 3540075147, 448790731, 142819666, 3540075147, 448790667,
3537453394, 1116407875, 4969217, 1095899840, 3523222081, 1180911105,
274415954, 3540076048, 721437248, 1112134912, 1180911472, 3539996763,
3036758592, 1186996224, 3171043008, 3037743114, 1145587976, 4177326080,
3506710528, 553666566, 4026549320, 1241905609, 587294929, 1125712539,
3171967185, 1801807467, 4, 4026544128, 1275901296, 1145849349,
1259030017, 1759659552, 4171558912, 3506776064, 1177101056, 1759659552,
4184797184, 1758087429, 43721473, 1623868186, 48496, 4, 1801807467,
4026544128, 1208595728, 2176928007, 2176928008, 139036673, 2147549257,
1145587718, 4176670720, 3489671168, 3171950593, 50464, 1074077696,
55592, 4, 1175238000, 1174816267, 1208829441, 1175826564, 4026549320,
671152387, 2416038154, 553750530, 2432714759, 1176651307, 1145587249,
4185452544, 1758087428, 43721473, 1623868186, 3178278916, 4, 4026544128,
1198530560, 3490195456, 3624020239, 3506711044, 689168389, 705222657,
537186305, 536889200, 671106928, 537186561, 3020965744, 1175199323,
3506520604, 3489808922, 543538192, 1745045360, 3624157835, 411658304,
1116215320, 3155218946, 1198530662, 536919056, 18288, 1116227843,
543936513, 536889200, 18288, 1801807467, 561006602, 562065409,
2013360129, 3590063625, 109148160, 543675649, 113330032, 543741185,
130041712, 543805692, 18288, 1073872896, 1174779384, 1175209494,
4160701976, 671154135, 587518251, 1176585778, 4160701992, 524210,
430362915, 1760108150, 2432714288, 4264359935, 3489868032, 473996800,
511066950, 3625140916, 1148733450, 1610901504, 537479433, 4160713160,
1174929343, 671115688, 1199624192, 3506646784, 419719400, 3656073908,
3187164728, 634, 1073872896, 1174713616, 4160701960, 671154079,
738251014, 1225052165, 1908940868, 4288739327, 537181456, 48400,
1073872896, 3490457600, 3490327040, 3524929800, 1148911627, 417036571,
117785759, 218827015, 537137423, 1757431664, 1749082122, 1753276424,
1744887814, 536993796, 1761665026, 1765859328, 536895504, 543836016,
18288, 3490326528, 1758021903, 235489807, 1148846153, 50944593,
543478019, 537151344, 3020965744, 1610818304, 553738305, 42557569,
1225220289, 361396748, 1627537570, 1636006601, 3155190081, 1198540312,
1074036800, 426, 1073872928, 3506514432, 1198530564, 1176286719,
1175826561, 587482638, 2550220322, 4279891967, 3508076551, 3491113984,
1148733452, 1611556865, 1744882946, 1225416769, 1908940806, 4281399295,
2550220295, 671115648, 1199624192, 3506581248, 522460470, 1178128872,
3186667525, 354, 1073872896, 3489802240, 536895873, 537151344, 18288,
2961290751, 1175340564, 587482629, 4276615167, 3509200896, 1751673001,
4251252735, 1114738688, 1073824320, 1119306319, 2550190337, 738203711,
465424410, 3640672933, 1225606693, 1148782760, 1611556873, 570509579,
167932362, 1921544906, 1917360132, 4276811775, 3506841600, 459577344,
406788470, 3521391616, 2953125888, 48624, 218, 1073872896, 3506513920,
1198530564, 1208202512, 1908548160, 4160713089, 3172007631, 1073872896,
3506514688, 1198530564, 1176286712, 1175340565, 2668110596, 4271503359,
3508086784, 3491179776, 1148865039, 1611032592, 553797646, 2567336385,
1745973953, 1619552288, 4272879615, 3490195456, 687905031, 1611583488,
3489738496, 1614356736, 523091448, 490085668, 3521195264, 48632, 98,
1073872896, 262146, 524288, 1048576, 2097152, 4194304, 0, 0, 2097152,
1073872900, 0], 'pc_init': 536871549, 'pc_unInit': 536871673,
'pc_program_page': 536871601, 'pc_erase_sector': 536871485,
'pc_eraseAll': 536871433, 'static_base': 536870912 + 32 + 1596,
'begin_stack': 536870912 + 2048, 'begin_data': 536870912 + 2560,
'page_buffers': [536873472, 536875520], 'min_program_length': 4,
'analyzer_supported': True, 'analyzer_address': 536868864}
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=
1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=
Flash_Kinetis), RamRegion(start=536866816, length=16384))
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
| from ..family.target_kinetis import Kinetis
from ..family.flash_kinetis import Flash_Kinetis
from ...core.memory_map import FlashRegion, RamRegion, MemoryMap
from ...debug.svd.loader import SVDFile
FLASH_ALGO = {'load_address': 536870912, 'instructions': [3758800384,
103643149, 604520552, 3539992640, 509886552, 474599930, 704650834,
1198576114, 151200256, 3547546251, 1116408323, 184800088, 3543941771,
1116408835, 3759330081, 1124812291, 570479743, 1116407875, 151245684,
3546235531, 1116408323, 184800068, 3542631051, 1116408835, 587191053,
3121742345, 1116408835, 303223554, 3496280585, 1116408579, 3758150425,
197331465, 3540075147, 448791499, 193151314, 3540075147, 448791435,
188957010, 3540075147, 448791371, 184762706, 3540075147, 448791307,
180568402, 3540075147, 448791243, 176374098, 3540075147, 448791179,
172179794, 3540075147, 448791115, 167985490, 3540075147, 448791051,
3536666962, 1116408259, 30135041, 1095899840, 1116408195, 25940737,
1095899840, 1116408131, 21746433, 1095899840, 1116408067, 17552129,
1095899840, 1116408003, 13357825, 1095899840, 1116407939, 9163521,
1095899840, 1116407875, 4969217, 1095899840, 3523222081, 1095910913,
1198540304, 264953949, 1112133632, 3539996675, 1079198272, 1184637440,
1116408067, 168022829, 3541189259, 25764604, 168016402, 3540796043,
294781321, 3540533899, 294781321, 3540271755, 3493462409, 3758100882,
163776905, 3540075147, 448790987, 159596882, 3540075147, 448790923,
155402578, 3540075147, 448790859, 151208274, 3540075147, 448790795,
147013970, 3540075147, 448790731, 142819666, 3540075147, 448790667,
3537453394, 1116407875, 4969217, 1095899840, 3523222081, 1180911105,
274415954, 3540076048, 721437248, 1112134912, 1180911472, 3539996763,
3036758592, 1186996224, 3171043008, 3037743114, 1145587976, 4177326080,
3506710528, 553666566, 4026549320, 1241905609, 587294929, 1125712539,
3171967185, 1801807467, 4, 4026544128, 1275901296, 1145849349,
1259030017, 1759659552, 4171558912, 3506776064, 1177101056, 1759659552,
4184797184, 1758087429, 43721473, 1623868186, 48496, 4, 1801807467,
4026544128, 1208595728, 2176928007, 2176928008, 139036673, 2147549257,
1145587718, 4176670720, 3489671168, 3171950593, 50464, 1074077696,
55592, 4, 1175238000, 1174816267, 1208829441, 1175826564, 4026549320,
671152387, 2416038154, 553750530, 2432714759, 1176651307, 1145587249,
4185452544, 1758087428, 43721473, 1623868186, 3178278916, 4, 4026544128,
1198530560, 3490195456, 3624020239, 3506711044, 689168389, 705222657,
537186305, 536889200, 671106928, 537186561, 3020965744, 1175199323,
3506520604, 3489808922, 543538192, 1745045360, 3624157835, 411658304,
1116215320, 3155218946, 1198530662, 536919056, 18288, 1116227843,
543936513, 536889200, 18288, 1801807467, 561006602, 562065409,
2013360129, 3590063625, 109148160, 543675649, 113330032, 543741185,
130041712, 543805692, 18288, 1073872896, 1174779384, 1175209494,
4160701976, 671154135, 587518251, 1176585778, 4160701992, 524210,
430362915, 1760108150, 2432714288, 4264359935, 3489868032, 473996800,
511066950, 3625140916, 1148733450, 1610901504, 537479433, 4160713160,
1174929343, 671115688, 1199624192, 3506646784, 419719400, 3656073908,
3187164728, 634, 1073872896, 1174713616, 4160701960, 671154079,
738251014, 1225052165, 1908940868, 4288739327, 537181456, 48400,
1073872896, 3490457600, 3490327040, 3524929800, 1148911627, 417036571,
117785759, 218827015, 537137423, 1757431664, 1749082122, 1753276424,
1744887814, 536993796, 1761665026, 1765859328, 536895504, 543836016,
18288, 3490326528, 1758021903, 235489807, 1148846153, 50944593,
543478019, 537151344, 3020965744, 1610818304, 553738305, 42557569,
1225220289, 361396748, 1627537570, 1636006601, 3155190081, 1198540312,
1074036800, 426, 1073872928, 3506514432, 1198530564, 1176286719,
1175826561, 587482638, 2550220322, 4279891967, 3508076551, 3491113984,
1148733452, 1611556865, 1744882946, 1225416769, 1908940806, 4281399295,
2550220295, 671115648, 1199624192, 3506581248, 522460470, 1178128872,
3186667525, 354, 1073872896, 3489802240, 536895873, 537151344, 18288,
2961290751, 1175340564, 587482629, 4276615167, 3509200896, 1751673001,
4251252735, 1114738688, 1073824320, 1119306319, 2550190337, 738203711,
465424410, 3640672933, 1225606693, 1148782760, 1611556873, 570509579,
167932362, 1921544906, 1917360132, 4276811775, 3506841600, 459577344,
406788470, 3521391616, 2953125888, 48624, 218, 1073872896, 3506513920,
1198530564, 1208202512, 1908548160, 4160713089, 3172007631, 1073872896,
3506514688, 1198530564, 1176286712, 1175340565, 2668110596, 4271503359,
3508086784, 3491179776, 1148865039, 1611032592, 553797646, 2567336385,
1745973953, 1619552288, 4272879615, 3490195456, 687905031, 1611583488,
3489738496, 1614356736, 523091448, 490085668, 3521195264, 48632, 98,
1073872896, 262146, 524288, 1048576, 2097152, 4194304, 0, 0, 2097152,
1073872900, 0], 'pc_init': 536871549, 'pc_unInit': 536871673,
'pc_program_page': 536871601, 'pc_erase_sector': 536871485,
'pc_eraseAll': 536871433, 'static_base': 536870912 + 32 + 1596,
'begin_stack': 536870912 + 2048, 'begin_data': 536870912 + 2560,
'page_buffers': [536873472, 536875520], 'min_program_length': 4,
'analyzer_supported': True, 'analyzer_address': 536868864}
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=
1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=
Flash_Kinetis), RamRegion(start=536866816, length=16384))
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
| # pyOCD debugger
# Copyright (c) 2006-2013,2018 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..family.target_kinetis import Kinetis
from ..family.flash_kinetis import Flash_Kinetis
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ...debug.svd.loader import SVDFile
FLASH_ALGO = { 'load_address' : 0x20000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x09032200, 0xd373428b, 0x428b0a03, 0x0b03d358, 0xd33c428b, 0x428b0c03, 0xe012d321, 0x430b4603,
0x2200d47f, 0x428b0843, 0x0903d374, 0xd35f428b, 0x428b0a03, 0x0b03d344, 0xd328428b, 0x428b0c03,
0x22ffd30d, 0xba120209, 0x428b0c03, 0x1212d302, 0xd0650209, 0x428b0b03, 0xe000d319, 0x0bc30a09,
0xd301428b, 0x1ac003cb, 0x0b834152, 0xd301428b, 0x1ac0038b, 0x0b434152, 0xd301428b, 0x1ac0034b,
0x0b034152, 0xd301428b, 0x1ac0030b, 0x0ac34152, 0xd301428b, 0x1ac002cb, 0x0a834152, 0xd301428b,
0x1ac0028b, 0x0a434152, 0xd301428b, 0x1ac0024b, 0x0a034152, 0xd301428b, 0x1ac0020b, 0xd2cd4152,
0x428b09c3, 0x01cbd301, 0x41521ac0, 0x428b0983, 0x018bd301, 0x41521ac0, 0x428b0943, 0x014bd301,
0x41521ac0, 0x428b0903, 0x010bd301, 0x41521ac0, 0x428b08c3, 0x00cbd301, 0x41521ac0, 0x428b0883,
0x008bd301, 0x41521ac0, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41, 0x41524601, 0x47704610,
0x0fcae05d, 0x4249d000, 0xd3001003, 0x40534240, 0x469c2200, 0x428b0903, 0x0a03d32d, 0xd312428b,
0x018922fc, 0x0a03ba12, 0xd30c428b, 0x11920189, 0xd308428b, 0x11920189, 0xd304428b, 0xd03a0189,
0xe0001192, 0x09c30989, 0xd301428b, 0x1ac001cb, 0x09834152, 0xd301428b, 0x1ac0018b, 0x09434152,
0xd301428b, 0x1ac0014b, 0x09034152, 0xd301428b, 0x1ac0010b, 0x08c34152, 0xd301428b, 0x1ac000cb,
0x08834152, 0xd301428b, 0x1ac0008b, 0xd2d94152, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41,
0x46634601, 0x105b4152, 0xd3014610, 0x2b004240, 0x4249d500, 0x46634770, 0xd300105b, 0xb5014240,
0x46c02000, 0xbd0246c0, 0xb510480a, 0x44484908, 0xf8fcf000, 0xd1042800, 0x21004806, 0xf0004448,
0x4a05f9c9, 0x230168d1, 0x4319029b, 0xbd1060d1, 0x6b65666b, 0x00000004, 0xf0003000, 0x4c0cb570,
0x444c4605, 0x4b0b4601, 0x68e24620, 0xf8a4f000, 0xd1052800, 0x46292300, 0x68e24620, 0xf96ef000,
0x68ca4905, 0x029b2301, 0x60ca431a, 0x0000bd70, 0x00000004, 0x6b65666b, 0xf0003000, 0x4809b510,
0x81c14907, 0x81c14908, 0x08498801, 0x80010049, 0x44484806, 0xf8f2f000, 0xd0002800, 0xbd102001,
0x0000c520, 0x40052000, 0x0000d928, 0x00000004, 0x460cb570, 0x4606460b, 0x480d4601, 0x4615b084,
0xf0004448, 0x2800f903, 0x9001d10a, 0x21019002, 0x91004807, 0x4622462b, 0x44484631, 0xf978f000,
0x68ca4904, 0x029b2301, 0x60ca431a, 0xbd70b004, 0x00000004, 0xf0003000, 0x47702000, 0xd0082800,
0xd802290f, 0xd1042a04, 0x2913e005, 0x2a08d801, 0x2004d001, 0x20004770, 0x28004770, 0x2004d101,
0xb4104770, 0x460c1e5b, 0xd101421c, 0xd002421a, 0x2065bc10, 0x68034770, 0xd804428b, 0x18896840,
0x42881818, 0xbc10d202, 0x47702066, 0x2000bc10, 0x00004770, 0x42884903, 0x206bd001, 0x20004770,
0x00004770, 0x6b65666b, 0x2170480a, 0x21807001, 0x78017001, 0xd5fc0609, 0x06817800, 0x2067d501,
0x06c14770, 0x2068d501, 0x07c04770, 0x2069d0fc, 0x00004770, 0x40020000, 0x4605b5f8, 0x460c4616,
0xf7ff4618, 0x2800ffd7, 0x2304d12b, 0x46214632, 0xf7ff4628, 0x0007ffb2, 0x19a6d123, 0x68e91e76,
0x91004630, 0xfe2cf7ff, 0xd0032900, 0x1c409e00, 0x1e764346, 0xd81342b4, 0x4478480a, 0x60046800,
0x20094909, 0xf7ff71c8, 0x4607ffbf, 0x280069a8, 0x4780d000, 0xd1032f00, 0x190468e8, 0xd9eb42b4,
0xbdf84638, 0x0000027a, 0x40020000, 0x4604b510, 0xf7ff4608, 0x2800ff9f, 0x2c00d106, 0x4904d005,
0x71c82044, 0xffa0f7ff, 0x2004bd10, 0x0000bd10, 0x40020000, 0xd00c2800, 0xd00a2a00, 0xd21a2908,
0x447b000b, 0x18db791b, 0x0705449f, 0x0d0b0907, 0x2004110f, 0x68c04770, 0x6840e00a, 0x6880e008,
0x6800e006, 0x2001e004, 0x6900e002, 0x6940e000, 0x20006010, 0x206a4770, 0x00004770, 0xd00a2800,
0x68c9490f, 0x0e094a0f, 0x447a0049, 0x03095a51, 0x2064d103, 0x20044770, 0xb4104770, 0x60032300,
0x21016041, 0x02896081, 0x490760c1, 0x158a7a0c, 0x610240a2, 0x61837ac9, 0xbc106141, 0x47704618,
0x40048040, 0x000001aa, 0x40020020, 0xd1012a00, 0x47702004, 0x461cb5ff, 0x4615b081, 0x2304460e,
0x98014622, 0xff19f7ff, 0xd1190007, 0xd0162c00, 0x4478480c, 0x600e6801, 0x6800cd02, 0x490a6041,
0x71c82006, 0xff30f7ff, 0x98014607, 0x28006980, 0x4780d000, 0xd1022f00, 0x1f241d36, 0x4638d1e8,
0xbdf0b005, 0x00000162, 0x40020000, 0xd0022800, 0x20006181, 0x20044770, 0x00004770, 0xb081b5ff,
0x460e4614, 0x23044605, 0xfee7f7ff, 0xd12a2800, 0x686868a9, 0xfd64f7ff, 0x42719000, 0x40014240,
0x42b7424f, 0x9800d101, 0x2c00183f, 0x1bbdd01a, 0xd90042a5, 0x490d4625, 0x447908a8, 0x600e6809,
0x2201490b, 0x0a0271ca, 0x728872ca, 0x72489804, 0xfeeaf7ff, 0xd1062800, 0x1b649800, 0x183f1976,
0xd1e42c00, 0xb0052000, 0x0000bdf0, 0x000000da, 0x40020000, 0xd1012800, 0x47702004, 0x4803b510,
0x71c22240, 0xf7ff7181, 0xbd10fecf, 0x40020000, 0xd1012b00, 0x47702004, 0x461cb5f8, 0x460e4615,
0x9f082304, 0xfe99f7ff, 0xd1192800, 0xd0172d00, 0x447a4a0f, 0x60066810, 0x2102480e, 0x990671c1,
0x681172c1, 0x60886820, 0xfeaef7ff, 0xd0082800, 0x29009907, 0x600ed000, 0xd0012f00, 0x60392100,
0x1f2dbdf8, 0x1d361d24, 0xd1e12d00, 0x0000bdf8, 0x00000062, 0x40020000, 0x00040002, 0x00080000,
0x00100000, 0x00200000, 0x00400000, 0x00000000, 0x00000000, 0x00200000, 0x40020004, 0x00000000,
],
'pc_init' : 0x2000027D,
'pc_unInit': 0x200002F9,
'pc_program_page': 0x200002B1,
'pc_erase_sector': 0x2000023D,
'pc_eraseAll' : 0x20000209,
'static_base' : 0x20000000 + 0x00000020 + 0x0000063c,
'begin_stack' : 0x20000000 + 0x00000800,
'begin_data' : 0x20000000 + 0x00000A00,
'page_buffers' : [0x20000a00, 0x20001200], # Enable double buffering
'min_program_length' : 4,
'analyzer_supported' : True,
'analyzer_address' : 0x1ffff800
}
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(
FlashRegion( start=0, length=0x20000, blocksize=0x400, is_boot_memory=True,
algo=FLASH_ALGO, flash_class=Flash_Kinetis),
RamRegion( start=0x1ffff000, length=0x4000)
)
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin("MKV11Z7.svd")
| [
2,
3,
4,
5,
6
] |
553 | 18c2fe40b51ad1489d55aa2be068a1c4f381a2a5 | <mask token>
| <mask token>
class Acoount(models.Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
| <mask token>
class Acoount(models.Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def __str__(self):
return self.first_name + ' ' + self.last_name
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
| import datetime
from django.db import models
from django.utils import timezone
class Acoount(models.Model):
first_name = models.CharField("Ім'я", max_length=50)
last_name = models.CharField('Прізвище', max_length=50)
username = models.CharField('Псевдонім', max_length=50)
email = models.CharField('Електронна почта', max_length=16)
password = models.CharField('Пароль', max_length=16)
def __str__(self):
return self.first_name + ' ' + self.last_name
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
| import datetime
from django.db import models
from django.utils import timezone
class Acoount(models.Model):
first_name = models.CharField("Ім\'я", max_length=50)
last_name = models.CharField('Прізвище', max_length=50)
username = models.CharField('Псевдонім', max_length=50)
email = models.CharField('Електронна почта', max_length=16)
password = models.CharField('Пароль', max_length=16)
def __str__(self):
return self.first_name + ' ' + self.last_name
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти' | [
0,
1,
2,
4,
5
] |
554 | 3f8c13be547099aa6612365452926db95828b9a0 | <mask token>
| <mask token>
setup(name='RedHatSecurityAdvisory', version='0.1', description=
'Script that automatically checks the RedHat security advisories to see if a CVE applies'
, author='Pieter-Jan Moreels', url=
'https://github.com/PidgeyL/RedHat-Advisory-Checker', entry_points={
'console_scripts': ['rhsa = RHSA:redhatAdvisory.main']}, packages=[
'RHSA'], license='Modified BSD license')
| from setuptools import setup
setup(name='RedHatSecurityAdvisory', version='0.1', description=
'Script that automatically checks the RedHat security advisories to see if a CVE applies'
, author='Pieter-Jan Moreels', url=
'https://github.com/PidgeyL/RedHat-Advisory-Checker', entry_points={
'console_scripts': ['rhsa = RHSA:redhatAdvisory.main']}, packages=[
'RHSA'], license='Modified BSD license')
| from setuptools import setup
setup(name='RedHatSecurityAdvisory',
version='0.1',
description='Script that automatically checks the RedHat security advisories to see if a CVE applies',
author='Pieter-Jan Moreels',
url='https://github.com/PidgeyL/RedHat-Advisory-Checker',
entry_points={'console_scripts': ['rhsa = RHSA:redhatAdvisory.main']},
packages=['RHSA'],
license="Modified BSD license",
)
| null | [
0,
1,
2,
3
] |
555 | 5ef65ace397be17be62625ed27b5753d15565d61 | <mask token>
| <mask token>
class DatasetFileManager(ABC):
<mask token>
| <mask token>
class DatasetFileManager(ABC):
@abstractmethod
def read_dataset(self):
pass
| from abc import ABC, abstractmethod
class DatasetFileManager(ABC):
@abstractmethod
def read_dataset(self):
pass
| null | [
0,
1,
2,
3
] |
556 | 236dd70dec8d53062d6c38c370cb8f11dc5ef9d0 | <mask token>
class Dice2(Pmf):
<mask token>
<mask token>
| <mask token>
class Dice2(Pmf):
def __init__(self, sides):
Pmf.__init__(self)
for x in range(1, sides + 1):
self.Set(x, 1)
self.Normalize()
<mask token>
| <mask token>
class Dice2(Pmf):
def __init__(self, sides):
Pmf.__init__(self)
for x in range(1, sides + 1):
self.Set(x, 1)
self.Normalize()
if __name__ == '__main__':
d6 = Dice2(6)
dices = [d6] * 6
three = thinkbayes.SampleSum(dices, 1000)
thinkplot.Pmf(three)
| import thinkbayes2 as thinkbayes
from thinkbayes2 import Pmf
import thinkplot
class Dice2(Pmf):
def __init__(self, sides):
Pmf.__init__(self)
for x in range(1, sides + 1):
self.Set(x, 1)
self.Normalize()
if __name__ == '__main__':
d6 = Dice2(6)
dices = [d6] * 6
three = thinkbayes.SampleSum(dices, 1000)
thinkplot.Pmf(three)
| import thinkbayes2 as thinkbayes
from thinkbayes2 import Pmf
import thinkplot
class Dice2(Pmf):
def __init__(self, sides):
Pmf.__init__(self)
for x in range(1, sides + 1):
self.Set(x, 1)
self.Normalize()
if __name__ == "__main__":
d6 = Dice2(6)
dices = [d6] * 6
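    # Monte Carlo estimate of the distribution of the sum of six d6 rolls (1000 draws)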
three = thinkbayes.SampleSum(dices, 1000)
thinkplot.Pmf(three)
| [
1,
2,
3,
4,
5
] |
557 | b9bd1c0f4a5d2e6eeb75ba4f27d33ad5fb22530e | <mask token>
class DepositCompleteResponse(object):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
@property
def currency(self):
"""Gets the currency of this DepositCompleteResponse. # noqa: E501
        Uppercase English code identifying the currency  # noqa: E501
:return: The currency of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._currency
<mask token>
@property
def deposit_address(self):
"""Gets the deposit_address of this DepositCompleteResponse. # noqa: E501
        Deposit address  # noqa: E501
:return: The deposit_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._deposit_address
<mask token>
@property
def secondary_address(self):
"""Gets the secondary_address of this DepositCompleteResponse. # noqa: E501
        Secondary deposit address  # noqa: E501
:return: The secondary_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._secondary_address
<mask token>
<mask token>
<mask token>
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
<mask token>
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| <mask token>
class DepositCompleteResponse(object):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
@property
def currency(self):
"""Gets the currency of this DepositCompleteResponse. # noqa: E501
        Uppercase English code identifying the currency  # noqa: E501
:return: The currency of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._currency
<mask token>
@property
def deposit_address(self):
"""Gets the deposit_address of this DepositCompleteResponse. # noqa: E501
        Deposit address  # noqa: E501
:return: The deposit_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._deposit_address
<mask token>
@property
def secondary_address(self):
"""Gets the secondary_address of this DepositCompleteResponse. # noqa: E501
        Secondary deposit address  # noqa: E501
:return: The secondary_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._secondary_address
@secondary_address.setter
def secondary_address(self, secondary_address):
"""Sets the secondary_address of this DepositCompleteResponse.
        Secondary deposit address # noqa: E501
:param secondary_address: The secondary_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._secondary_address = secondary_address
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,
'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].
to_dict()) if hasattr(item[1], 'to_dict') else item,
value.items()))
else:
result[attr] = value
if issubclass(DepositCompleteResponse, dict):
for key, value in self.items():
result[key] = value
return result
<mask token>
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DepositCompleteResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| <mask token>
class DepositCompleteResponse(object):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
@property
def currency(self):
"""Gets the currency of this DepositCompleteResponse. # noqa: E501
        Uppercase English code identifying the currency # noqa: E501
:return: The currency of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._currency
@currency.setter
def currency(self, currency):
"""Sets the currency of this DepositCompleteResponse.
        Uppercase English code identifying the currency # noqa: E501
:param currency: The currency of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._currency = currency
@property
def deposit_address(self):
"""Gets the deposit_address of this DepositCompleteResponse. # noqa: E501
        Deposit address # noqa: E501
:return: The deposit_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._deposit_address
@deposit_address.setter
def deposit_address(self, deposit_address):
"""Sets the deposit_address of this DepositCompleteResponse.
        Deposit address # noqa: E501
:param deposit_address: The deposit_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._deposit_address = deposit_address
@property
def secondary_address(self):
"""Gets the secondary_address of this DepositCompleteResponse. # noqa: E501
        Secondary deposit address # noqa: E501
:return: The secondary_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._secondary_address
@secondary_address.setter
def secondary_address(self, secondary_address):
"""Sets the secondary_address of this DepositCompleteResponse.
        Secondary deposit address # noqa: E501
:param secondary_address: The secondary_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._secondary_address = secondary_address
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,
'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].
to_dict()) if hasattr(item[1], 'to_dict') else item,
value.items()))
else:
result[attr] = value
if issubclass(DepositCompleteResponse, dict):
for key, value in self.items():
result[key] = value
return result
<mask token>
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DepositCompleteResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| <mask token>
import pprint
import re
import six
class DepositCompleteResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {'currency': 'str', 'deposit_address': 'str',
'secondary_address': 'str'}
attribute_map = {'currency': 'currency', 'deposit_address':
'deposit_address', 'secondary_address': 'secondary_address'}
def __init__(self, currency=None, deposit_address=None,
secondary_address=None):
"""DepositCompleteResponse - a model defined in Swagger"""
self._currency = None
self._deposit_address = None
self._secondary_address = None
self.discriminator = None
if currency is not None:
self.currency = currency
if deposit_address is not None:
self.deposit_address = deposit_address
if secondary_address is not None:
self.secondary_address = secondary_address
@property
def currency(self):
"""Gets the currency of this DepositCompleteResponse. # noqa: E501
        Uppercase English code identifying the currency # noqa: E501
:return: The currency of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._currency
@currency.setter
def currency(self, currency):
"""Sets the currency of this DepositCompleteResponse.
        Uppercase English code identifying the currency # noqa: E501
:param currency: The currency of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._currency = currency
@property
def deposit_address(self):
"""Gets the deposit_address of this DepositCompleteResponse. # noqa: E501
        Deposit address # noqa: E501
:return: The deposit_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._deposit_address
@deposit_address.setter
def deposit_address(self, deposit_address):
"""Sets the deposit_address of this DepositCompleteResponse.
        Deposit address # noqa: E501
:param deposit_address: The deposit_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._deposit_address = deposit_address
@property
def secondary_address(self):
"""Gets the secondary_address of this DepositCompleteResponse. # noqa: E501
        Secondary deposit address # noqa: E501
:return: The secondary_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._secondary_address
@secondary_address.setter
def secondary_address(self, secondary_address):
"""Sets the secondary_address of this DepositCompleteResponse.
        Secondary deposit address # noqa: E501
:param secondary_address: The secondary_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._secondary_address = secondary_address
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,
'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].
to_dict()) if hasattr(item[1], 'to_dict') else item,
value.items()))
else:
result[attr] = value
if issubclass(DepositCompleteResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DepositCompleteResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| # coding: utf-8
"""
Upbit Open API
## REST API for Upbit Exchange - Base URL: [https://api.upbit.com] - Official Upbit API Documents: [https://docs.upbit.com] - Official Support email: [[email protected]] # noqa: E501
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DepositCompleteResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'currency': 'str',
'deposit_address': 'str',
'secondary_address': 'str'
}
attribute_map = {
'currency': 'currency',
'deposit_address': 'deposit_address',
'secondary_address': 'secondary_address'
}
def __init__(self, currency=None, deposit_address=None, secondary_address=None): # noqa: E501
"""DepositCompleteResponse - a model defined in Swagger""" # noqa: E501
self._currency = None
self._deposit_address = None
self._secondary_address = None
self.discriminator = None
if currency is not None:
self.currency = currency
if deposit_address is not None:
self.deposit_address = deposit_address
if secondary_address is not None:
self.secondary_address = secondary_address
@property
def currency(self):
"""Gets the currency of this DepositCompleteResponse. # noqa: E501
        Uppercase English code identifying the currency # noqa: E501
:return: The currency of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._currency
@currency.setter
def currency(self, currency):
"""Sets the currency of this DepositCompleteResponse.
        Uppercase English code identifying the currency # noqa: E501
:param currency: The currency of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._currency = currency
@property
def deposit_address(self):
"""Gets the deposit_address of this DepositCompleteResponse. # noqa: E501
        Deposit address # noqa: E501
:return: The deposit_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._deposit_address
@deposit_address.setter
def deposit_address(self, deposit_address):
"""Sets the deposit_address of this DepositCompleteResponse.
        Deposit address # noqa: E501
:param deposit_address: The deposit_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._deposit_address = deposit_address
@property
def secondary_address(self):
"""Gets the secondary_address of this DepositCompleteResponse. # noqa: E501
        Secondary deposit address # noqa: E501
:return: The secondary_address of this DepositCompleteResponse. # noqa: E501
:rtype: str
"""
return self._secondary_address
@secondary_address.setter
def secondary_address(self, secondary_address):
"""Sets the secondary_address of this DepositCompleteResponse.
        Secondary deposit address # noqa: E501
:param secondary_address: The secondary_address of this DepositCompleteResponse. # noqa: E501
:type: str
"""
self._secondary_address = secondary_address
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DepositCompleteResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DepositCompleteResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
6,
9,
11,
16,
17
] |
558 | d1dc807ecc92d9108db2c9bd00ee9781e174a1aa | <mask token>
class EntityEmailerInterface(object):
<mask token>
<mask token>
@staticmethod
def convert_events_to_emails():
"""
Converts unseen events to emails and marks them as seen.
"""
email_medium = get_medium()
default_from_email = get_from_email_address()
for event, targets in email_medium.events_targets(seen=False,
mark_seen=True):
from_address = event.context.get('from_address'
) or default_from_email
Email.objects.create_email(event=event, from_address=
from_address, recipients=targets)
| <mask token>
class EntityEmailerInterface(object):
<mask token>
@staticmethod
def send_unsent_scheduled_emails():
"""
Send out any scheduled emails that are unsent
"""
current_time = datetime.utcnow()
email_medium = get_medium()
to_send = Email.objects.filter(scheduled__lte=current_time,
sent__isnull=True).select_related('event').prefetch_related(
'recipients')
context_loader.load_contexts_and_renderers([e.event for e in
to_send], [email_medium])
emails = []
for email in to_send:
to_email_addresses = get_subscribed_email_addresses(email)
if to_email_addresses:
text_message, html_message = email.render(email_medium)
message = create_email_message(to_emails=to_email_addresses,
from_email=email.from_address or get_from_email_address
(), subject=email.subject or
extract_email_subject_from_html_content(html_message),
text=text_message, html=html_message)
emails.append(message)
connection = mail.get_connection()
connection.send_messages(emails)
to_send.update(sent=current_time)
@staticmethod
def convert_events_to_emails():
"""
Converts unseen events to emails and marks them as seen.
"""
email_medium = get_medium()
default_from_email = get_from_email_address()
for event, targets in email_medium.events_targets(seen=False,
mark_seen=True):
from_address = event.context.get('from_address'
) or default_from_email
Email.objects.create_email(event=event, from_address=
from_address, recipients=targets)
| <mask token>
class EntityEmailerInterface(object):
"""
An api interface to do things within entity emailer
"""
@staticmethod
def send_unsent_scheduled_emails():
"""
Send out any scheduled emails that are unsent
"""
current_time = datetime.utcnow()
email_medium = get_medium()
to_send = Email.objects.filter(scheduled__lte=current_time,
sent__isnull=True).select_related('event').prefetch_related(
'recipients')
context_loader.load_contexts_and_renderers([e.event for e in
to_send], [email_medium])
emails = []
for email in to_send:
to_email_addresses = get_subscribed_email_addresses(email)
if to_email_addresses:
text_message, html_message = email.render(email_medium)
message = create_email_message(to_emails=to_email_addresses,
from_email=email.from_address or get_from_email_address
(), subject=email.subject or
extract_email_subject_from_html_content(html_message),
text=text_message, html=html_message)
emails.append(message)
connection = mail.get_connection()
connection.send_messages(emails)
to_send.update(sent=current_time)
@staticmethod
def convert_events_to_emails():
"""
Converts unseen events to emails and marks them as seen.
"""
email_medium = get_medium()
default_from_email = get_from_email_address()
for event, targets in email_medium.events_targets(seen=False,
mark_seen=True):
from_address = event.context.get('from_address'
) or default_from_email
Email.objects.create_email(event=event, from_address=
from_address, recipients=targets)
| from datetime import datetime
from django.core import mail
from entity_event import context_loader
from entity_emailer.models import Email
from entity_emailer.utils import get_medium, get_from_email_address, get_subscribed_email_addresses, create_email_message, extract_email_subject_from_html_content
class EntityEmailerInterface(object):
"""
An api interface to do things within entity emailer
"""
@staticmethod
def send_unsent_scheduled_emails():
"""
Send out any scheduled emails that are unsent
"""
current_time = datetime.utcnow()
email_medium = get_medium()
to_send = Email.objects.filter(scheduled__lte=current_time,
sent__isnull=True).select_related('event').prefetch_related(
'recipients')
context_loader.load_contexts_and_renderers([e.event for e in
to_send], [email_medium])
emails = []
for email in to_send:
to_email_addresses = get_subscribed_email_addresses(email)
if to_email_addresses:
text_message, html_message = email.render(email_medium)
message = create_email_message(to_emails=to_email_addresses,
from_email=email.from_address or get_from_email_address
(), subject=email.subject or
extract_email_subject_from_html_content(html_message),
text=text_message, html=html_message)
emails.append(message)
connection = mail.get_connection()
connection.send_messages(emails)
to_send.update(sent=current_time)
@staticmethod
def convert_events_to_emails():
"""
Converts unseen events to emails and marks them as seen.
"""
email_medium = get_medium()
default_from_email = get_from_email_address()
for event, targets in email_medium.events_targets(seen=False,
mark_seen=True):
from_address = event.context.get('from_address'
) or default_from_email
Email.objects.create_email(event=event, from_address=
from_address, recipients=targets)
| from datetime import datetime
from django.core import mail
from entity_event import context_loader
from entity_emailer.models import Email
from entity_emailer.utils import get_medium, get_from_email_address, get_subscribed_email_addresses, \
create_email_message, extract_email_subject_from_html_content
class EntityEmailerInterface(object):
"""
An api interface to do things within entity emailer
"""
@staticmethod
def send_unsent_scheduled_emails():
"""
Send out any scheduled emails that are unsent
"""
current_time = datetime.utcnow()
email_medium = get_medium()
to_send = Email.objects.filter(
scheduled__lte=current_time,
sent__isnull=True
).select_related(
'event'
).prefetch_related(
'recipients'
)
# Fetch the contexts of every event so that they may be rendered
context_loader.load_contexts_and_renderers([e.event for e in to_send], [email_medium])
emails = []
for email in to_send:
to_email_addresses = get_subscribed_email_addresses(email)
if to_email_addresses:
text_message, html_message = email.render(email_medium)
message = create_email_message(
to_emails=to_email_addresses,
from_email=email.from_address or get_from_email_address(),
subject=email.subject or extract_email_subject_from_html_content(html_message),
text=text_message,
html=html_message,
)
emails.append(message)
connection = mail.get_connection()
connection.send_messages(emails)
to_send.update(sent=current_time)
@staticmethod
def convert_events_to_emails():
"""
Converts unseen events to emails and marks them as seen.
"""
# Get the email medium
email_medium = get_medium()
# Get the default from email
default_from_email = get_from_email_address()
# Find any unseen events and create unsent email objects
for event, targets in email_medium.events_targets(seen=False, mark_seen=True):
# Check the event's context for a from_address, otherwise fallback to default
from_address = event.context.get('from_address') or default_from_email
# Create the emails
Email.objects.create_email(event=event, from_address=from_address, recipients=targets)
| [
2,
3,
4,
5,
6
] |
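A sketch of how the two static methods shown above would typically be driven; the import path and the polling loop are assumptions, not part of the stored code:

import time

from entity_emailer.interface import EntityEmailerInterface  # import path assumed


def run_emailer_loop(poll_seconds=60):
    # Hypothetical worker loop: first turn unseen events into Email rows,
    # then send any Email whose scheduled time has passed.
    while True:
        EntityEmailerInterface.convert_events_to_emails()
        EntityEmailerInterface.send_unsent_scheduled_emails()
        time.sleep(poll_seconds)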
559 | 7a1be5c9c48413ba1969631e99ecb45cf15ef613 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('Registration', '0015_auto_20150525_1815')]
operations = [migrations.AlterField(model_name='user', name=
'created_date', field=models.DateField(auto_now_add=True)),
migrations.AlterField(model_name='user', name='last_login', field=
models.DateTimeField(null=True, verbose_name='last login', blank=
True)), migrations.AlterField(model_name='user', name=
'modified_date', field=models.DateField(auto_now=True)), migrations
.AlterField(model_name='user_skills', name='percentage', field=
models.PositiveSmallIntegerField(default=0, validators=[django.core
.validators.MinValueValidator(0), django.core.validators.
MaxValueValidator(100)]))]
| from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):
dependencies = [('Registration', '0015_auto_20150525_1815')]
operations = [migrations.AlterField(model_name='user', name=
'created_date', field=models.DateField(auto_now_add=True)),
migrations.AlterField(model_name='user', name='last_login', field=
models.DateTimeField(null=True, verbose_name='last login', blank=
True)), migrations.AlterField(model_name='user', name=
'modified_date', field=models.DateField(auto_now=True)), migrations
.AlterField(model_name='user_skills', name='percentage', field=
models.PositiveSmallIntegerField(default=0, validators=[django.core
.validators.MinValueValidator(0), django.core.validators.
MaxValueValidator(100)]))]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('Registration', '0015_auto_20150525_1815'),
]
operations = [
migrations.AlterField(
model_name='user',
name='created_date',
field=models.DateField(auto_now_add=True),
),
migrations.AlterField(
model_name='user',
name='last_login',
field=models.DateTimeField(null=True, verbose_name='last login', blank=True),
),
migrations.AlterField(
model_name='user',
name='modified_date',
field=models.DateField(auto_now=True),
),
migrations.AlterField(
model_name='user_skills',
name='percentage',
field=models.PositiveSmallIntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(100)]),
),
]
| [
0,
1,
2,
3,
4
] |
560 | 34b23e80b3c4aaf62f31c19fee0b47ace1561a8c | <mask token>
| <mask token>
print(f"""s = {s}
m = {m}
d = {d:.2f}
di = {di}
e = {e}""", end='')
| n1 = 7
n2 = 3
s = n1 + n2
m = n1 * n2
d = n1 / n2
di = n1 // n2
e = n1 ** n2
print(f"""s = {s}
m = {m}
d = {d:.2f}
di = {di}
e = {e}""", end='')
# cor = input('Choose a color: ')
# print(f"Chosen color {cor:=^10}\n"
#       f"Chosen color {cor:>10}\n"
#       f"Chosen color {cor:<10}\n")
n1 = 7
n2 = 3
#print(f'Sum {n1+n2}')
s = n1 + n2
m = n1 * n2
d = n1 / n2
di = n1 // n2
e = n1 ** n2
print(f's = {s}\n m = {m}\n d = {d:.2f}\n di = {di}\n e = {e}', end='')
| null | [
0,
1,
2,
3
] |
561 | c52d1c187edb17e85a8e2b47aa6731bc9a41ab1b | <mask token>
| print('Hello Workls!')
| print ("Hello Workls!")
| null | null | [
0,
1,
2
] |
562 | de3a4053b5b0d4d2d5c2dcd317e64cf9b4faeb75 | <mask token>
class Hydra(FlaskTopModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config['CLIENT_ID'] = 4
self.config['BASE_IMAGE_URL'
] = 'https://static.pharminfo.fr/images/cip/{cip}/{name}.{ext}'
self.config['SQLALCHEMY_DATABASE_URI'
] = 'pgfdw://hydra@localhost/hydra'
self.config.from_envvar('MEDBOX_SETTINGS', silent=True)
self.configure_db(self.config['SQLALCHEMY_DATABASE_URI'])
<mask token>
@product_api.declare('GET')
def get_product(payload, product_id):
products = Product.query.filter_by(cip=str(product_id)).filter_by(client_id
=app.config['CLIENT_ID']).all()
if products:
return product_api.get(payload, product_id=products[0].product_id)
else:
return {'objects': [], 'occurences': 0}
| <mask token>
class Hydra(FlaskTopModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config['CLIENT_ID'] = 4
self.config['BASE_IMAGE_URL'
] = 'https://static.pharminfo.fr/images/cip/{cip}/{name}.{ext}'
self.config['SQLALCHEMY_DATABASE_URI'
] = 'pgfdw://hydra@localhost/hydra'
self.config.from_envvar('MEDBOX_SETTINGS', silent=True)
self.configure_db(self.config['SQLALCHEMY_DATABASE_URI'])
def filter_query(query):
return query.filter_by(client_id=app.config['CLIENT_ID'])
<mask token>
rest(Labo, only=('label',))
<mask token>
@image_api.declare('GET')
def get_image(payload, cip, name, ext):
result = image_api.get(payload, cip=cip)
for obj in getattr(result, 'data', result)['objects']:
obj['name'] = quote(obj['name'])
obj['url'] = app.config['BASE_IMAGE_URL'].format(**obj)
return result
@product_api.declare('GET')
def get_product(payload, product_id):
products = Product.query.filter_by(cip=str(product_id)).filter_by(client_id
=app.config['CLIENT_ID']).all()
if products:
return product_api.get(payload, product_id=products[0].product_id)
else:
return {'objects': [], 'occurences': 0}
| <mask token>
class Hydra(FlaskTopModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config['CLIENT_ID'] = 4
self.config['BASE_IMAGE_URL'
] = 'https://static.pharminfo.fr/images/cip/{cip}/{name}.{ext}'
self.config['SQLALCHEMY_DATABASE_URI'
] = 'pgfdw://hydra@localhost/hydra'
self.config.from_envvar('MEDBOX_SETTINGS', silent=True)
self.configure_db(self.config['SQLALCHEMY_DATABASE_URI'])
def filter_query(query):
return query.filter_by(client_id=app.config['CLIENT_ID'])
app = Hydra(__name__)
rest = UnRest(app, db.session)
rest(Labo, only=('label',))
product_api = rest(Product, query=filter_query, only=('product_id', 'title',
'description', 'cip', 'resip_labo_code', 'type_product'))
image_api = rest(ProductPhotoCIP, only=('cip', 'name', 'ext'))
@image_api.declare('GET')
def get_image(payload, cip, name, ext):
result = image_api.get(payload, cip=cip)
for obj in getattr(result, 'data', result)['objects']:
obj['name'] = quote(obj['name'])
obj['url'] = app.config['BASE_IMAGE_URL'].format(**obj)
return result
@product_api.declare('GET')
def get_product(payload, product_id):
products = Product.query.filter_by(cip=str(product_id)).filter_by(client_id
=app.config['CLIENT_ID']).all()
if products:
return product_api.get(payload, product_id=products[0].product_id)
else:
return {'objects': [], 'occurences': 0}
| from urllib.parse import quote
from top_model import db
from top_model.ext.flask import FlaskTopModel
from top_model.filesystem import ProductPhotoCIP
from top_model.webstore import Product, Labo
from unrest import UnRest
class Hydra(FlaskTopModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config['CLIENT_ID'] = 4
self.config['BASE_IMAGE_URL'
] = 'https://static.pharminfo.fr/images/cip/{cip}/{name}.{ext}'
self.config['SQLALCHEMY_DATABASE_URI'
] = 'pgfdw://hydra@localhost/hydra'
self.config.from_envvar('MEDBOX_SETTINGS', silent=True)
self.configure_db(self.config['SQLALCHEMY_DATABASE_URI'])
def filter_query(query):
return query.filter_by(client_id=app.config['CLIENT_ID'])
app = Hydra(__name__)
rest = UnRest(app, db.session)
rest(Labo, only=('label',))
product_api = rest(Product, query=filter_query, only=('product_id', 'title',
'description', 'cip', 'resip_labo_code', 'type_product'))
image_api = rest(ProductPhotoCIP, only=('cip', 'name', 'ext'))
@image_api.declare('GET')
def get_image(payload, cip, name, ext):
result = image_api.get(payload, cip=cip)
for obj in getattr(result, 'data', result)['objects']:
obj['name'] = quote(obj['name'])
obj['url'] = app.config['BASE_IMAGE_URL'].format(**obj)
return result
@product_api.declare('GET')
def get_product(payload, product_id):
products = Product.query.filter_by(cip=str(product_id)).filter_by(client_id
=app.config['CLIENT_ID']).all()
if products:
return product_api.get(payload, product_id=products[0].product_id)
else:
return {'objects': [], 'occurences': 0}
| from urllib.parse import quote
from top_model import db
from top_model.ext.flask import FlaskTopModel
from top_model.filesystem import ProductPhotoCIP
from top_model.webstore import Product, Labo
from unrest import UnRest
class Hydra(FlaskTopModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config['CLIENT_ID'] = 4
self.config['BASE_IMAGE_URL'] = (
'https://static.pharminfo.fr/images/cip/{cip}/{name}.{ext}')
self.config['SQLALCHEMY_DATABASE_URI'] = (
'pgfdw://hydra@localhost/hydra')
self.config.from_envvar('MEDBOX_SETTINGS', silent=True)
self.configure_db(self.config['SQLALCHEMY_DATABASE_URI'])
def filter_query(query):
return query.filter_by(client_id=app.config['CLIENT_ID'])
app = Hydra(__name__)
rest = UnRest(app, db.session)
rest(Labo, only=('label',))
product_api = rest(Product, query=filter_query, only=(
'product_id', 'title', 'description', 'cip', 'resip_labo_code',
'type_product'))
image_api = rest(ProductPhotoCIP, only=('cip', 'name', 'ext'))
@image_api.declare('GET')
def get_image(payload, cip, name, ext):
result = image_api.get(payload, cip=cip)
for obj in getattr(result, 'data', result)['objects']:
obj['name'] = quote(obj['name'])
obj['url'] = app.config['BASE_IMAGE_URL'].format(**obj)
return result
@product_api.declare('GET')
def get_product(payload, product_id):
products = (
Product.query
.filter_by(cip=str(product_id))
.filter_by(client_id=app.config['CLIENT_ID'])
.all())
if products:
return product_api.get(payload, product_id=products[0].product_id)
else:
return {'objects': [], 'occurences': 0}
| [
3,
6,
7,
8,
9
] |
563 | 5b91b7025b0e574d45f95a0585128018d83c17ea | <mask token>
| <mask token>
def something3():
x = session.query(models.Review).filter(models.Review.time < end_time
).count()
<mask token>
| something1
<mask token>
something2
<mask token>
def something3():
x = session.query(models.Review).filter(models.Review.time < end_time
).count()
something4
<mask token>
something5
| something1
x = session.query(x).filter(y).count()
something2
y = session.query(models.User, models.X).filter(models.User.time >
start_time, models.User.id == user_id).count()
def something3():
x = session.query(models.Review).filter(models.Review.time < end_time
).count()
something4
x = session.query(x, y).filter(bla).count()
x = session.query(x.X, y).filter(y > user_id).count()
x = session.query(x.X, y.Y).filter(x.X == 5).count()
something5
| something1
x = session.query(x).filter(y).count()
something2
y = session.query(
models.User, models.X,
).filter(
models.User.time > start_time,
models.User.id == user_id,
).count()
def something3():
x = session.query(
models.Review,
).filter(
models.Review.time < end_time,
).count()
something4
x = session.query(x, y).filter(bla).count()
x = session.query(x.X, y).filter(y > user_id).count()
x = session.query(
x.X, y.Y
).filter(x.X == 5).count()
something5
| [
0,
1,
2,
3,
4
] |
564 | fe12f6d3408ab115c5c440c5b45a9014cfee6539 | <mask token>
| <mask token>
urlpatterns = [path('', views.home, name='home'), path('category/', include
('api.category.urls')), path('product/', include('api.product.urls')),
path('user/', include('api.user.urls')), path('order/', include(
'api.order.urls')), path('payment/', include('api.payment.urls'))]
| from django.urls import path, include
from . import views
urlpatterns = [path('', views.home, name='home'), path('category/', include
('api.category.urls')), path('product/', include('api.product.urls')),
path('user/', include('api.user.urls')), path('order/', include(
'api.order.urls')), path('payment/', include('api.payment.urls'))]
| from django.urls import path,include
from .import views
urlpatterns = [
path('',views.home,name='home'),
path('category/',include('api.category.urls')),
path('product/',include('api.product.urls')),
path('user/',include('api.user.urls')),
path('order/',include('api.order.urls')),
path('payment/',include('api.payment.urls')),
]
| null | [
0,
1,
2,
3
] |
565 | bb7910af5334641fd2db7146112afaff7a2e42b9 | <mask token>
| <mask token>
if boxChecked == 'true':
heading = 'Recurring Donation'
customerRequest = {'given_name': firstName, 'family_name': lastName,
'email_address': email}
try:
customerResponse = customers_api_instance.create_customer(
customerRequest)
except ApiException as e:
print('customer creation failed')
print(e)
exit()
customer = customerResponse.customer
customerCardRequest = {'card_nonce': nonce}
try:
customerCardResponse = customers_api_instance.create_customer_card(
customer.id, customerCardRequest)
except:
print('customer card creation failed')
exit()
customerCard = customerCardResponse.card
body = {'customer_id': customer.id, 'customer_card_id': customerCard.id,
'idempotency_key': idempotency_key, 'amount_money': amount}
customersList = customers_api_instance.list_customers()
else:
heading = 'One time Donation'
body = {'idempotency_key': idempotency_key, 'card_nonce': nonce,
'amount_money': amount}
try:
api_response = transactions_api_instance.charge(location_id, body)
res = api_response.transaction
except ApiException as e:
res = 'Exception when calling TransactionApi->charge: {}'.format(e)
print('Content-type:text/html\r\n\r\n')
print('<html>')
print('<head>')
print('<title>Square Payment</title>')
print('</head>')
print('<body>')
print('<h2>Result: </h2>')
print('<h2>{}</h2>'.format(heading))
print('<p>{}</p>'.format(res))
if customersList:
print('<h2>Customers stored on File: </h2>')
for customer in customersList.customers:
print('<p>{}</p>'.format(customer))
print('</body>')
print('</html>')
| <mask token>
form = cgi.FieldStorage()
nonce = form.getvalue('nonce')
donation = form.getvalue('amount')
boxChecked = form.getvalue('boxChecked')
firstName = form.getvalue('firstname')
lastName = form.getvalue('lastname')
email = form.getvalue('email')
squareconnect.configuration.access_token = (
'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ')
location_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'
transactions_api_instance = TransactionsApi()
customers_api_instance = CustomersApi()
idempotency_key = str(uuid.uuid1())
amount = {'amount': int(donation) * 100, 'currency': 'USD'}
customersList = []
if boxChecked == 'true':
heading = 'Recurring Donation'
customerRequest = {'given_name': firstName, 'family_name': lastName,
'email_address': email}
try:
customerResponse = customers_api_instance.create_customer(
customerRequest)
except ApiException as e:
print('customer creation failed')
print(e)
exit()
customer = customerResponse.customer
customerCardRequest = {'card_nonce': nonce}
try:
customerCardResponse = customers_api_instance.create_customer_card(
customer.id, customerCardRequest)
except:
print('customer card creation failed')
exit()
customerCard = customerCardResponse.card
body = {'customer_id': customer.id, 'customer_card_id': customerCard.id,
'idempotency_key': idempotency_key, 'amount_money': amount}
customersList = customers_api_instance.list_customers()
else:
heading = 'One time Donation'
body = {'idempotency_key': idempotency_key, 'card_nonce': nonce,
'amount_money': amount}
try:
api_response = transactions_api_instance.charge(location_id, body)
res = api_response.transaction
except ApiException as e:
res = 'Exception when calling TransactionApi->charge: {}'.format(e)
print('Content-type:text/html\r\n\r\n')
print('<html>')
print('<head>')
print('<title>Square Payment</title>')
print('</head>')
print('<body>')
print('<h2>Result: </h2>')
print('<h2>{}</h2>'.format(heading))
print('<p>{}</p>'.format(res))
if customersList:
print('<h2>Customers stored on File: </h2>')
for customer in customersList.customers:
print('<p>{}</p>'.format(customer))
print('</body>')
print('</html>')
| from __future__ import print_function
import uuid
import cgi
import squareconnect
from squareconnect.rest import ApiException
from squareconnect.apis.transactions_api import TransactionsApi
from squareconnect.apis.locations_api import LocationsApi
from squareconnect.apis.customers_api import CustomersApi
form = cgi.FieldStorage()
nonce = form.getvalue('nonce')
donation = form.getvalue('amount')
boxChecked = form.getvalue('boxChecked')
firstName = form.getvalue('firstname')
lastName = form.getvalue('lastname')
email = form.getvalue('email')
squareconnect.configuration.access_token = (
'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ')
location_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'
transactions_api_instance = TransactionsApi()
customers_api_instance = CustomersApi()
idempotency_key = str(uuid.uuid1())
amount = {'amount': int(donation) * 100, 'currency': 'USD'}
customersList = []
if boxChecked == 'true':
heading = 'Recurring Donation'
customerRequest = {'given_name': firstName, 'family_name': lastName,
'email_address': email}
try:
customerResponse = customers_api_instance.create_customer(
customerRequest)
except ApiException as e:
print('customer creation failed')
print(e)
exit()
customer = customerResponse.customer
customerCardRequest = {'card_nonce': nonce}
try:
customerCardResponse = customers_api_instance.create_customer_card(
customer.id, customerCardRequest)
except:
print('customer card creation failed')
exit()
customerCard = customerCardResponse.card
body = {'customer_id': customer.id, 'customer_card_id': customerCard.id,
'idempotency_key': idempotency_key, 'amount_money': amount}
customersList = customers_api_instance.list_customers()
else:
heading = 'One time Donation'
body = {'idempotency_key': idempotency_key, 'card_nonce': nonce,
'amount_money': amount}
try:
api_response = transactions_api_instance.charge(location_id, body)
res = api_response.transaction
except ApiException as e:
res = 'Exception when calling TransactionApi->charge: {}'.format(e)
print('Content-type:text/html\r\n\r\n')
print('<html>')
print('<head>')
print('<title>Square Payment</title>')
print('</head>')
print('<body>')
print('<h2>Result: </h2>')
print('<h2>{}</h2>'.format(heading))
print('<p>{}</p>'.format(res))
if customersList:
print('<h2>Customers stored on File: </h2>')
for customer in customersList.customers:
print('<p>{}</p>'.format(customer))
print('</body>')
print('</html>')
| #!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
import uuid
import cgi
import squareconnect
from squareconnect.rest import ApiException
from squareconnect.apis.transactions_api import TransactionsApi
from squareconnect.apis.locations_api import LocationsApi
from squareconnect.apis.customers_api import CustomersApi
# Create instance of FieldStorage
form = cgi.FieldStorage()
# Get data from fields
nonce = form.getvalue('nonce')
# Get amount data
donation = form.getvalue('amount')
boxChecked = form.getvalue('boxChecked')
firstName = form.getvalue('firstname')
lastName = form.getvalue('lastname')
email = form.getvalue('email')
# The access token to use in all Connect API requests. Use your *sandbox* access
# token if you're just testing things out.
squareconnect.configuration.access_token = 'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ'
# The ID of the business location to associate processed payments with.
# See [Retrieve your business's locations]
# (https://docs.connect.squareup.com/articles/getting-started/#retrievemerchantprofile)
# for an easy way to get your business's location IDs.
# If you're testing things out, use a sandbox location ID.
location_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'
transactions_api_instance = TransactionsApi()
customers_api_instance = CustomersApi()
# Every payment you process with the SDK must have a unique idempotency key.
# If you're unsure whether a particular payment succeeded, you can reattempt
# it with the same idempotency key without worrying about double charging
# the buyer.
idempotency_key = str(uuid.uuid1())
# Monetary amounts are specified in the smallest unit of the applicable currency.
# This amount is in cents. It's also hard-coded for $1.00, which isn't very useful.
amount = {'amount': int(donation) * 100, 'currency': 'USD'}
customersList = []
# Add a customer to file
if boxChecked == "true":
heading = "Recurring Donation"
customerRequest = {'given_name': firstName, 'family_name': lastName, 'email_address': email}
try:
customerResponse = customers_api_instance.create_customer(customerRequest)
except ApiException as e:
print ("customer creation failed")
print (e)
exit()
customer = customerResponse.customer
customerCardRequest = {'card_nonce': nonce}
try:
customerCardResponse = customers_api_instance.create_customer_card(customer.id, customerCardRequest)
except:
print ("customer card creation failed")
exit()
customerCard = customerCardResponse.card
body = {'customer_id': customer.id, 'customer_card_id': customerCard.id, 'idempotency_key': idempotency_key, 'amount_money': amount}
customersList = customers_api_instance.list_customers()
else:
# To learn more about splitting transactions with additional recipients,
# see the Transactions API documentation on our [developer site]
# (https://docs.connect.squareup.com/payments/transactions/overview#mpt-overview).
heading = "One time Donation"
body = {'idempotency_key': idempotency_key, 'card_nonce': nonce, 'amount_money': amount}
# customersList = Non
# The SDK throws an exception if a Connect endpoint responds with anything besides
# a 200-level HTTP code. This block catches any exceptions that occur from the request.
try:
api_response = transactions_api_instance.charge(location_id, body)
res = api_response.transaction
except ApiException as e:
res = "Exception when calling TransactionApi->charge: {}".format(e)
# Display the result
print ('Content-type:text/html\r\n\r\n')
print ('<html>')
print ('<head>')
print ('<title>Square Payment</title>')
print ('</head>')
print ('<body>')
print ('<h2>Result: </h2>')
print( '<h2>{}</h2>'.format(heading))
print ('<p>{}</p>'.format(res))
if customersList:
print( '<h2>Customers stored on File: </h2>')
for customer in customersList.customers:
print ('<p>{}</p>'.format(customer))
print ('</body>')
print ('</html>')
| [
0,
1,
2,
3,
4
] |
566 | ea1d62c4a8c406dde9bb138ee045be5e682fdbfe | class Wspak:
<mask token>
def __init__(self, data):
self.data = data
self.index = -2
self.i = len(data) - 1
<mask token>
<mask token>
<mask token>
| class Wspak:
"""Iterator zwracający wartości w odwróconym porządku"""
def __init__(self, data):
self.data = data
self.index = -2
self.i = len(data) - 1
def __iter__(self):
return self
def __next__(self):
if self.index >= self.i:
raise StopIteration
self.index = self.index + 2
return self.data[self.index]
<mask token>
| class Wspak:
"""Iterator zwracający wartości w odwróconym porządku"""
def __init__(self, data):
self.data = data
self.index = -2
self.i = len(data) - 1
def __iter__(self):
return self
def __next__(self):
if self.index >= self.i:
raise StopIteration
self.index = self.index + 2
return self.data[self.index]
<mask token>
for x in Wspak(g):
print(x)
print(d)
| class Wspak:
"""Iterator zwracający wartości w odwróconym porządku"""
def __init__(self, data):
self.data = data
self.index = -2
self.i = len(data) - 1
def __iter__(self):
return self
def __next__(self):
if self.index >= self.i:
raise StopIteration
self.index = self.index + 2
return self.data[self.index]
d = ['sdasda', 'sdasdasd', 'sdsad232', 'dasda', 'dsada']
g = 2, 3, 4, 6, 7
d = [x for x in Wspak(d)]
for x in Wspak(g):
print(x)
print(d)
| class Wspak:
"""Iterator zwracający wartości w odwróconym porządku"""
def __init__(self, data):
self.data = data
self.index = -2
self.i=len(data)-1
def __iter__(self):
return self
def __next__(self):
if self.index >= self.i:
raise StopIteration
self.index = self.index+2
return self.data[self.index]
d=(["sdasda","sdasdasd","sdsad232","dasda","dsada"])
g=(2,3,4,6,7)
d = [x for x in Wspak(d)]
for x in Wspak(g):
print(x)
print(d) | [
2,
5,
6,
7,
8
] |
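Note that despite its docstring, Wspak above advances self.index by 2 starting from -2, so it yields every second element front-to-back (indices 0, 2, 4, ...) rather than reversed values; list(Wspak((2, 3, 4, 6, 7))) gives [2, 4, 7]. A sketch of an iterator that really reverses, keeping the same __iter__/__next__ protocol:

class Reversed:
    """Iterator that yields values in reversed order."""

    def __init__(self, data):
        self.data = data
        self.index = len(data)  # one past the last valid index

    def __iter__(self):
        return self

    def __next__(self):
        if self.index == 0:
            raise StopIteration
        self.index -= 1  # walk backwards one element at a time
        return self.data[self.index]


# Usage: list(Reversed([1, 2, 3])) -> [3, 2, 1]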
567 | ec64ddd01034debadb6674e71125f673f5de8367 | <mask token>
| <mask token>
def test_score1() ->None:
package = {'close': 13.92, 'high': 14.57, 'low': 12.45, 'open': 13.4584,
'symbol': 'FANG', 'timestamp': 1627493640000000000, 'trade_count':
602, 'volume': 213907, 'vwap': 8.510506}
app = StudyThreeBarsScore()
newPrice = 13.7
realtime = []
symbol = 'FANG'
stack = {'symbol': symbol, 'value': {'firstPrice': 13.5, 'secondPrice':
14.0, 'thirdPrice': 13.0, 'timeframe': RedisTimeFrame.MIN2}}
score1 = app.ThreeBarPlay(13.6, [], stack)
assert score1 == 4
score2 = app.ThreeBarPlay(13.4, [], stack)
assert score2 == 2
| from redis3barScore import StudyThreeBarsScore
from redisUtil import RedisTimeFrame
def test_score1() ->None:
package = {'close': 13.92, 'high': 14.57, 'low': 12.45, 'open': 13.4584,
'symbol': 'FANG', 'timestamp': 1627493640000000000, 'trade_count':
602, 'volume': 213907, 'vwap': 8.510506}
app = StudyThreeBarsScore()
newPrice = 13.7
realtime = []
symbol = 'FANG'
stack = {'symbol': symbol, 'value': {'firstPrice': 13.5, 'secondPrice':
14.0, 'thirdPrice': 13.0, 'timeframe': RedisTimeFrame.MIN2}}
score1 = app.ThreeBarPlay(13.6, [], stack)
assert score1 == 4
score2 = app.ThreeBarPlay(13.4, [], stack)
assert score2 == 2
| from redis3barScore import StudyThreeBarsScore
from redisUtil import RedisTimeFrame
def test_score1() -> None:
package = {'close': 13.92,
'high': 14.57,
'low': 12.45,
'open': 13.4584,
'symbol': 'FANG',
'timestamp': 1627493640000000000,
'trade_count': 602,
'volume': 213907,
'vwap': 8.510506}
app = StudyThreeBarsScore()
newPrice = 13.70
realtime = []
symbol = "FANG"
stack = {'symbol': symbol, 'value': {
'firstPrice': 13.50,
'secondPrice': 14.00,
'thirdPrice': 13.00,
'timeframe': RedisTimeFrame.MIN2
}}
score1 = app.ThreeBarPlay(13.60, [], stack)
assert score1 == 4
score2 = app.ThreeBarPlay(13.40, [], stack)
assert score2 == 2
| null | [
0,
1,
2,
3
] |
568 | 235bb1b9d4c41c12d7667a6bac48737464c685c7 | <mask token>
def menu(x):
""" Takes a list as argument and displays as a menu """
for i in range(len(x)):
print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]
[1], '-->', x[i][0]))
<mask token>
def secondary_message(t, unit):
""" A message for the secondary switch """
print(Fore.CYAN + '\n Select the unit you would you like to convert ' +
str(t) + '°' + unit + ' into:\n' + Fore.RESET)
menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [
'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',
'°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\n' + Fore.RESET, '']])
<mask token>
def main():
"""" This is the main function """
while True:
primary_message()
x = choice(9)
z = tempConv
if x == 1:
t = value_check('C', tempConv.cel_ran)
secondary_message(t, 'C')
y = choice(9, 1)
while True:
if y == 2:
t2 = z.cel_fah(t)
result_message(t, t2, 'C', 'F')
break
elif y == 3:
t2 = z.cel_kel(t)
result_message(t, t2, 'C', 'K')
break
elif y == 4:
t2 = z.cel_ran(t)
result_message(t, t2, 'C', 'R')
break
elif y == 5:
t2 = z.cel_del(t)
result_message(t, t2, 'C', 'De')
break
elif y == 6:
t2 = z.cel_new(t)
result_message(t, t2, 'C', 'N')
break
elif y == 7:
t2 = z.cel_rea(t)
result_message(t, t2, 'C', 'Ré')
break
elif y == 8:
t2 = z.cel_rom(t)
result_message(t, t2, 'C', 'Rø')
break
elif y == 9:
break
elif x == 2:
t = value_check('F', tempConv.fah_ran)
secondary_message(t, 'F')
y = choice(9, 2)
while True:
if y == 1:
t2 = z.fah_cel(t)
result_message(t, t2, 'F', 'C')
break
elif y == 3:
t2 = z.fah_kel(t)
result_message(t, t2, 'F', 'K')
break
elif y == 4:
t2 = z.fah_ran(t)
result_message(t, t2, 'F', 'R')
break
elif y == 5:
t2 = z.fah_del(t)
result_message(t, t2, 'F', 'De')
break
elif y == 6:
t2 = z.fah_new(t)
result_message(t, t2, 'F', 'N')
break
elif y == 7:
t2 = z.fah_rea(t)
result_message(t, t2, 'F', 'Ré')
break
elif y == 8:
t2 = z.fah_rom(t)
result_message(t, t2, 'F', 'Rø')
break
elif y == 9:
break
elif x == 3:
t = value_check('K', tempConv.kel_ran)
secondary_message(t, 'K')
y = choice(9, 3)
while True:
if y == 1:
t2 = z.kel_cel(t)
result_message(t, t2, 'K', 'C')
break
elif y == 2:
t2 = z.kel_fah(t)
result_message(t, t2, 'K', 'F')
break
elif y == 4:
t2 = z.kel_ran(t)
result_message(t, t2, 'K', 'R')
break
elif y == 5:
t2 = z.kel_del(t)
result_message(t, t2, 'K', 'De')
break
elif y == 6:
t2 = z.kel_new(t)
result_message(t, t2, 'K', 'N')
break
elif y == 7:
t2 = z.kel_rea(t)
result_message(t, t2, 'K', 'Ré')
break
elif y == 8:
t2 = z.kel_rom(t)
result_message(t, t2, 'K', 'Rø')
break
elif y == 9:
break
elif x == 4:
t = value_check('R', tempConv.ran_rea)
secondary_message(t, 'R')
y = choice(9, 4)
while True:
if y == 1:
t2 = z.ran_cel(t)
result_message(t, t2, 'R', 'C')
break
elif y == 2:
t2 = z.ran_fah(t)
result_message(t, t2, 'R', 'F')
break
elif y == 3:
t2 = z.ran_kel(t)
result_message(t, t2, 'R', 'K')
break
elif y == 5:
t2 = z.ran_del(t)
result_message(t, t2, 'R', 'De')
break
elif y == 6:
t2 = z.ran_new(t)
result_message(t, t2, 'R', 'N')
break
elif y == 7:
t2 = z.ran_rea(t)
result_message(t, t2, 'R', 'Ré')
break
elif y == 8:
t2 = z.ran_rom(t)
result_message(t, t2, 'R', 'Rø')
break
elif y == 9:
break
elif x == 5:
t = value_check('De', tempConv.del_ran)
secondary_message(t, 'De')
y = choice(9, 5)
while True:
if y == 1:
t2 = z.del_cel(t)
result_message(t, t2, 'De', 'C')
break
elif y == 2:
t2 = z.del_fah(t)
result_message(t, t2, 'De', 'F')
break
elif y == 3:
t2 = z.del_kel(t)
result_message(t, t2, 'De', 'K')
break
elif y == 4:
t2 = z.del_ran(t)
result_message(t, t2, 'De', 'R')
break
elif y == 6:
t2 = z.del_new(t)
result_message(t, t2, 'De', 'N')
break
elif y == 7:
t2 = z.del_rea(t)
result_message(t, t2, 'De', 'Ré')
break
elif y == 8:
t2 = z.del_rom(t)
result_message(t, t2, 'De', 'Rø')
break
elif y == 9:
break
elif x == 6:
t = value_check('N', tempConv.new_ran)
secondary_message(t, 'N')
y = choice(9, 6)
while True:
if y == 1:
t2 = z.new_cel(t)
result_message(t, t2, 'N', 'C')
break
elif y == 2:
t2 = z.new_fah(t)
result_message(t, t2, 'N', 'F')
break
elif y == 3:
t2 = z.new_kel(t)
result_message(t, t2, 'N', 'K')
break
elif y == 4:
t2 = z.new_ran(t)
result_message(t, t2, 'N', 'R')
break
elif y == 5:
t2 = z.new_del(t)
result_message(t, t2, 'N', 'N')
break
elif y == 7:
t2 = z.new_rea(t)
result_message(t, t2, 'N', 'Ré')
break
elif y == 8:
t2 = z.new_rom(t)
result_message(t, t2, 'N', 'Rø')
break
elif y == 9:
break
elif x == 7:
t = value_check('Ré', tempConv.rea_ran)
secondary_message(t, 'Ré')
y = choice(9, 7)
while True:
if y == 1:
t2 = z.rea_cel(t)
result_message(t, t2, 'Ré', 'C')
break
elif y == 2:
t2 = z.rea_fah(t)
result_message(t, t2, 'Ré', 'F')
break
elif y == 3:
t2 = z.rea_kel(t)
result_message(t, t2, 'Ré', 'K')
break
elif y == 4:
t2 = z.rea_ran(t)
result_message(t, t2, 'Ré', 'R')
break
elif y == 5:
t2 = z.rea_del(t)
result_message(t, t2, 'Ré', 'De')
break
elif y == 6:
t2 = z.rea_new(t)
result_message(t, t2, 'Ré', 'N')
break
elif y == 8:
t2 = z.rea_rom(t)
result_message(t, t2, 'Ré', 'Rø')
break
elif y == 9:
break
elif x == 8:
t = value_check('Rø', tempConv.rom_ran)
secondary_message(t, 'Rø')
y = choice(9, 8)
while True:
if y == 1:
t2 = z.rom_cel(t)
result_message(t, t2, 'Rø', 'C')
break
elif y == 2:
t2 = z.rom_fah(t)
result_message(t, t2, 'Rø', 'F')
break
elif y == 3:
t2 = z.rom_kel(t)
result_message(t, t2, 'Rø', 'K')
break
elif y == 4:
t2 = z.rom_ran(t)
result_message(t, t2, 'Rø', 'R')
break
elif y == 5:
t2 = z.rom_del(t)
result_message(t, t2, 'Rø', 'De')
break
elif y == 6:
t2 = z.rom_new(t)
result_message(t, t2, 'Rø', 'N')
break
elif y == 7:
t2 = z.rom_rea(t)
result_message(t, t2, 'Rø', 'Ré')
break
elif y == 9:
break
elif x == 9:
print(Fore.CYAN + '\n Goodbye!' + Fore.RESET)
i = 0
break
<mask token>
| <mask token>
def menu(x):
""" Takes a list as argument and displays as a menu """
for i in range(len(x)):
print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]
[1], '-->', x[i][0]))
<mask token>
def secondary_message(t, unit):
""" A message for the secondary switch """
print(Fore.CYAN + '\n Select the unit you would you like to convert ' +
str(t) + '°' + unit + ' into:\n' + Fore.RESET)
menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [
'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',
'°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\n' + Fore.RESET, '']])
def result_message(t, t2, unit, unit2):
from os import system
""" Prints the result to the screen """
print(Fore.GREEN + '\n ' + str(round(t, 2)) + '°' + unit + Fore.YELLOW +
' --> ' + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + '°' +
unit2 + '\n' + Style.RESET_ALL)
print(system('pause'))
def choice(x, y=0):
""" Checks user input """
while True:
try:
choice = int(input())
if choice <= x and choice > 0 and choice != y:
return choice
break
elif choice == y:
print(Fore.RED + "\n Can't convert to the same unit!\n" +
Fore.RESET)
else:
print(Fore.RED + '\n Invalid choice!\n' + Fore.RESET)
except ValueError:
print(Fore.RED + '\n Invalid input!\n' + Fore.RESET)
<mask token>
def main():
"""" This is the main function """
while True:
primary_message()
x = choice(9)
z = tempConv
if x == 1:
t = value_check('C', tempConv.cel_ran)
secondary_message(t, 'C')
y = choice(9, 1)
while True:
if y == 2:
t2 = z.cel_fah(t)
result_message(t, t2, 'C', 'F')
break
elif y == 3:
t2 = z.cel_kel(t)
result_message(t, t2, 'C', 'K')
break
elif y == 4:
t2 = z.cel_ran(t)
result_message(t, t2, 'C', 'R')
break
elif y == 5:
t2 = z.cel_del(t)
result_message(t, t2, 'C', 'De')
break
elif y == 6:
t2 = z.cel_new(t)
result_message(t, t2, 'C', 'N')
break
elif y == 7:
t2 = z.cel_rea(t)
result_message(t, t2, 'C', 'Ré')
break
elif y == 8:
t2 = z.cel_rom(t)
result_message(t, t2, 'C', 'Rø')
break
elif y == 9:
break
elif x == 2:
t = value_check('F', tempConv.fah_ran)
secondary_message(t, 'F')
y = choice(9, 2)
while True:
if y == 1:
t2 = z.fah_cel(t)
result_message(t, t2, 'F', 'C')
break
elif y == 3:
t2 = z.fah_kel(t)
result_message(t, t2, 'F', 'K')
break
elif y == 4:
t2 = z.fah_ran(t)
result_message(t, t2, 'F', 'R')
break
elif y == 5:
t2 = z.fah_del(t)
result_message(t, t2, 'F', 'De')
break
elif y == 6:
t2 = z.fah_new(t)
result_message(t, t2, 'F', 'N')
break
elif y == 7:
t2 = z.fah_rea(t)
result_message(t, t2, 'F', 'Ré')
break
elif y == 8:
t2 = z.fah_rom(t)
result_message(t, t2, 'F', 'Rø')
break
elif y == 9:
break
elif x == 3:
t = value_check('K', tempConv.kel_ran)
secondary_message(t, 'K')
y = choice(9, 3)
while True:
if y == 1:
t2 = z.kel_cel(t)
result_message(t, t2, 'K', 'C')
break
elif y == 2:
t2 = z.kel_fah(t)
result_message(t, t2, 'K', 'F')
break
elif y == 4:
t2 = z.kel_ran(t)
result_message(t, t2, 'K', 'R')
break
elif y == 5:
t2 = z.kel_del(t)
result_message(t, t2, 'K', 'De')
break
elif y == 6:
t2 = z.kel_new(t)
result_message(t, t2, 'K', 'N')
break
elif y == 7:
t2 = z.kel_rea(t)
result_message(t, t2, 'K', 'Ré')
break
elif y == 8:
t2 = z.kel_rom(t)
result_message(t, t2, 'K', 'Rø')
break
elif y == 9:
break
elif x == 4:
t = value_check('R', tempConv.ran_rea)
secondary_message(t, 'R')
y = choice(9, 4)
while True:
if y == 1:
t2 = z.ran_cel(t)
result_message(t, t2, 'R', 'C')
break
elif y == 2:
t2 = z.ran_fah(t)
result_message(t, t2, 'R', 'F')
break
elif y == 3:
t2 = z.ran_kel(t)
result_message(t, t2, 'R', 'K')
break
elif y == 5:
t2 = z.ran_del(t)
result_message(t, t2, 'R', 'De')
break
elif y == 6:
t2 = z.ran_new(t)
result_message(t, t2, 'R', 'N')
break
elif y == 7:
t2 = z.ran_rea(t)
result_message(t, t2, 'R', 'Ré')
break
elif y == 8:
t2 = z.ran_rom(t)
result_message(t, t2, 'R', 'Rø')
break
elif y == 9:
break
elif x == 5:
t = value_check('De', tempConv.del_ran)
secondary_message(t, 'De')
y = choice(9, 5)
while True:
if y == 1:
t2 = z.del_cel(t)
result_message(t, t2, 'De', 'C')
break
elif y == 2:
t2 = z.del_fah(t)
result_message(t, t2, 'De', 'F')
break
elif y == 3:
t2 = z.del_kel(t)
result_message(t, t2, 'De', 'K')
break
elif y == 4:
t2 = z.del_ran(t)
result_message(t, t2, 'De', 'R')
break
elif y == 6:
t2 = z.del_new(t)
result_message(t, t2, 'De', 'N')
break
elif y == 7:
t2 = z.del_rea(t)
result_message(t, t2, 'De', 'Ré')
break
elif y == 8:
t2 = z.del_rom(t)
result_message(t, t2, 'De', 'Rø')
break
elif y == 9:
break
elif x == 6:
t = value_check('N', tempConv.new_ran)
secondary_message(t, 'N')
y = choice(9, 6)
while True:
if y == 1:
t2 = z.new_cel(t)
result_message(t, t2, 'N', 'C')
break
elif y == 2:
t2 = z.new_fah(t)
result_message(t, t2, 'N', 'F')
break
elif y == 3:
t2 = z.new_kel(t)
result_message(t, t2, 'N', 'K')
break
elif y == 4:
t2 = z.new_ran(t)
result_message(t, t2, 'N', 'R')
break
elif y == 5:
t2 = z.new_del(t)
                    result_message(t, t2, 'N', 'De')
break
elif y == 7:
t2 = z.new_rea(t)
result_message(t, t2, 'N', 'Ré')
break
elif y == 8:
t2 = z.new_rom(t)
result_message(t, t2, 'N', 'Rø')
break
elif y == 9:
break
elif x == 7:
t = value_check('Ré', tempConv.rea_ran)
secondary_message(t, 'Ré')
y = choice(9, 7)
while True:
if y == 1:
t2 = z.rea_cel(t)
result_message(t, t2, 'Ré', 'C')
break
elif y == 2:
t2 = z.rea_fah(t)
result_message(t, t2, 'Ré', 'F')
break
elif y == 3:
t2 = z.rea_kel(t)
result_message(t, t2, 'Ré', 'K')
break
elif y == 4:
t2 = z.rea_ran(t)
result_message(t, t2, 'Ré', 'R')
break
elif y == 5:
t2 = z.rea_del(t)
result_message(t, t2, 'Ré', 'De')
break
elif y == 6:
t2 = z.rea_new(t)
result_message(t, t2, 'Ré', 'N')
break
elif y == 8:
t2 = z.rea_rom(t)
result_message(t, t2, 'Ré', 'Rø')
break
elif y == 9:
break
elif x == 8:
t = value_check('Rø', tempConv.rom_ran)
secondary_message(t, 'Rø')
y = choice(9, 8)
while True:
if y == 1:
t2 = z.rom_cel(t)
result_message(t, t2, 'Rø', 'C')
break
elif y == 2:
t2 = z.rom_fah(t)
result_message(t, t2, 'Rø', 'F')
break
elif y == 3:
t2 = z.rom_kel(t)
result_message(t, t2, 'Rø', 'K')
break
elif y == 4:
t2 = z.rom_ran(t)
result_message(t, t2, 'Rø', 'R')
break
elif y == 5:
t2 = z.rom_del(t)
result_message(t, t2, 'Rø', 'De')
break
elif y == 6:
t2 = z.rom_new(t)
result_message(t, t2, 'Rø', 'N')
break
elif y == 7:
t2 = z.rom_rea(t)
result_message(t, t2, 'Rø', 'Ré')
break
elif y == 9:
break
elif x == 9:
print(Fore.CYAN + '\n Goodbye!' + Fore.RESET)
i = 0
break
<mask token>
| <mask token>
def menu(x):
""" Takes a list as argument and displays as a menu """
for i in range(len(x)):
print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]
[1], '-->', x[i][0]))
def primary_message():
""" A message for the main switch """
print(Fore.CYAN + '\n Select the unit you want to convert from:\n' +
Fore.RESET)
menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [
        'Rankine', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',
'°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Exit\n' + Fore.RESET, '']])
def secondary_message(t, unit):
""" A message for the secondary switch """
    print(Fore.CYAN + '\n Select the unit you would like to convert ' +
str(t) + '°' + unit + ' into:\n' + Fore.RESET)
menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [
        'Rankine', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',
'°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\n' + Fore.RESET, '']])
def result_message(t, t2, unit, unit2):
from os import system
""" Prints the result to the screen """
print(Fore.GREEN + '\n ' + str(round(t, 2)) + '°' + unit + Fore.YELLOW +
' --> ' + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + '°' +
unit2 + '\n' + Style.RESET_ALL)
print(system('pause'))
def choice(x, y=0):
""" Checks user input """
while True:
try:
choice = int(input())
if choice <= x and choice > 0 and choice != y:
return choice
break
elif choice == y:
print(Fore.RED + "\n Can't convert to the same unit!\n" +
Fore.RESET)
else:
print(Fore.RED + '\n Invalid choice!\n' + Fore.RESET)
except ValueError:
print(Fore.RED + '\n Invalid input!\n' + Fore.RESET)
def value_input(unit):
""" Asks user for temp. value, then checks it. """
print(Fore.CYAN + '\n Enter the temperature in °' + unit + ':\n' + Fore
.RESET)
while True:
try:
value = float(input())
return value
break
except ValueError:
            print(Fore.RED + '\n Input must be a number!\n' + Fore.RESET)
<mask token>
def main():
"""" This is the main function """
while True:
primary_message()
x = choice(9)
z = tempConv
if x == 1:
t = value_check('C', tempConv.cel_ran)
secondary_message(t, 'C')
y = choice(9, 1)
while True:
if y == 2:
t2 = z.cel_fah(t)
result_message(t, t2, 'C', 'F')
break
elif y == 3:
t2 = z.cel_kel(t)
result_message(t, t2, 'C', 'K')
break
elif y == 4:
t2 = z.cel_ran(t)
result_message(t, t2, 'C', 'R')
break
elif y == 5:
t2 = z.cel_del(t)
result_message(t, t2, 'C', 'De')
break
elif y == 6:
t2 = z.cel_new(t)
result_message(t, t2, 'C', 'N')
break
elif y == 7:
t2 = z.cel_rea(t)
result_message(t, t2, 'C', 'Ré')
break
elif y == 8:
t2 = z.cel_rom(t)
result_message(t, t2, 'C', 'Rø')
break
elif y == 9:
break
elif x == 2:
t = value_check('F', tempConv.fah_ran)
secondary_message(t, 'F')
y = choice(9, 2)
while True:
if y == 1:
t2 = z.fah_cel(t)
result_message(t, t2, 'F', 'C')
break
elif y == 3:
t2 = z.fah_kel(t)
result_message(t, t2, 'F', 'K')
break
elif y == 4:
t2 = z.fah_ran(t)
result_message(t, t2, 'F', 'R')
break
elif y == 5:
t2 = z.fah_del(t)
result_message(t, t2, 'F', 'De')
break
elif y == 6:
t2 = z.fah_new(t)
result_message(t, t2, 'F', 'N')
break
elif y == 7:
t2 = z.fah_rea(t)
result_message(t, t2, 'F', 'Ré')
break
elif y == 8:
t2 = z.fah_rom(t)
result_message(t, t2, 'F', 'Rø')
break
elif y == 9:
break
elif x == 3:
t = value_check('K', tempConv.kel_ran)
secondary_message(t, 'K')
y = choice(9, 3)
while True:
if y == 1:
t2 = z.kel_cel(t)
result_message(t, t2, 'K', 'C')
break
elif y == 2:
t2 = z.kel_fah(t)
result_message(t, t2, 'K', 'F')
break
elif y == 4:
t2 = z.kel_ran(t)
result_message(t, t2, 'K', 'R')
break
elif y == 5:
t2 = z.kel_del(t)
result_message(t, t2, 'K', 'De')
break
elif y == 6:
t2 = z.kel_new(t)
result_message(t, t2, 'K', 'N')
break
elif y == 7:
t2 = z.kel_rea(t)
result_message(t, t2, 'K', 'Ré')
break
elif y == 8:
t2 = z.kel_rom(t)
result_message(t, t2, 'K', 'Rø')
break
elif y == 9:
break
elif x == 4:
t = value_check('R', tempConv.ran_rea)
secondary_message(t, 'R')
y = choice(9, 4)
while True:
if y == 1:
t2 = z.ran_cel(t)
result_message(t, t2, 'R', 'C')
break
elif y == 2:
t2 = z.ran_fah(t)
result_message(t, t2, 'R', 'F')
break
elif y == 3:
t2 = z.ran_kel(t)
result_message(t, t2, 'R', 'K')
break
elif y == 5:
t2 = z.ran_del(t)
result_message(t, t2, 'R', 'De')
break
elif y == 6:
t2 = z.ran_new(t)
result_message(t, t2, 'R', 'N')
break
elif y == 7:
t2 = z.ran_rea(t)
result_message(t, t2, 'R', 'Ré')
break
elif y == 8:
t2 = z.ran_rom(t)
result_message(t, t2, 'R', 'Rø')
break
elif y == 9:
break
elif x == 5:
t = value_check('De', tempConv.del_ran)
secondary_message(t, 'De')
y = choice(9, 5)
while True:
if y == 1:
t2 = z.del_cel(t)
result_message(t, t2, 'De', 'C')
break
elif y == 2:
t2 = z.del_fah(t)
result_message(t, t2, 'De', 'F')
break
elif y == 3:
t2 = z.del_kel(t)
result_message(t, t2, 'De', 'K')
break
elif y == 4:
t2 = z.del_ran(t)
result_message(t, t2, 'De', 'R')
break
elif y == 6:
t2 = z.del_new(t)
result_message(t, t2, 'De', 'N')
break
elif y == 7:
t2 = z.del_rea(t)
result_message(t, t2, 'De', 'Ré')
break
elif y == 8:
t2 = z.del_rom(t)
result_message(t, t2, 'De', 'Rø')
break
elif y == 9:
break
elif x == 6:
t = value_check('N', tempConv.new_ran)
secondary_message(t, 'N')
y = choice(9, 6)
while True:
if y == 1:
t2 = z.new_cel(t)
result_message(t, t2, 'N', 'C')
break
elif y == 2:
t2 = z.new_fah(t)
result_message(t, t2, 'N', 'F')
break
elif y == 3:
t2 = z.new_kel(t)
result_message(t, t2, 'N', 'K')
break
elif y == 4:
t2 = z.new_ran(t)
result_message(t, t2, 'N', 'R')
break
elif y == 5:
t2 = z.new_del(t)
                    result_message(t, t2, 'N', 'De')
break
elif y == 7:
t2 = z.new_rea(t)
result_message(t, t2, 'N', 'Ré')
break
elif y == 8:
t2 = z.new_rom(t)
result_message(t, t2, 'N', 'Rø')
break
elif y == 9:
break
elif x == 7:
t = value_check('Ré', tempConv.rea_ran)
secondary_message(t, 'Ré')
y = choice(9, 7)
while True:
if y == 1:
t2 = z.rea_cel(t)
result_message(t, t2, 'Ré', 'C')
break
elif y == 2:
t2 = z.rea_fah(t)
result_message(t, t2, 'Ré', 'F')
break
elif y == 3:
t2 = z.rea_kel(t)
result_message(t, t2, 'Ré', 'K')
break
elif y == 4:
t2 = z.rea_ran(t)
result_message(t, t2, 'Ré', 'R')
break
elif y == 5:
t2 = z.rea_del(t)
result_message(t, t2, 'Ré', 'De')
break
elif y == 6:
t2 = z.rea_new(t)
result_message(t, t2, 'Ré', 'N')
break
elif y == 8:
t2 = z.rea_rom(t)
result_message(t, t2, 'Ré', 'Rø')
break
elif y == 9:
break
elif x == 8:
t = value_check('Rø', tempConv.rom_ran)
secondary_message(t, 'Rø')
y = choice(9, 8)
while True:
if y == 1:
t2 = z.rom_cel(t)
result_message(t, t2, 'Rø', 'C')
break
elif y == 2:
t2 = z.rom_fah(t)
result_message(t, t2, 'Rø', 'F')
break
elif y == 3:
t2 = z.rom_kel(t)
result_message(t, t2, 'Rø', 'K')
break
elif y == 4:
t2 = z.rom_ran(t)
result_message(t, t2, 'Rø', 'R')
break
elif y == 5:
t2 = z.rom_del(t)
result_message(t, t2, 'Rø', 'De')
break
elif y == 6:
t2 = z.rom_new(t)
result_message(t, t2, 'Rø', 'N')
break
elif y == 7:
t2 = z.rom_rea(t)
result_message(t, t2, 'Rø', 'Ré')
break
elif y == 9:
break
elif x == 9:
print(Fore.CYAN + '\n Goodbye!' + Fore.RESET)
i = 0
break
<mask token>
| from colorama import init, Fore, Style
import tempConv
def menu(x):
""" Takes a list as argument and displays as a menu """
for i in range(len(x)):
print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]
[1], '-->', x[i][0]))
def primary_message():
""" A message for the main switch """
print(Fore.CYAN + '\n Select the unit you want to convert from:\n' +
Fore.RESET)
menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [
        'Rankine', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',
'°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Exit\n' + Fore.RESET, '']])
def secondary_message(t, unit):
""" A message for the secondary switch """
    print(Fore.CYAN + '\n Select the unit you would like to convert ' +
str(t) + '°' + unit + ' into:\n' + Fore.RESET)
menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [
        'Rankine', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',
'°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\n' + Fore.RESET, '']])
def result_message(t, t2, unit, unit2):
from os import system
""" Prints the result to the screen """
print(Fore.GREEN + '\n ' + str(round(t, 2)) + '°' + unit + Fore.YELLOW +
' --> ' + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + '°' +
unit2 + '\n' + Style.RESET_ALL)
print(system('pause'))
def choice(x, y=0):
""" Checks user input """
while True:
try:
choice = int(input())
if choice <= x and choice > 0 and choice != y:
return choice
break
elif choice == y:
print(Fore.RED + "\n Can't convert to the same unit!\n" +
Fore.RESET)
else:
print(Fore.RED + '\n Invalid choice!\n' + Fore.RESET)
except ValueError:
print(Fore.RED + '\n Invalid input!\n' + Fore.RESET)
def value_input(unit):
""" Asks user for temp. value, then checks it. """
print(Fore.CYAN + '\n Enter the temperature in °' + unit + ':\n' + Fore
.RESET)
while True:
try:
value = float(input())
return value
break
except ValueError:
            print(Fore.RED + '\n Input must be a number!\n' + Fore.RESET)
def value_check(unit, value):
""" Check for value below absolute zero """
while True:
try:
t = value_input(unit)
if value(t) != None:
return t
break
except ValueError:
            pass  # value rejected (below absolute zero); ask again
def main():
"""" This is the main function """
while True:
primary_message()
x = choice(9)
z = tempConv
if x == 1:
t = value_check('C', tempConv.cel_ran)
secondary_message(t, 'C')
y = choice(9, 1)
while True:
if y == 2:
t2 = z.cel_fah(t)
result_message(t, t2, 'C', 'F')
break
elif y == 3:
t2 = z.cel_kel(t)
result_message(t, t2, 'C', 'K')
break
elif y == 4:
t2 = z.cel_ran(t)
result_message(t, t2, 'C', 'R')
break
elif y == 5:
t2 = z.cel_del(t)
result_message(t, t2, 'C', 'De')
break
elif y == 6:
t2 = z.cel_new(t)
result_message(t, t2, 'C', 'N')
break
elif y == 7:
t2 = z.cel_rea(t)
result_message(t, t2, 'C', 'Ré')
break
elif y == 8:
t2 = z.cel_rom(t)
result_message(t, t2, 'C', 'Rø')
break
elif y == 9:
break
elif x == 2:
t = value_check('F', tempConv.fah_ran)
secondary_message(t, 'F')
y = choice(9, 2)
while True:
if y == 1:
t2 = z.fah_cel(t)
result_message(t, t2, 'F', 'C')
break
elif y == 3:
t2 = z.fah_kel(t)
result_message(t, t2, 'F', 'K')
break
elif y == 4:
t2 = z.fah_ran(t)
result_message(t, t2, 'F', 'R')
break
elif y == 5:
t2 = z.fah_del(t)
result_message(t, t2, 'F', 'De')
break
elif y == 6:
t2 = z.fah_new(t)
result_message(t, t2, 'F', 'N')
break
elif y == 7:
t2 = z.fah_rea(t)
result_message(t, t2, 'F', 'Ré')
break
elif y == 8:
t2 = z.fah_rom(t)
result_message(t, t2, 'F', 'Rø')
break
elif y == 9:
break
elif x == 3:
t = value_check('K', tempConv.kel_ran)
secondary_message(t, 'K')
y = choice(9, 3)
while True:
if y == 1:
t2 = z.kel_cel(t)
result_message(t, t2, 'K', 'C')
break
elif y == 2:
t2 = z.kel_fah(t)
result_message(t, t2, 'K', 'F')
break
elif y == 4:
t2 = z.kel_ran(t)
result_message(t, t2, 'K', 'R')
break
elif y == 5:
t2 = z.kel_del(t)
result_message(t, t2, 'K', 'De')
break
elif y == 6:
t2 = z.kel_new(t)
result_message(t, t2, 'K', 'N')
break
elif y == 7:
t2 = z.kel_rea(t)
result_message(t, t2, 'K', 'Ré')
break
elif y == 8:
t2 = z.kel_rom(t)
result_message(t, t2, 'K', 'Rø')
break
elif y == 9:
break
elif x == 4:
t = value_check('R', tempConv.ran_rea)
secondary_message(t, 'R')
y = choice(9, 4)
while True:
if y == 1:
t2 = z.ran_cel(t)
result_message(t, t2, 'R', 'C')
break
elif y == 2:
t2 = z.ran_fah(t)
result_message(t, t2, 'R', 'F')
break
elif y == 3:
t2 = z.ran_kel(t)
result_message(t, t2, 'R', 'K')
break
elif y == 5:
t2 = z.ran_del(t)
result_message(t, t2, 'R', 'De')
break
elif y == 6:
t2 = z.ran_new(t)
result_message(t, t2, 'R', 'N')
break
elif y == 7:
t2 = z.ran_rea(t)
result_message(t, t2, 'R', 'Ré')
break
elif y == 8:
t2 = z.ran_rom(t)
result_message(t, t2, 'R', 'Rø')
break
elif y == 9:
break
elif x == 5:
t = value_check('De', tempConv.del_ran)
secondary_message(t, 'De')
y = choice(9, 5)
while True:
if y == 1:
t2 = z.del_cel(t)
result_message(t, t2, 'De', 'C')
break
elif y == 2:
t2 = z.del_fah(t)
result_message(t, t2, 'De', 'F')
break
elif y == 3:
t2 = z.del_kel(t)
result_message(t, t2, 'De', 'K')
break
elif y == 4:
t2 = z.del_ran(t)
result_message(t, t2, 'De', 'R')
break
elif y == 6:
t2 = z.del_new(t)
result_message(t, t2, 'De', 'N')
break
elif y == 7:
t2 = z.del_rea(t)
result_message(t, t2, 'De', 'Ré')
break
elif y == 8:
t2 = z.del_rom(t)
result_message(t, t2, 'De', 'Rø')
break
elif y == 9:
break
elif x == 6:
t = value_check('N', tempConv.new_ran)
secondary_message(t, 'N')
y = choice(9, 6)
while True:
if y == 1:
t2 = z.new_cel(t)
result_message(t, t2, 'N', 'C')
break
elif y == 2:
t2 = z.new_fah(t)
result_message(t, t2, 'N', 'F')
break
elif y == 3:
t2 = z.new_kel(t)
result_message(t, t2, 'N', 'K')
break
elif y == 4:
t2 = z.new_ran(t)
result_message(t, t2, 'N', 'R')
break
elif y == 5:
t2 = z.new_del(t)
                    result_message(t, t2, 'N', 'De')
break
elif y == 7:
t2 = z.new_rea(t)
result_message(t, t2, 'N', 'Ré')
break
elif y == 8:
t2 = z.new_rom(t)
result_message(t, t2, 'N', 'Rø')
break
elif y == 9:
break
elif x == 7:
t = value_check('Ré', tempConv.rea_ran)
secondary_message(t, 'Ré')
y = choice(9, 7)
while True:
if y == 1:
t2 = z.rea_cel(t)
result_message(t, t2, 'Ré', 'C')
break
elif y == 2:
t2 = z.rea_fah(t)
result_message(t, t2, 'Ré', 'F')
break
elif y == 3:
t2 = z.rea_kel(t)
result_message(t, t2, 'Ré', 'K')
break
elif y == 4:
t2 = z.rea_ran(t)
result_message(t, t2, 'Ré', 'R')
break
elif y == 5:
t2 = z.rea_del(t)
result_message(t, t2, 'Ré', 'De')
break
elif y == 6:
t2 = z.rea_new(t)
result_message(t, t2, 'Ré', 'N')
break
elif y == 8:
t2 = z.rea_rom(t)
result_message(t, t2, 'Ré', 'Rø')
break
elif y == 9:
break
elif x == 8:
t = value_check('Rø', tempConv.rom_ran)
secondary_message(t, 'Rø')
y = choice(9, 8)
while True:
if y == 1:
t2 = z.rom_cel(t)
result_message(t, t2, 'Rø', 'C')
break
elif y == 2:
t2 = z.rom_fah(t)
result_message(t, t2, 'Rø', 'F')
break
elif y == 3:
t2 = z.rom_kel(t)
result_message(t, t2, 'Rø', 'K')
break
elif y == 4:
t2 = z.rom_ran(t)
result_message(t, t2, 'Rø', 'R')
break
elif y == 5:
t2 = z.rom_del(t)
result_message(t, t2, 'Rø', 'De')
break
elif y == 6:
t2 = z.rom_new(t)
result_message(t, t2, 'Rø', 'N')
break
elif y == 7:
t2 = z.rom_rea(t)
result_message(t, t2, 'Rø', 'Ré')
break
elif y == 9:
break
elif x == 9:
print(Fore.CYAN + '\n Goodbye!' + Fore.RESET)
i = 0
break
if __name__ == '__main__':
init()
main()
|
from colorama import init, Fore, Style
import tempConv
#============================================================================#
# TEMP CONVERSION PROGRAM: #
#============================================================================#
#-----------------------------------------------------------------------------
def menu(x):
""" Takes a list as argument and displays as a menu """
for i in range(len(x)):
print("{0:>4s} {1:<3s}{2:^5s}{3:<15}"
.format(str(i + 1) + ")", x[i][1], "-->", x[i][0]))
#-----------------------------------------------------------------------------
def primary_message():
""" A message for the main switch """
print(Fore.CYAN + "\n Select the unit you want to convert from:\n" +
Fore.RESET)
menu([
["Celsius", "\u00b0C"],
["Fahrenheit", "\u00b0F"],
["Kelvin", "\u00b0K"],
["Rankin", "\u00b0R"],
["Delisle", "\u00b0De"],
["Newton", "\u00b0N"],
["R\u00e9aumur", "\u00b0R\u00e9"],
["R\u00f8mer", "\u00b0R\u00f8"],
[Fore.RED + "Exit\n" + Fore.RESET,""]
])
#-----------------------------------------------------------------------------
def secondary_message(t, unit):
""" A message for the secondary switch """
    print(Fore.CYAN + "\n Select the unit you would like to convert " +
str(t) + "\u00b0" + unit + " into:\n" + Fore.RESET)
menu([
["Celsius", "\u00b0C"],
["Fahrenheit", "\u00b0F"],
["Kelvin", "\u00b0K"],
["Rankin", "\u00b0R"],
["Delisle", "\u00b0De"],
["Newton", "\u00b0N"],
["R\u00e9aumur", "\u00b0R\u00e9"],
["R\u00f8mer", "\u00b0R\u00f8"],
[Fore.RED + "Back\n" + Fore.RESET,""]
])
#-----------------------------------------------------------------------------
def result_message(t, t2, unit, unit2):
from os import system
""" Prints the result to the screen """
print(Fore.GREEN + "\n " + str(round(t, 2)) + "\u00b0" + unit +
Fore.YELLOW +" --> " + Fore.GREEN + Style.BRIGHT +
str(round(t2, 2)) + "\u00b0" + unit2 + "\n" + Style.RESET_ALL)
print(system('pause'))
#-----------------------------------------------------------------------------
def choice(x, y = 0):
""" Checks user input """
while True:
try:
choice = int(input()) # <=== Check if it's an int
if choice <= x and choice > 0 and choice != y: # <=== If choice in
return choice # range and not the same; return choice
break
elif choice == y:
print(Fore.RED + "\n Can't convert to the same unit!\n" +
Fore.RESET)
else:
print(Fore.RED + "\n Invalid choice!\n" + Fore.RESET)
except ValueError: # <=== If choice is invalid prompt message
print(Fore.RED + "\n Invalid input!\n" + Fore.RESET)
#-----------------------------------------------------------------------------
def value_input(unit):
""" Asks user for temp. value, then checks it. """
print(Fore.CYAN + "\n Enter the temperature in \u00b0" + unit + ":\n" +
Fore.RESET)
while True:
try:
value = float(input()) # <=== Make sure input is a float
return value
break
except ValueError:
            print(Fore.RED + "\n Input must be a number!\n" + Fore.RESET)
#-----------------------------------------------------------------------------
def value_check(unit, value):
""" Check for value below absolute zero """
while True:
try: # <=== Checks that value isn't below abs 0
t = value_input(unit) # Returns value if okay
if value(t) != None:
return t
break
except ValueError:
            pass # value was below absolute zero; ask again
#-----------------------------------------------------------------------------
def main():
"""" This is the main function """
while True:
primary_message() # <=== Display menu and take input
x = choice(9)
z = tempConv
if x == 1:
# This is the From Celsius options
t = value_check("C", tempConv.cel_ran)
secondary_message(t, "C")
y = choice(9, 1)
while True:
if y == 2:
t2 = z.cel_fah(t) # <=== Fahrenheit
result_message(t, t2, "C", "F")
break
elif y == 3:
t2 = z.cel_kel(t) # <=== Kelvin
result_message(t, t2, "C", "K")
break
elif y == 4:
t2 = z.cel_ran(t) # <=== Rankin
result_message(t, t2, "C", "R")
break
elif y == 5:
t2 = z.cel_del(t) # <=== Delisle
result_message(t, t2, "C", "De")
break
elif y == 6:
t2 = z.cel_new(t) # <=== Newton
result_message(t, t2, "C", "N")
break
elif y == 7:
t2 = z.cel_rea(t) # <=== Reaumur
result_message(t, t2, "C", "R\u00e9")
break
elif y == 8:
t2 = z.cel_rom(t) # <=== Romer
result_message(t, t2, "C", "R\u00f8")
break
elif y == 9:
break
elif x == 2:
t = value_check("F", tempConv.fah_ran)
secondary_message(t, "F")
y = choice(9, 2)
while True:
if y == 1:
t2 = z.fah_cel(t)
result_message(t, t2, "F", "C")
break
elif y == 3:
t2 = z.fah_kel(t)
result_message(t, t2, "F", "K")
break
elif y == 4:
t2 = z.fah_ran(t)
result_message(t, t2, "F", "R")
break
elif y == 5:
t2 = z.fah_del(t)
result_message(t, t2, "F", "De")
break
elif y == 6:
t2 = z.fah_new(t)
result_message(t, t2, "F", "N")
break
elif y == 7:
t2 = z.fah_rea(t)
result_message(t, t2, "F", "R\u00e9")
break
elif y == 8:
t2 = z.fah_rom(t)
result_message(t, t2, "F", "R\u00f8")
break
elif y == 9:
break
elif x == 3:
t = value_check("K", tempConv.kel_ran)
secondary_message(t, "K")
y = choice(9, 3)
while True:
if y == 1:
t2 = z.kel_cel(t)
result_message(t, t2, "K", "C")
break
elif y == 2:
t2 = z.kel_fah(t)
result_message(t, t2, "K", "F")
break
elif y == 4:
t2 = z.kel_ran(t)
result_message(t, t2, "K", "R")
break
elif y == 5:
t2 = z.kel_del(t)
result_message(t, t2, "K", "De")
break
elif y == 6:
t2 = z.kel_new(t)
result_message(t, t2, "K", "N")
break
elif y == 7:
t2 = z.kel_rea(t)
result_message(t, t2, "K", "R\u00e9")
break
elif y == 8:
t2 = z.kel_rom(t)
result_message(t, t2, "K", "R\u00f8")
break
elif y == 9:
break
elif x == 4:
t = value_check("R", tempConv.ran_rea)
secondary_message(t, "R")
y = choice(9, 4)
while True:
if y == 1:
t2 = z.ran_cel(t)
result_message(t, t2, "R", "C")
break
elif y == 2:
t2 = z.ran_fah(t)
result_message(t, t2, "R", "F")
break
elif y == 3:
t2 = z.ran_kel(t)
result_message(t, t2, "R", "K")
break
elif y == 5:
t2 = z.ran_del(t)
result_message(t, t2, "R", "De")
break
elif y == 6:
t2 = z.ran_new(t)
result_message(t, t2, "R", "N")
break
elif y == 7:
t2 = z.ran_rea(t)
result_message(t, t2, "R", "R\u00e9")
break
elif y == 8:
t2 = z.ran_rom(t)
result_message(t, t2, "R", "R\u00f8")
break
elif y == 9:
break
elif x == 5:
t = value_check("De", tempConv.del_ran)
secondary_message(t, "De")
y = choice(9, 5)
while True:
if y == 1:
t2 = z.del_cel(t)
result_message(t, t2, "De", "C")
break
elif y == 2:
t2 = z.del_fah(t)
result_message(t, t2, "De", "F")
break
elif y == 3:
t2 = z.del_kel(t)
result_message(t, t2, "De", "K")
break
elif y == 4:
t2 = z.del_ran(t)
result_message(t, t2, "De", "R")
break
elif y == 6:
t2 = z.del_new(t)
result_message(t, t2, "De", "N")
break
elif y == 7:
t2 = z.del_rea(t)
result_message(t, t2, "De", "R\u00e9")
break
elif y == 8:
t2 = z.del_rom(t)
result_message(t, t2, "De", "R\u00f8")
break
elif y == 9:
break
elif x == 6:
t = value_check("N", tempConv.new_ran)
secondary_message(t, "N")
y = choice(9, 6)
while True:
if y == 1:
t2 = z.new_cel(t)
result_message(t, t2, "N", "C")
break
elif y == 2:
t2 = z.new_fah(t)
result_message(t, t2, "N", "F")
break
elif y == 3:
t2 = z.new_kel(t)
result_message(t, t2, "N", "K")
break
elif y == 4:
t2 = z.new_ran(t)
result_message(t, t2, "N", "R")
break
elif y == 5:
t2 = z.new_del(t)
result_message(t, t2, "N", "N")
break
elif y == 7:
t2 = z.new_rea(t)
result_message(t, t2, "N", "R\u00e9")
break
elif y == 8:
t2 = z.new_rom(t)
result_message(t, t2, "N", "R\u00f8")
break
elif y == 9:
break
elif x == 7:
t = value_check("R\u00e9", tempConv.rea_ran)
secondary_message(t, "R\u00e9")
y = choice(9, 7)
while True:
if y == 1:
t2 = z.rea_cel(t)
result_message(t, t2, "R\u00e9", "C")
break
elif y == 2:
t2 = z.rea_fah(t)
result_message(t, t2, "R\u00e9", "F")
break
elif y == 3:
t2 = z.rea_kel(t)
result_message(t, t2, "R\u00e9", "K")
break
elif y == 4:
t2 = z.rea_ran(t)
result_message(t, t2, "R\u00e9", "R")
break
elif y == 5:
t2 = z.rea_del(t)
result_message(t, t2, "R\u00e9", "De")
break
elif y == 6:
t2 = z.rea_new(t)
result_message(t, t2, "R\u00e9", "N")
break
elif y == 8:
t2 = z.rea_rom(t)
result_message(t, t2, "R\u00e9", "R\u00f8")
break
elif y == 9:
break
elif x == 8:
t = value_check("R\u00f8", tempConv.rom_ran)
secondary_message(t, "R\u00f8")
y = choice(9, 8)
while True:
if y == 1:
t2 = z.rom_cel(t)
result_message(t, t2, "R\u00f8", "C")
break
elif y == 2:
t2 = z.rom_fah(t)
result_message(t, t2, "R\u00f8", "F")
break
elif y == 3:
t2 = z.rom_kel(t)
result_message(t, t2, "R\u00f8", "K")
break
elif y == 4:
t2 = z.rom_ran(t)
result_message(t, t2, "R\u00f8", "R")
break
elif y == 5:
t2 = z.rom_del(t)
result_message(t, t2, "R\u00f8", "De")
break
elif y == 6:
t2 = z.rom_new(t)
result_message(t, t2, "R\u00f8", "N")
break
elif y == 7:
t2 = z.rom_rea(t)
result_message(t, t2, "R\u00f8", "R\u00e9")
break
elif y == 9:
break
elif x == 9:
print(Fore.CYAN + "\n Goodbye!" + Fore.RESET)
i = 0
break
#-----------------------------------------------------------------------------
if __name__ == "__main__":
init()
main()
| [
3,
5,
7,
10,
11
] |
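The tempConv program above drives all 56 unit pairs through hand-written if/elif ladders. A minimal table-driven sketch of the same dispatch, assuming only the conversion functions the program already calls (tempConv.cel_fah, tempConv.fah_cel, and so on):

import tempConv

UNITS = ['cel', 'fah', 'kel', 'ran', 'del', 'new', 'rea', 'rom']

# (source, target) -> conversion function, built from the module's attributes.
CONVERT = {(src, dst): getattr(tempConv, src + '_' + dst)
           for src in UNITS for dst in UNITS
           if src != dst and hasattr(tempConv, src + '_' + dst)}

def convert(t, src, dst):
    """One dictionary lookup replaces the nested elif chains."""
    return CONVERT[(src, dst)](t)

# e.g. convert(100.0, 'cel', 'fah') should return 212.0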
569 | d9f586bbb72021ee0b37ff8660e26b50d7e6a2d3 | <mask token>
def index(request):
return render(request, 'ALR1.html')
def search(request):
return render(request, 'ALR2.html')
<mask token>
| <mask token>
def index(request):
return render(request, 'ALR1.html')
def search(request):
return render(request, 'ALR2.html')
<mask token>
def pdf(request):
pdfId = request.GET['id']
pdf_data = open('pdf/test.pdf', 'rb').read()
return HttpResponse(pdf_data, content_type='application/pdf')
| <mask token>
def index(request):
return render(request, 'ALR1.html')
def search(request):
return render(request, 'ALR2.html')
def home(request):
return render(request, 'ALR3.html')
def pdf(request):
pdfId = request.GET['id']
pdf_data = open('pdf/test.pdf', 'rb').read()
return HttpResponse(pdf_data, content_type='application/pdf')
| from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return render(request, 'ALR1.html')
def search(request):
return render(request, 'ALR2.html')
def home(request):
return render(request, 'ALR3.html')
def pdf(request):
pdfId = request.GET['id']
pdf_data = open('pdf/test.pdf', 'rb').read()
return HttpResponse(pdf_data, content_type='application/pdf')
| from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return render(request, 'ALR1.html')
def search(request):
return render(request, 'ALR2.html')
def home(request):
return render(request, 'ALR3.html')
def pdf(request):
pdfId = request.GET['id']
# pdf_data=open('pdf/' + pdfId + '.pdf','rb').read()
pdf_data=open('pdf/test.pdf','rb').read()
return HttpResponse(pdf_data, content_type='application/pdf')
| [
2,
3,
4,
5,
6
] |
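The commented-out line in pdf() would open 'pdf/' + pdfId + '.pdf' straight from a GET parameter, which permits path traversal (e.g. id=../../secret). A hedged sketch of a safer variant, assuming the same pdf/ directory layout; FileResponse streams the file instead of reading it whole:

import os
from django.http import FileResponse, Http404

PDF_DIR = 'pdf'

def pdf(request):
    pdf_id = request.GET.get('id', '')
    # Reject anything that is not a bare file name.
    if not pdf_id or os.path.basename(pdf_id) != pdf_id or '..' in pdf_id:
        raise Http404('invalid id')
    path = os.path.join(PDF_DIR, pdf_id + '.pdf')
    if not os.path.isfile(path):
        raise Http404('no such document')
    return FileResponse(open(path, 'rb'), content_type='application/pdf')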
570 | 3d737d0ee9c3af1f8ebe4c6998ad30fa34f42856 | <mask token>
| <mask token>
def user(request):
context = {'users': User.objects.all(), 'user_level': User.objects.get(
id=request.session['user_id'])}
return render(request, 'dashboard/user.html', context)
<mask token>
| <mask token>
def user(request):
context = {'users': User.objects.all(), 'user_level': User.objects.get(
id=request.session['user_id'])}
return render(request, 'dashboard/user.html', context)
def admin(request):
context = {'users': User.objects.all(), 'user_level': User.objects.get(
id=request.session['user_id'])}
return render(request, 'dashboard/admin.html', context)
| from django.shortcuts import render
from ..login.models import *
def user(request):
context = {'users': User.objects.all(), 'user_level': User.objects.get(
id=request.session['user_id'])}
return render(request, 'dashboard/user.html', context)
def admin(request):
context = {'users': User.objects.all(), 'user_level': User.objects.get(
id=request.session['user_id'])}
return render(request, 'dashboard/admin.html', context)
| from django.shortcuts import render
from .. login.models import *
def user(request):
context = {
"users" : User.objects.all(),
"user_level" : User.objects.get(id = request.session['user_id'])
}
return render(request, 'dashboard/user.html', context)
def admin(request):
context = {
"users" : User.objects.all(),
"user_level" : User.objects.get(id = request.session['user_id'])
}
return render(request, 'dashboard/admin.html', context)
| [
0,
1,
2,
3,
4
] |
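Both dashboard views above index request.session['user_id'] directly, so a visitor with no session raises KeyError. A minimal sketch of a guard decorator; the redirect target '/' is an assumption, not part of the original app:

from functools import wraps
from django.shortcuts import redirect

def session_required(view):
    @wraps(view)
    def wrapper(request, *args, **kwargs):
        # Send anonymous visitors back to the login page.
        if 'user_id' not in request.session:
            return redirect('/')
        return view(request, *args, **kwargs)
    return wrapper

# usage: decorate user() and admin() with @session_required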
571 | 937d01eaa82cbfe07b20fae9320c554a0960d7b1 | <mask token>
def meetBlock(x, y, maps):
if maps[x][y] == 1:
return True
else:
return False
def onlyUpdate(n_blocks, xs, ys, maps):
for i in range(n_blocks):
maps[xs[i]][ys[i]] = 1
def oneLineFull(maps, CLR):
for i in range(4, 10):
for j in range(4):
if CLR == GRN and maps[i][j] == 0:
break
elif CLR == BLU and maps[j][i] == 0:
break
else:
return True, i
return False, 0
def pullAndUpdate(olf_idx, maps, CLR):
for i in range(olf_idx, 3, -1):
for j in range(4):
if CLR == GRN:
if olf_idx == 4:
maps[i][j] = 0
else:
maps[i][j] = maps[i - 1][j]
maps[i - 1][j] = 0
elif CLR == BLU:
if olf_idx == 4:
maps[j][i] = 0
else:
maps[j][i] = maps[j][i - 1]
maps[j][i - 1] = 0
def pushAndPullUpdate(n_inBorder, maps, CLR):
for i in range(10 - 1 - n_inBorder, 3, -1):
for j in range(4):
if CLR == GRN:
maps[i + n_inBorder][j] = maps[i][j]
maps[i][j] = 0
elif CLR == BLU:
maps[j][i + n_inBorder] = maps[j][i]
maps[j][i] = 0
def print_maps(maps):
global X, Y
for i in range(X):
for j in range(Y):
print(maps[i][j], end=' ')
print()
print()
def isBlockInBorder(maps, CLR):
cnt = 0
for i in range(4, 6):
for j in range(4):
if CLR == GRN and maps[i][j] == 1 or CLR == BLU and maps[j][i
] == 1:
cnt += 1
break
return cnt
def Mover(n_blocks, xs_ori, ys_ori, maps, CLR):
xs = xs_ori.copy()
ys = ys_ori.copy()
score = 0
STOP_FLAG = False
while not STOP_FLAG:
for i in range(n_blocks):
xt, yt = xs[i] + dx[CLR], ys[i] + dy[CLR]
if outMaps(xt, yt):
STOP_FLAG = True
break
if meetBlock(xt, yt, maps):
STOP_FLAG = True
break
else:
for i in range(n_blocks):
xs[i], ys[i] = xs[i] + dx[CLR], ys[i] + dy[CLR]
onlyUpdate(n_blocks, xs, ys, maps)
OLF_FLAG = True
while OLF_FLAG:
OLF_FLAG, olf_idx = oneLineFull(maps, CLR)
if OLF_FLAG:
score += 1
pullAndUpdate(olf_idx, maps, CLR)
n_inBorder = isBlockInBorder(maps, CLR)
if n_inBorder:
pushAndPullUpdate(n_inBorder, maps, CLR)
return score
def Area_score(maps, CLR):
score = 0
for i in range(4, 10):
for j in range(4):
if CLR == GRN:
score += maps[i][j]
elif CLR == BLU:
score += maps[j][i]
return score
<mask token>
| <mask token>
def outMaps(x, y):
global X, Y
if 0 <= x < X and 0 <= y < Y:
return False
else:
return True
def meetBlock(x, y, maps):
if maps[x][y] == 1:
return True
else:
return False
def onlyUpdate(n_blocks, xs, ys, maps):
for i in range(n_blocks):
maps[xs[i]][ys[i]] = 1
def oneLineFull(maps, CLR):
for i in range(4, 10):
for j in range(4):
if CLR == GRN and maps[i][j] == 0:
break
elif CLR == BLU and maps[j][i] == 0:
break
else:
return True, i
return False, 0
def pullAndUpdate(olf_idx, maps, CLR):
for i in range(olf_idx, 3, -1):
for j in range(4):
if CLR == GRN:
if olf_idx == 4:
maps[i][j] = 0
else:
maps[i][j] = maps[i - 1][j]
maps[i - 1][j] = 0
elif CLR == BLU:
if olf_idx == 4:
maps[j][i] = 0
else:
maps[j][i] = maps[j][i - 1]
maps[j][i - 1] = 0
def pushAndPullUpdate(n_inBorder, maps, CLR):
for i in range(10 - 1 - n_inBorder, 3, -1):
for j in range(4):
if CLR == GRN:
maps[i + n_inBorder][j] = maps[i][j]
maps[i][j] = 0
elif CLR == BLU:
maps[j][i + n_inBorder] = maps[j][i]
maps[j][i] = 0
def print_maps(maps):
global X, Y
for i in range(X):
for j in range(Y):
print(maps[i][j], end=' ')
print()
print()
def isBlockInBorder(maps, CLR):
cnt = 0
for i in range(4, 6):
for j in range(4):
if CLR == GRN and maps[i][j] == 1 or CLR == BLU and maps[j][i
] == 1:
cnt += 1
break
return cnt
def Mover(n_blocks, xs_ori, ys_ori, maps, CLR):
xs = xs_ori.copy()
ys = ys_ori.copy()
score = 0
STOP_FLAG = False
while not STOP_FLAG:
for i in range(n_blocks):
xt, yt = xs[i] + dx[CLR], ys[i] + dy[CLR]
if outMaps(xt, yt):
STOP_FLAG = True
break
if meetBlock(xt, yt, maps):
STOP_FLAG = True
break
else:
for i in range(n_blocks):
xs[i], ys[i] = xs[i] + dx[CLR], ys[i] + dy[CLR]
onlyUpdate(n_blocks, xs, ys, maps)
OLF_FLAG = True
while OLF_FLAG:
OLF_FLAG, olf_idx = oneLineFull(maps, CLR)
if OLF_FLAG:
score += 1
pullAndUpdate(olf_idx, maps, CLR)
n_inBorder = isBlockInBorder(maps, CLR)
if n_inBorder:
pushAndPullUpdate(n_inBorder, maps, CLR)
return score
def Area_score(maps, CLR):
score = 0
for i in range(4, 10):
for j in range(4):
if CLR == GRN:
score += maps[i][j]
elif CLR == BLU:
score += maps[j][i]
return score
<mask token>
| <mask token>
def outMaps(x, y):
global X, Y
if 0 <= x < X and 0 <= y < Y:
return False
else:
return True
def meetBlock(x, y, maps):
if maps[x][y] == 1:
return True
else:
return False
def onlyUpdate(n_blocks, xs, ys, maps):
for i in range(n_blocks):
maps[xs[i]][ys[i]] = 1
def oneLineFull(maps, CLR):
for i in range(4, 10):
for j in range(4):
if CLR == GRN and maps[i][j] == 0:
break
elif CLR == BLU and maps[j][i] == 0:
break
else:
return True, i
return False, 0
def pullAndUpdate(olf_idx, maps, CLR):
for i in range(olf_idx, 3, -1):
for j in range(4):
if CLR == GRN:
if olf_idx == 4:
maps[i][j] = 0
else:
maps[i][j] = maps[i - 1][j]
maps[i - 1][j] = 0
elif CLR == BLU:
if olf_idx == 4:
maps[j][i] = 0
else:
maps[j][i] = maps[j][i - 1]
maps[j][i - 1] = 0
def pushAndPullUpdate(n_inBorder, maps, CLR):
for i in range(10 - 1 - n_inBorder, 3, -1):
for j in range(4):
if CLR == GRN:
maps[i + n_inBorder][j] = maps[i][j]
maps[i][j] = 0
elif CLR == BLU:
maps[j][i + n_inBorder] = maps[j][i]
maps[j][i] = 0
def print_maps(maps):
global X, Y
for i in range(X):
for j in range(Y):
print(maps[i][j], end=' ')
print()
print()
def isBlockInBorder(maps, CLR):
cnt = 0
for i in range(4, 6):
for j in range(4):
if CLR == GRN and maps[i][j] == 1 or CLR == BLU and maps[j][i
] == 1:
cnt += 1
break
return cnt
def Mover(n_blocks, xs_ori, ys_ori, maps, CLR):
xs = xs_ori.copy()
ys = ys_ori.copy()
score = 0
STOP_FLAG = False
while not STOP_FLAG:
for i in range(n_blocks):
xt, yt = xs[i] + dx[CLR], ys[i] + dy[CLR]
if outMaps(xt, yt):
STOP_FLAG = True
break
if meetBlock(xt, yt, maps):
STOP_FLAG = True
break
else:
for i in range(n_blocks):
xs[i], ys[i] = xs[i] + dx[CLR], ys[i] + dy[CLR]
onlyUpdate(n_blocks, xs, ys, maps)
OLF_FLAG = True
while OLF_FLAG:
OLF_FLAG, olf_idx = oneLineFull(maps, CLR)
if OLF_FLAG:
score += 1
pullAndUpdate(olf_idx, maps, CLR)
n_inBorder = isBlockInBorder(maps, CLR)
if n_inBorder:
pushAndPullUpdate(n_inBorder, maps, CLR)
return score
def Area_score(maps, CLR):
score = 0
for i in range(4, 10):
for j in range(4):
if CLR == GRN:
score += maps[i][j]
elif CLR == BLU:
score += maps[j][i]
return score
<mask token>
for i in range(N):
t, x, y = map(int, input().split())
xs, ys = [x], [y]
if t == BLOCK_0:
n_blocks = 1
elif t == BLOCK_1:
n_blocks = 2
xs.append(x)
ys.append(y + 1)
elif t == BLOCK_2:
n_blocks = 2
xs.append(x + 1)
ys.append(y)
total_score += Mover(n_blocks, xs, ys, maps, GRN)
total_score += Mover(n_blocks, xs, ys, maps, BLU)
<mask token>
print(total_score)
print(grn_score + blu_score)
| <mask token>
sys.stdin = open('input.txt', 'rt')
BLOCK_0 = 1
BLOCK_1 = 2
BLOCK_2 = 3
N = int(input())
X, Y = 10, 10
GRN = 0
BLU = 1
maps = [([0] * Y) for _ in range(X)]
dx = [1, 0]
dy = [0, 1]
def outMaps(x, y):
global X, Y
if 0 <= x < X and 0 <= y < Y:
return False
else:
return True
def meetBlock(x, y, maps):
if maps[x][y] == 1:
return True
else:
return False
def onlyUpdate(n_blocks, xs, ys, maps):
for i in range(n_blocks):
maps[xs[i]][ys[i]] = 1
def oneLineFull(maps, CLR):
for i in range(4, 10):
for j in range(4):
if CLR == GRN and maps[i][j] == 0:
break
elif CLR == BLU and maps[j][i] == 0:
break
else:
return True, i
return False, 0
def pullAndUpdate(olf_idx, maps, CLR):
for i in range(olf_idx, 3, -1):
for j in range(4):
if CLR == GRN:
if olf_idx == 4:
maps[i][j] = 0
else:
maps[i][j] = maps[i - 1][j]
maps[i - 1][j] = 0
elif CLR == BLU:
if olf_idx == 4:
maps[j][i] = 0
else:
maps[j][i] = maps[j][i - 1]
maps[j][i - 1] = 0
def pushAndPullUpdate(n_inBorder, maps, CLR):
for i in range(10 - 1 - n_inBorder, 3, -1):
for j in range(4):
if CLR == GRN:
maps[i + n_inBorder][j] = maps[i][j]
maps[i][j] = 0
elif CLR == BLU:
maps[j][i + n_inBorder] = maps[j][i]
maps[j][i] = 0
def print_maps(maps):
global X, Y
for i in range(X):
for j in range(Y):
print(maps[i][j], end=' ')
print()
print()
def isBlockInBorder(maps, CLR):
cnt = 0
for i in range(4, 6):
for j in range(4):
if CLR == GRN and maps[i][j] == 1 or CLR == BLU and maps[j][i
] == 1:
cnt += 1
break
return cnt
def Mover(n_blocks, xs_ori, ys_ori, maps, CLR):
xs = xs_ori.copy()
ys = ys_ori.copy()
score = 0
STOP_FLAG = False
while not STOP_FLAG:
for i in range(n_blocks):
xt, yt = xs[i] + dx[CLR], ys[i] + dy[CLR]
if outMaps(xt, yt):
STOP_FLAG = True
break
if meetBlock(xt, yt, maps):
STOP_FLAG = True
break
else:
for i in range(n_blocks):
xs[i], ys[i] = xs[i] + dx[CLR], ys[i] + dy[CLR]
onlyUpdate(n_blocks, xs, ys, maps)
OLF_FLAG = True
while OLF_FLAG:
OLF_FLAG, olf_idx = oneLineFull(maps, CLR)
if OLF_FLAG:
score += 1
pullAndUpdate(olf_idx, maps, CLR)
n_inBorder = isBlockInBorder(maps, CLR)
if n_inBorder:
pushAndPullUpdate(n_inBorder, maps, CLR)
return score
def Area_score(maps, CLR):
score = 0
for i in range(4, 10):
for j in range(4):
if CLR == GRN:
score += maps[i][j]
elif CLR == BLU:
score += maps[j][i]
return score
total_score = 0
for i in range(N):
t, x, y = map(int, input().split())
xs, ys = [x], [y]
if t == BLOCK_0:
n_blocks = 1
elif t == BLOCK_1:
n_blocks = 2
xs.append(x)
ys.append(y + 1)
elif t == BLOCK_2:
n_blocks = 2
xs.append(x + 1)
ys.append(y)
total_score += Mover(n_blocks, xs, ys, maps, GRN)
total_score += Mover(n_blocks, xs, ys, maps, BLU)
grn_score = Area_score(maps, GRN)
blu_score = Area_score(maps, BLU)
print(total_score)
print(grn_score + blu_score)
| import sys
sys.stdin = open('input.txt', 'rt')
BLOCK_0 = 1
BLOCK_1 = 2
BLOCK_2 = 3
N = int(input())
X, Y = 10, 10
# x: 행 , y: 열A
GRN = 0
BLU = 1
maps = [[0]*Y for _ in range(X)]
dx = [1, 0]
dy = [0, 1]
def outMaps(x, y):
global X, Y
if 0<=x<X and 0<=y<Y: return False
else: return True
def meetBlock(x, y, maps):
if maps[x][y] == 1: return True
else: return False
def onlyUpdate(n_blocks, xs, ys, maps):
for i in range(n_blocks):
maps[xs[i]][ys[i]] = 1
def oneLineFull(maps, CLR):
for i in range(4, 10):
for j in range(4):
if CLR == GRN and maps[i][j] == 0:
break
elif CLR == BLU and maps[j][i] == 0:
break
        else: # every cell was 1, so this line is full
return True, i
return False, 0
def pullAndUpdate(olf_idx, maps, CLR):
#for olf in list_olf:
for i in range(olf_idx, 3, -1):
for j in range(4):
if CLR == GRN:
if olf_idx == 4:
maps[i][j] = 0
else:
maps[i][j] = maps[i-1][j]
maps[i-1][j] = 0
elif CLR == BLU:
if olf_idx == 4:
maps[j][i] = 0
else:
maps[j][i] = maps[j][i-1]
maps[j][i-1] = 0
def pushAndPullUpdate(n_inBorder, maps, CLR):
for i in range(10-1-n_inBorder, 3, -1):
for j in range(4):
if CLR == GRN:
maps[i+n_inBorder][j] = maps[i][j]
maps[i][j] = 0
elif CLR == BLU:
maps[j][i+n_inBorder] = maps[j][i]
maps[j][i] = 0
def print_maps(maps):
global X, Y
for i in range(X):
for j in range(Y):
print(maps[i][j], end=' ')
print()
print()
def isBlockInBorder(maps, CLR):
cnt = 0
for i in range(4, 6):
for j in range(4):
if (CLR == GRN and maps[i][j] == 1) or (CLR == BLU and maps[j][i] == 1):
cnt += 1
break
return cnt
def Mover(n_blocks, xs_ori, ys_ori, maps, CLR):
xs = xs_ori.copy()
ys = ys_ori.copy()
score = 0
STOP_FLAG = False
while not STOP_FLAG:
for i in range(n_blocks):
xt, yt = xs[i] + dx[CLR], ys[i] + dy[CLR]
if outMaps(xt, yt):
STOP_FLAG = True
break
if meetBlock(xt, yt, maps):
STOP_FLAG = True
break
else:
            # no break occurred, so commit the move
for i in range(n_blocks):
xs[i], ys[i] = xs[i] + dx[CLR], ys[i] + dy[CLR]
    # if we exited the loop with STOP_FLAG == True,
    # this block has finished moving, so check whether any line is completely full
    # write the block into maps
onlyUpdate(n_blocks, xs, ys, maps)
    # if any line is completely full
OLF_FLAG = True
while OLF_FLAG:
OLF_FLAG, olf_idx = oneLineFull(maps, CLR)
if OLF_FLAG:
score += 1
pullAndUpdate(olf_idx, maps, CLR)
    # if a block remains inside the border area
n_inBorder = isBlockInBorder(maps, CLR)
if n_inBorder:
pushAndPullUpdate(n_inBorder, maps, CLR)
return score
def Area_score(maps, CLR):
score = 0
for i in range(4, 10):
for j in range(4):
if CLR == GRN: score += maps[i][j]
elif CLR == BLU: score += maps[j][i]
return score
total_score = 0
for i in range(N):
t, x, y = map(int, input().split())
xs, ys = [x], [y]
if t == BLOCK_0:
n_blocks = 1
elif t == BLOCK_1:
n_blocks = 2
xs.append(x)
ys.append(y+1)
elif t == BLOCK_2:
n_blocks = 2
xs.append(x+1)
ys.append(y)
total_score += Mover(n_blocks, xs, ys, maps, GRN)
total_score += Mover(n_blocks, xs, ys, maps, BLU)
#print_maps(maps)
grn_score = Area_score(maps, GRN)
blu_score = Area_score(maps, BLU)
print(total_score)
print(grn_score+blu_score)
| [
9,
10,
11,
12,
14
] |
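Mover above serves both boards with one routine because gravity is encoded as parallel direction lists (dx = [1, 0], dy = [0, 1]) indexed by the board color. A self-contained sketch of that same pattern on a toy grid:

# Direction 0 falls down the rows; direction 1 slides right across the columns.
dx = [1, 0]
dy = [0, 1]

def slide(x, y, grid, d):
    """Advance one cell along direction d until it would leave the grid
    or land on an occupied cell, then return its resting position."""
    rows, cols = len(grid), len(grid[0])
    while True:
        nx, ny = x + dx[d], y + dy[d]
        if not (0 <= nx < rows and 0 <= ny < cols) or grid[nx][ny]:
            return x, y
        x, y = nx, ny

grid = [[0] * 4 for _ in range(4)]
print(slide(0, 0, grid, 0))  # (3, 0): dropped to the bottom row
print(slide(0, 0, grid, 1))  # (0, 3): slid to the rightmost column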
572 | ebe546794131eddea396bd6b82fbb41aeead4661 | <mask token>
| <mask token>
plt.plot(point_p1_s, point_p1_h, 'bs-')
plt.plot(point_p2_s, point_p2_h, 'bs-')
plt.plot(point_is_s, point_is_h, 'ys-')
plt.plot(point_hp_s, point_hp_h, 'rs-', label='Expansion Line')
<mask token>
plt.legend(loc='best', bbox_to_anchor=[0.5, 0.5], ncol=2, shadow=True,
title=_title)
plt.annotate('(P1,T1)', xy=(s1, h1), xycoords='data', xytext=(+10, +30),
textcoords='offset points', fontsize=12, arrowprops=dict(arrowstyle=
'->', connectionstyle='arc3,rad=.2'))
plt.annotate('(P2,T2)', xy=(s2, h2), xycoords='data', xytext=(+10, +30),
textcoords='offset points', fontsize=12, arrowprops=dict(arrowstyle=
'->', connectionstyle='arc3,rad=.2'))
plt.xlabel('s(kJ/(kg.K))')
plt.ylabel('h(kJ/kg)')
plt.show()
| <mask token>
p1, t1 = 16, 535
p2, t2 = 3.56, 315
h1 = pt2h(p1, t1)
s1 = pt2s(p1, t1)
h2 = pt2h(p2, t2)
s2 = pt2s(p2, t2)
h2s = ps2h(p2, s1)
his = ishd(p1, t1, p2)
ef = ief(p1, t1, p2, t2)
samp = 0.01
smp1 = s1 - samp
hsmp1 = ps2h(p1, smp1)
sap1 = s1 + samp
hsap1 = ps2h(p1, sap1)
smt1 = s1 - samp
hsmt1 = ps2h(p1, smp1)
sat1 = s1 + samp
hsat1 = ts2h(t1, sap1)
point_p1_h = np.zeros(shape=3)
point_p1_h[0] = hsmp1
point_p1_h[1] = h1
point_p1_h[2] = hsap1
point_p1_s = np.zeros(shape=3)
point_p1_s[0] = smp1
point_p1_s[1] = s1
point_p1_s[2] = sap1
smp2 = s1 - samp
hsmp2 = ps2h(p2, smp2)
sap2 = s2 + samp
hsap2 = ps2h(p2, sap2)
smt2 = s2 - samp
hsmt2 = ps2h(p1, smp1)
sat2 = s2 + samp
hsat2 = ts2h(t2, sap1)
point_p2_h = np.zeros(shape=3)
point_p2_h[0] = hsmp2
point_p2_h[1] = h2
point_p2_h[2] = hsap2
point_p2_s = np.zeros(shape=3)
point_p2_s[0] = smp2
point_p2_s[1] = s2
point_p2_s[2] = sap2
point_is_h = np.zeros(shape=2)
point_is_h[0] = h1
point_is_h[1] = h2s
point_is_s = np.zeros(shape=2)
point_is_s[0] = s1
point_is_s[1] = s1
point_hp_h = np.zeros(shape=2)
point_hp_h[0] = h1
point_hp_h[1] = h2
point_hp_s = np.zeros(shape=2)
point_hp_s[0] = s1
point_hp_s[1] = s2
plt.plot(point_p1_s, point_p1_h, 'bs-')
plt.plot(point_p2_s, point_p2_h, 'bs-')
plt.plot(point_is_s, point_is_h, 'ys-')
plt.plot(point_hp_s, point_hp_h, 'rs-', label='Expansion Line')
_title = ('The isentropic efficiency = ' + '$\\frac{h1-h2}{h1-h2s}$' + '=' +
'{:.2f}'.format(ef) + '%')
plt.legend(loc='best', bbox_to_anchor=[0.5, 0.5], ncol=2, shadow=True,
title=_title)
plt.annotate('(P1,T1)', xy=(s1, h1), xycoords='data', xytext=(+10, +30),
textcoords='offset points', fontsize=12, arrowprops=dict(arrowstyle=
'->', connectionstyle='arc3,rad=.2'))
plt.annotate('(P2,T2)', xy=(s2, h2), xycoords='data', xytext=(+10, +30),
textcoords='offset points', fontsize=12, arrowprops=dict(arrowstyle=
'->', connectionstyle='arc3,rad=.2'))
plt.xlabel('s(kJ/(kg.K))')
plt.ylabel('h(kJ/kg)')
plt.show()
| <mask token>
from seuif97 import *
import matplotlib.pyplot as plt
import numpy as np
p1, t1 = 16, 535
p2, t2 = 3.56, 315
h1 = pt2h(p1, t1)
s1 = pt2s(p1, t1)
h2 = pt2h(p2, t2)
s2 = pt2s(p2, t2)
h2s = ps2h(p2, s1)
his = ishd(p1, t1, p2)
ef = ief(p1, t1, p2, t2)
samp = 0.01
smp1 = s1 - samp
hsmp1 = ps2h(p1, smp1)
sap1 = s1 + samp
hsap1 = ps2h(p1, sap1)
smt1 = s1 - samp
hsmt1 = ps2h(p1, smp1)
sat1 = s1 + samp
hsat1 = ts2h(t1, sap1)
point_p1_h = np.zeros(shape=3)
point_p1_h[0] = hsmp1
point_p1_h[1] = h1
point_p1_h[2] = hsap1
point_p1_s = np.zeros(shape=3)
point_p1_s[0] = smp1
point_p1_s[1] = s1
point_p1_s[2] = sap1
smp2 = s1 - samp
hsmp2 = ps2h(p2, smp2)
sap2 = s2 + samp
hsap2 = ps2h(p2, sap2)
smt2 = s2 - samp
hsmt2 = ps2h(p1, smp1)
sat2 = s2 + samp
hsat2 = ts2h(t2, sap1)
point_p2_h = np.zeros(shape=3)
point_p2_h[0] = hsmp2
point_p2_h[1] = h2
point_p2_h[2] = hsap2
point_p2_s = np.zeros(shape=3)
point_p2_s[0] = smp2
point_p2_s[1] = s2
point_p2_s[2] = sap2
point_is_h = np.zeros(shape=2)
point_is_h[0] = h1
point_is_h[1] = h2s
point_is_s = np.zeros(shape=2)
point_is_s[0] = s1
point_is_s[1] = s1
point_hp_h = np.zeros(shape=2)
point_hp_h[0] = h1
point_hp_h[1] = h2
point_hp_s = np.zeros(shape=2)
point_hp_s[0] = s1
point_hp_s[1] = s2
plt.plot(point_p1_s, point_p1_h, 'bs-')
plt.plot(point_p2_s, point_p2_h, 'bs-')
plt.plot(point_is_s, point_is_h, 'ys-')
plt.plot(point_hp_s, point_hp_h, 'rs-', label='Expansion Line')
_title = ('The isentropic efficiency = ' + '$\\frac{h1-h2}{h1-h2s}$' + '=' +
'{:.2f}'.format(ef) + '%')
plt.legend(loc='best', bbox_to_anchor=[0.5, 0.5], ncol=2, shadow=True,
title=_title)
plt.annotate('(P1,T1)', xy=(s1, h1), xycoords='data', xytext=(+10, +30),
textcoords='offset points', fontsize=12, arrowprops=dict(arrowstyle=
'->', connectionstyle='arc3,rad=.2'))
plt.annotate('(P2,T2)', xy=(s2, h2), xycoords='data', xytext=(+10, +30),
textcoords='offset points', fontsize=12, arrowprops=dict(arrowstyle=
'->', connectionstyle='arc3,rad=.2'))
plt.xlabel('s(kJ/(kg.K))')
plt.ylabel('h(kJ/kg)')
plt.show()
| # -*- coding: utf-8 -*-
"""
This is a simple sample for seuif.py
License: this code is in the public domain
Author: Cheng Maohua
Email: [email protected]
Last modified: 2016.4.20
"""
from seuif97 import *
import matplotlib.pyplot as plt
import numpy as np
p1,t1 = 16, 535
p2,t2 = 3.56,315
h1 = pt2h(p1, t1)
s1 = pt2s(p1, t1)
h2 = pt2h(p2, t2)
s2 = pt2s(p2, t2)
h2s = ps2h(p2, s1)
his = ishd(p1, t1, p2)
ef = ief(p1, t1, p2, t2)
# print('The isentropic efficiency is ',ef)
# Four lines: the p1 and p2 isobars, the isentropic enthalpy-drop line, and the expansion line
samp = 0.01
smp1 = s1 - samp
hsmp1 = ps2h(p1, smp1)
sap1 = s1 + samp
hsap1 = ps2h(p1, sap1)
smt1 = s1 - samp
hsmt1 = ps2h(p1, smp1)
sat1 = s1 + samp
hsat1 = ts2h(t1, sap1)
point_p1_h = np.zeros(shape=3)
point_p1_h[0] = hsmp1
point_p1_h[1] = h1
point_p1_h[2] = hsap1
point_p1_s = np.zeros(shape=3)
point_p1_s[0] = smp1
point_p1_s[1] = s1
point_p1_s[2] = sap1
# p2
smp2 = s1 - samp # extended from the isentropic enthalpy-drop point
hsmp2 = ps2h(p2, smp2)
sap2 = s2 + samp
hsap2 = ps2h(p2, sap2)
smt2 = s2 - samp
hsmt2 = ps2h(p1, smp1)
sat2 = s2 + samp
hsat2 = ts2h(t2, sap1)
point_p2_h = np.zeros(shape=3)
point_p2_h[0] = hsmp2
point_p2_h[1] = h2
point_p2_h[2] = hsap2
point_p2_s = np.zeros(shape=3)
point_p2_s[0] = smp2
point_p2_s[1] = s2
point_p2_s[2] = sap2
# isentropic enthalpy drop
point_is_h = np.zeros(shape=2)
point_is_h[0] = h1
point_is_h[1] = h2s
point_is_s = np.zeros(shape=2)
point_is_s[0] = s1
point_is_s[1] = s1
# HP Expansion Line
point_hp_h = np.zeros(shape=2)
point_hp_h[0] = h1
point_hp_h[1] = h2
point_hp_s = np.zeros(shape=2)
point_hp_s[0] = s1
point_hp_s[1] = s2
plt.plot(point_p1_s, point_p1_h, 'bs-')
plt.plot(point_p2_s, point_p2_h, 'bs-')
plt.plot(point_is_s, point_is_h, 'ys-')
plt.plot(point_hp_s, point_hp_h, 'rs-', label='Expansion Line')
_title = 'The isentropic efficiency = ' + \
r'$\frac{h1-h2}{h1-h2s}$' + '=' + '{:.2f}'.format(ef) + '%'
plt.legend(loc="best", bbox_to_anchor=[0.5, 0.5],
ncol=2, shadow=True, title=_title)
# annotate some interesting points
plt.annotate('(P1,T1)',
xy=(s1, h1), xycoords='data',
xytext=(+10, +30), textcoords='offset points', fontsize=12,
arrowprops=dict(arrowstyle="->", connectionstyle="arc3,rad=.2"))
plt.annotate('(P2,T2)',
xy=(s2, h2), xycoords='data',
xytext=(+10, +30), textcoords='offset points', fontsize=12,
arrowprops=dict(arrowstyle="->", connectionstyle="arc3,rad=.2"))
plt.xlabel('s(kJ/(kg.K))')
plt.ylabel('h(kJ/kg)')
plt.show()
| [
0,
1,
2,
3,
4
] |
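The plot's legend reports the isentropic efficiency as (h1 - h2) / (h1 - h2s); with the enthalpies already in hand it can be recomputed directly and checked against the library's ief() value. A short sketch using the same seuif97 calls as the script above:

from seuif97 import pt2h, pt2s, ps2h, ief

p1, t1 = 16, 535      # inlet
p2, t2 = 3.56, 315    # outlet
h1 = pt2h(p1, t1)                # inlet enthalpy, kJ/kg
h2 = pt2h(p2, t2)                # actual outlet enthalpy
h2s = ps2h(p2, pt2s(p1, t1))     # ideal outlet enthalpy at the inlet entropy
eta = 100.0 * (h1 - h2) / (h1 - h2s)
print('by hand: %.2f%%   ief(): %.2f%%' % (eta, ief(p1, t1, p2, t2)))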
573 | 906265182a9776fec5bad41bfc9ee68b36873d1e | <mask token>
| try:
    x = int(input('Enter an integer: '))
print(x)
except:
    print('Not an integer.')
| # Using exception handling, print an error message when a non-integer is entered. (Error message: Not an integer.)
try:
    x = int(input('Enter an integer: '))
print(x)
except:
    print('Not an integer.')
| null | null | [
0,
1,
2
] |
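The bare except above also swallows KeyboardInterrupt; catching ValueError specifically and looping until the input parses is the more common pattern. A minimal sketch:

def read_int(prompt='Enter an integer: '):
    """Keep asking until the input parses as an integer."""
    while True:
        try:
            return int(input(prompt))
        except ValueError:
            print('Not an integer.')

print(read_int())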
574 | b7aa99e9e4af3bef4b2b3e7d8ab9bf159a093af6 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (C) 2011 Lionel Bergeret
#
# ----------------------------------------------------------------
# The contents of this file are distributed under the CC0 license.
# See http://creativecommons.org/publicdomain/zero/1.0/
# ----------------------------------------------------------------
import os
import re
import cPickle
from optparse import OptionParser
# Import shapefile informations
from shapelib import ShapeFile
import dbflib
# Shapely
from shapely.geometry import Polygon
from shapely.ops import cascaded_union
# Numpy and matplotlib
import numpy as np
from matplotlib.nxutils import points_inside_poly
try:
import psyco
psyco.full()
except ImportError:
print "Psyco plugin missing, will run slower"
pass
def main(shapefile, picklefile):
if picklefile:
[npts, x, y, z, zraw, xil, yil, grid, missing] = cPickle.load(open(picklefile,'rb'))
points = np.vstack((x,y)).T
# Load administrative area
shp = ShapeFile(shapefile)
dbf = dbflib.open(shapefile)
coastline = []
# Process every shape from the ShapeFile
print "Processing shapes ..."
for npoly in range(shp.info()[0]):
shp_object = shp.read_object(npoly)
shp_dict = dbf.read_record(npoly)
verts = shp_object.vertices()
if "NAME_1" in shp_dict:
name = "%s" % (shp_dict["NAME_1"])
else:
name = "Unknown"
print "Processing %s" % (name)
# Extract city polygon vertices (ring per ring)
for ring in verts:
vx = []
vy = []
for point in ring:
vx.append(point[0])
vy.append(point[1])
# Only process big enough rings
if len(vx) > 256: # big enough
poly_verts = zip(vx,vy)
if picklefile:
# Compute intersections with the city
                    intersection = points_inside_poly(points, poly_verts)
                    npts = sum(1 for x in intersection if x)
else:
npts = 1 # Add this polygon
# Add the ring to the coastine if measures inside
if npts > 0:
polygon = Polygon(poly_verts)
if not polygon.is_empty and polygon.is_valid:
print "- Add polygon (%d)" % (len(vx))
coastline.append(polygon)
else:
print "- Skip polygon (%d)" % (len(vx))
print "Union of %d polygons" % len(coastline)
coast = cascaded_union(coastline)
cPickle.dump(coast,open('coastline.pickle','wb'),-1)
print "Done."
# -----------------------------------------------------------------------------
# Main
# -----------------------------------------------------------------------------
if __name__ == '__main__':
parser = OptionParser("Usage: safecastCoastline <shapefile>")
parser.add_option("-s", "--safecast", dest="scfilename",
help="provice the safecast.pickle file for intersections.", metavar="FILE")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("Wrong number of arguments")
main(args[0], options.scfilename)
| null | null | null | null | [
0
] |
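matplotlib.nxutils, which supplies points_inside_poly above, was removed from matplotlib (1.3 onward), so the script no longer imports on modern installs; matplotlib.path.Path offers the same point-in-polygon test. A hedged sketch of a drop-in replacement:

import numpy as np
from matplotlib.path import Path

def points_inside_poly(points, poly_verts):
    """Boolean mask telling which (x, y) points fall inside the polygon."""
    return Path(poly_verts).contains_points(points)

square = [(0, 0), (1, 0), (1, 1), (0, 1)]
pts = np.array([[0.5, 0.5], [2.0, 2.0]])
print(points_inside_poly(pts, square))  # [ True False]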
575 | aa4d872c6a529d8acf18f1c3b477bc1816ac2887 | <mask token>
| <mask token>
print('bob' in adict)
print('name' in adict)
for key in adict:
print('%s:%s' % (key, adict[key]))
print('%(name)s:%(age)s' % adict)
| adict = {'name': 'bob', 'age': 23}
print('bob' in adict)
print('name' in adict)
for key in adict:
print('%s:%s' % (key, adict[key]))
print('%(name)s:%(age)s' % adict)
| null | null | [
0,
1,
2
] |
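Iterating over a dict yields only its keys, which is why the loop above has to index adict[key]; items() yields key/value pairs directly. A one-line sketch of the same output:

adict = {'name': 'bob', 'age': 23}
for key, value in adict.items():
    print('%s:%s' % (key, value))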
576 | ebc050544da69837cc2b8977f347380b94474bab | <mask token>
def _build(_input, *nodes):
x = _input
for node in nodes:
if callable(node):
x = node(x)
elif isinstance(node, list):
x = [_build(x, branch) for branch in node]
elif isinstance(node, tuple):
x = _build(x, *node)
else:
x = node
return x
<mask token>
| <mask token>
def _build(_input, *nodes):
x = _input
for node in nodes:
if callable(node):
x = node(x)
elif isinstance(node, list):
x = [_build(x, branch) for branch in node]
elif isinstance(node, tuple):
x = _build(x, *node)
else:
x = node
return x
<mask token>
model.summary()
| <mask token>
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
classes = ['normal cells', 'blasts', 'blasts_highSSC_granulocytes',
'blasts_highSSC_middle_ugly', 'blasts_highSSC_upper_dead']
num_classes = len(classes)
image_size = 66
imagefiles = np.load('imagefiles_supplementary.npz')
X_train = imagefiles['X_train']
X_test = imagefiles['X_test']
y_train = imagefiles['y_train']
y_test = imagefiles['y_test']
X_train = X_train.reshape((-1, image_size, image_size, 1))
X_test = X_test.reshape((-1, image_size, image_size, 1))
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
y_train = np_utils.to_categorical(y_train, num_classes)
y_test = np_utils.to_categorical(y_test, num_classes)
def _build(_input, *nodes):
x = _input
for node in nodes:
if callable(node):
x = node(x)
elif isinstance(node, list):
x = [_build(x, branch) for branch in node]
elif isinstance(node, tuple):
x = _build(x, *node)
else:
x = node
return x
_input = Input(X_train.shape[1:])
output = _build(_input, [(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), MaxPooling2D(pool_size=(3,
3), strides=(2, 2))], Add(), [(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), MaxPooling2D(pool_size=(3,
3), strides=(2, 2))], Add(), [(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), MaxPooling2D(pool_size=(3,
3), strides=(2, 2))], Add(), [(Conv2D(112, (1, 1), strides=(1, 1)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), (Conv2D(48, (3, 3),
strides=(1, 1)), BatchNormalization(axis=-1, momentum=0.99, epsilon=
0.001, center=True, scale=True, beta_initializer='zeros',
gamma_initializer='ones', moving_mean_initializer='zeros',
moving_variance_initializer='ones', beta_regularizer=None,
gamma_regularizer=None, beta_constraint=None, gamma_constraint=None),
Activation('relu'))], Add(), [MaxPooling2D(pool_size=(2, 2), strides=
None, padding='valid', data_format=None), Flatten(), Dense(256,
activation='relu'), Dropout(0.5), Dense(num_classes, activation='softmax')]
)
model = Model(_input, output)
model.summary()
| import os
import numpy as np
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Flatten, concatenate
from keras.layers import Conv2D, MaxPooling2D, GlobalAveragePooling2D, Activation
from keras.layers.normalization import BatchNormalization
from keras.optimizers import SGD
from keras.utils import np_utils
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix, f1_score
import seaborn as sns
from keras.layers import Input, Dense, Add, Multiply
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
classes = ['normal cells', 'blasts', 'blasts_highSSC_granulocytes',
'blasts_highSSC_middle_ugly', 'blasts_highSSC_upper_dead']
num_classes = len(classes)
image_size = 66
imagefiles = np.load('imagefiles_supplementary.npz')
X_train = imagefiles['X_train']
X_test = imagefiles['X_test']
y_train = imagefiles['y_train']
y_test = imagefiles['y_test']
X_train = X_train.reshape((-1, image_size, image_size, 1))
X_test = X_test.reshape((-1, image_size, image_size, 1))
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
y_train = np_utils.to_categorical(y_train, num_classes)
y_test = np_utils.to_categorical(y_test, num_classes)
def _build(_input, *nodes):
x = _input
for node in nodes:
if callable(node):
x = node(x)
elif isinstance(node, list):
x = [_build(x, branch) for branch in node]
elif isinstance(node, tuple):
x = _build(x, *node)
else:
x = node
return x
_input = Input(X_train.shape[1:])
output = _build(_input, [(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), MaxPooling2D(pool_size=(3,
3), strides=(2, 2))], Add(), [(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), MaxPooling2D(pool_size=(3,
3), strides=(2, 2))], Add(), [(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), MaxPooling2D(pool_size=(3,
3), strides=(2, 2))], Add(), [(Conv2D(112, (1, 1), strides=(1, 1)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True,
scale=True, beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None), Activation('relu')), (Conv2D(48, (3, 3),
strides=(1, 1)), BatchNormalization(axis=-1, momentum=0.99, epsilon=
0.001, center=True, scale=True, beta_initializer='zeros',
gamma_initializer='ones', moving_mean_initializer='zeros',
moving_variance_initializer='ones', beta_regularizer=None,
gamma_regularizer=None, beta_constraint=None, gamma_constraint=None),
Activation('relu'))], Add(), [MaxPooling2D(pool_size=(2, 2), strides=
None, padding='valid', data_format=None), Flatten(), Dense(256,
activation='relu'), Dropout(0.5), Dense(num_classes, activation='softmax')]
)
model = Model(_input, output)
model.summary()
| import os
import numpy as np
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Flatten, concatenate
from keras.layers import Conv2D, MaxPooling2D, GlobalAveragePooling2D, Activation
from keras.layers.normalization import BatchNormalization
from keras.optimizers import SGD
from keras.utils import np_utils
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix, f1_score
import seaborn as sns
from keras.layers import Input, Dense, Add, Multiply
# Hide the macOS-specific warning (shown when running on the CPU because there is no GPU)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# Initialize parameters
classes = [
"normal cells",
"blasts",
"blasts_highSSC_granulocytes",
"blasts_highSSC_middle_ugly",
"blasts_highSSC_upper_dead",
]
num_classes = len(classes)
image_size = 66
# Load the data
imagefiles = np.load("imagefiles_supplementary.npz")
X_train = imagefiles['X_train']
X_test = imagefiles['X_test']
y_train = imagefiles['y_train']
y_test = imagefiles['y_test']
# Reshape the grayscale images so they can be fed into the CNN
X_train = X_train.reshape((-1, image_size, image_size, 1))
X_test = X_test.reshape((-1, image_size, image_size, 1))
# Normalize the data
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
# One-hot encode the labels (a 1 is placed at the position of the correct label)
y_train = np_utils.to_categorical(y_train, num_classes)
y_test = np_utils.to_categorical(y_test, num_classes)
def _build(_input, *nodes):
x = _input
for node in nodes:
if callable(node):
x = node(x)
elif isinstance(node, list):
x = [_build(x, branch) for branch in node]
elif isinstance(node, tuple):
x = _build(x, *node)
else:
x = node
return x
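# Illustration (hypothetical call, not part of the original script): _build is
# a small recursive helper, not a Keras API. A callable is applied to the
# running tensor, a tuple is unpacked into a sequential chain, and a list fans
# the tensor out into parallel branches whose outputs are carried forward as a
# list. For example:
#   x = _build(some_input,
#              (Conv2D(8, (3, 3)), Activation('relu')),  # sequential chain
#              [Flatten(), GlobalAveragePooling2D()])    # parallel branches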
_input = Input(X_train.shape[1:])
output = _build(
_input,
    # Definition of the 3 reduction dual-path modules
    # ---------------------------
    # Add a convolution layer (96 filters)
    # Batch normalization
    # Activation function: ReLU
    # ---------------------------
    # MaxPooling
    # ---------------------------
# Reduction dual-path module1
[(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None),
Activation('relu')),
MaxPooling2D(pool_size=(3, 3), strides=(2, 2))],
# Reduction dual-path module2
Add(),
[(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None),
Activation('relu')),
MaxPooling2D(pool_size=(3, 3), strides=(2, 2))],
# Reduction dual-path module3
Add(),
[(Conv2D(96, (3, 3), strides=(2, 2)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None),
Activation('relu')),
MaxPooling2D(pool_size=(3, 3), strides=(2, 2))],
    # Definition of the 10 dual-path modules
    # ---------------------------
    # Add a convolution layer (112 filters)
    # Batch normalization
    # Activation function: ReLU
    # ---------------------------
    # Definition of the second path of each dual-path module
    # Add a convolution layer (48 filters)
    # Batch normalization
    # Activation function: ReLU
    # ---------------------------
# Dual-path modules1
Add(),
[(Conv2D(112, (1, 1), strides=(1, 1)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None),
Activation('relu'),
),
(Conv2D(48, (3, 3), strides=(1, 1)),
BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
beta_initializer='zeros', gamma_initializer='ones',
moving_mean_initializer='zeros', moving_variance_initializer='ones',
beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
gamma_constraint=None),
Activation('relu'),
)],
# # Dual-path modules2
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules3
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules4
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules5
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules6
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules7
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules8
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules9
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
# # Dual-path modules10
# Add(),
# [(Conv2D(112, (1, 1), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu')),
# (Conv2D(48, (3, 3), strides=(1, 1)),
# BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001, center=True, scale=True,
# beta_initializer='zeros', gamma_initializer='ones',
# moving_mean_initializer='zeros', moving_variance_initializer='ones',
# beta_regularizer=None, gamma_regularizer=None, beta_constraint=None,
# gamma_constraint=None),
# Activation('relu'))],
    # Fully connected layers
Add(),
[MaxPooling2D(pool_size=(2, 2), strides=None, padding='valid', data_format=None),
Flatten(),
Dense(256, activation='relu'),
Dropout(0.5),
Dense(num_classes, activation='softmax')
]
)
model = Model(_input, output)
model.summary()
# # Configure the loss function
# opt = SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False)
# model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
#
# # Run the training
# # Training
# print("start training")
# hist = model.fit(X_train, y_train, batch_size=32, epochs=30, validation_data=(X_test, y_test))
# # Evaluation
# print("start eval")
# score = model.evaluate(X_test, y_test, batch_size=32, verbose=1) # verbose: show intermediate results
# print('Test Loss: ', score[0])
# print('Test Accuracy: ', score[1])
#
# model.save('leukemia_cnn_supplementary.h5')
#
# # Plot the training history as graphs
# # Plot the accuracy curve
# fig = plt.figure()
# plt.plot(hist.history['accuracy'])
# plt.plot(hist.history['val_accuracy'])
# plt.title('Accuracy')
# plt.legend(['train', 'test'], loc='upper left')
# fig.savefig('result/cnn_supplementary/cnn_accuracy_supplementary.png')
# plt.close()
# # Plot the loss curve
# fig = plt.figure()
# plt.plot(hist.history['loss'])
# plt.plot(hist.history['val_loss'])
# plt.title('Loss')
# plt.legend(['train', 'test'], loc='upper left')
# fig.savefig('result/cnn_supplementary/cnn_loss_supplementary.png')
# plt.close()
# # Build the confusion matrix
# plt.figure()
# y_pred = model.predict(X_test)
# y_test = imagefiles['y_test'] # reload, because it was one-hot encoded above
# cm = confusion_matrix(y_test, np.argmax(y_pred, axis=1))
# ticklabels = ["blasts_highSSC_granulocytes",
# "blasts_highSSC_middle_ugly",
# "blasts",
# "normal cells",
# "blasts_highSSC_upper_dead"]
# sns.heatmap(cm, annot=True, cmap='Blues', yticklabels=ticklabels, xticklabels=ticklabels)
# plt.ylabel("Correct")
# plt.xlabel("Prediction")
# plt.tight_layout()
# plt.savefig('result/cnn_supplementary/confusion_matrix_cnn_supplementary.png')
# plt.close()
#
# # F1 micro/macro
# f1_macro = f1_score(y_test, np.argmax(y_pred, axis=1), average="macro")
# f1_micro = f1_score(y_test, np.argmax(y_pred, axis=1), average="micro")
# print(f"f1_macro:{f1_macro}")
# print(f"f1_miro:{f1_micro}")
 | [1, 2, 3, 4, 5] |
577 | ed2f3bbc7eb0a4d8f5ccdb7a12e00cbddab04dd0 | <mask token>
def get_nearest_method(method_name, parser):
"""
    all candidates are tokenized
    all protocols are untokenized
input:
queries:
[
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
]
output:
[
nearest_idx1,
nearest_idx2,
nearest_idx3,
...
]
"""
return _method_adaptors[method_name](parser)
<mask token>
| <mask token>
def get_nearest_method(method_name, parser):
"""
    all candidates are tokenized
    all protocols are untokenized
input:
queries:
[
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
]
output:
[
nearest_idx1,
nearest_idx2,
nearest_idx3,
...
]
"""
return _method_adaptors[method_name](parser)
def get_method_names():
return list(_method_adaptors.keys())
| <mask token>
def register_dist_adaptor(method_name):
def decorator(func):
_method_adaptors[method_name] = func
def wrapper(*args, **kwargs):
func(*args, **kwargs)
return wrapper
return decorator
def get_nearest_method(method_name, parser):
"""
    all candidates are tokenized
    all protocols are untokenized
input:
queries:
[
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
]
output:
[
nearest_idx1,
nearest_idx2,
nearest_idx3,
...
]
"""
return _method_adaptors[method_name](parser)
def get_method_names():
return list(_method_adaptors.keys())
| _method_adaptors = dict()
def register_dist_adaptor(method_name):
def decorator(func):
_method_adaptors[method_name] = func
def wrapper(*args, **kwargs):
func(*args, **kwargs)
return wrapper
return decorator
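# Hypothetical usage sketch (illustration only, not from the original module):
# an adaptor registered under a name is retrieved via get_nearest_method(),
# which calls the registered function with the parser.
#
#   @register_dist_adaptor('bow')
#   def bow_adaptor(parser):
#       def nearest(queries):
#           return [0 for _ in queries]  # nearest candidate index per query
#       return nearest
#
#   nearest_fn = get_nearest_method('bow', parser)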
def get_nearest_method(method_name, parser):
"""
    all candidates are tokenized
    all protocols are untokenized
input:
queries:
[
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
(protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)
]
output:
[
nearest_idx1,
nearest_idx2,
nearest_idx3,
...
]
"""
return _method_adaptors[method_name](parser)
def get_method_names():
return list(_method_adaptors.keys())
| null | [
1,
2,
3,
4
] |
578 | 837534ebc953dae966154921709398ab2b2e0b33 | <mask token>
| from .dependencies import have
from .syntax import PythonHighlighter
from .utils import count_locations, image_path, interface_style, natural_sort
| # © MNELAB developers
#
# License: BSD (3-clause)
from .dependencies import have
from .syntax import PythonHighlighter
from .utils import count_locations, image_path, interface_style, natural_sort
 | null | null | [0, 1, 2] |
579 | 29c630b56eb56d91d1e917078138a2bbf562e0bf | import pyhs2
import sys
import datetime
i = datetime.datetime.now()
# args
if len(sys.argv) < 2:
print "Run with python version 2.6"
print "Requires arg: <orgId>"
sys.exit()
orgId = sys.argv[1]
print "\n\nCreating document external ID manifest for Org ID: " + orgId
## strings
fileLine = "%s\t%s\t%s\n" #external_id doc_source assign_authority
query = """select * from (select external_id, doc_source, assign_authority from summary_doc_manifest where org_id = '%s'
UNION ALL select get_json_object(line, '$.document.id') as external_id, get_json_object(line, '$.document.source') as doc_source,
get_json_object(line, '$.document.assignAuthority') as assign_authority from production_logs_datacheckandrecover_epoch
where get_json_object(line, '$.docManifest') is not null and get_json_object(line, '$.orgId') = '%s'
and day=%s and month=%s and year=2014) joined_table""" %(orgId, orgId, i.day, i.month)
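# With hypothetical values (orgId="org1", run on June 5th), the four %s
# placeholders above are filled as 'org1', 'org1', 5 and 6 respectively.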
fileName = orgId + "-manifest"
## hive connection
conn = pyhs2.connect(host='10.196.47.205',
port=10000,
authMechanism="PLAIN",
user='hive',
password='',
database='default')
cur = conn.cursor()
count = 0
print "Executing query: " + query
cur.execute(query)
print "Building query results..."
out = open(fileName, "w")
for row in cur.fetch():
out.write(fileLine%(row[0], row[1], row[2]))
count+=1
if count%1000000 == 0:
print "...wrote " + str(count) + " entries so far."
out.close()
print "...wrote " + str(count) + " entries into the file: " + fileName
print "\n" | null | null | null | null | [
0
] |
580 | ce26ad27b7729164e27c845e2803a670b506bad8 | <mask token>
class QuoteesxtractorSpider(scrapy.Spider):
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class QuoteesxtractorSpider(scrapy.Spider):
<mask token>
<mask token>
<mask token>
def parse(self, response):
for quote in response.css('.quote'):
result = {'text': quote.css('span.text::text').get(), 'author':
quote.css('small.author::text').get(), 'tags': quote.css(
'div.tags a.tag::text').getall()}
yield result
| <mask token>
class QuoteesxtractorSpider(scrapy.Spider):
name = 'quoteEsxtractor'
allowed_domains = ['quotes.toscrape.com']
start_urls = ['http://quotes.toscrape.com/']
def parse(self, response):
for quote in response.css('.quote'):
result = {'text': quote.css('span.text::text').get(), 'author':
quote.css('small.author::text').get(), 'tags': quote.css(
'div.tags a.tag::text').getall()}
yield result
| import scrapy
class QuoteesxtractorSpider(scrapy.Spider):
name = 'quoteEsxtractor'
allowed_domains = ['quotes.toscrape.com']
start_urls = ['http://quotes.toscrape.com/']
def parse(self, response):
for quote in response.css('.quote'):
result = {'text': quote.css('span.text::text').get(), 'author':
quote.css('small.author::text').get(), 'tags': quote.css(
'div.tags a.tag::text').getall()}
yield result
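# Hypothetical invocation (assuming a standard Scrapy project layout):
#   scrapy crawl quoteEsxtractor -o quotes.json
# runs this spider and exports the scraped quotes as JSON.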
| # -*- coding: utf-8 -*-
import scrapy
class QuoteesxtractorSpider(scrapy.Spider):
name = 'quoteEsxtractor'
allowed_domains = ['quotes.toscrape.com']
start_urls = ['http://quotes.toscrape.com/']
def parse(self, response):
        for quote in response.css('.quote'):
# print(quote.getall())
result = {
"text": quote.css('span.text::text').get(),
"author": quote.css('small.author::text').get(),
"tags": quote.css('div.tags a.tag::text').getall()
}
            yield result | [1, 2, 3, 4, 5] |
581 | 09788cf04ab5190a33b43e3756f4dbd7d78977a5 | <mask token>
class GraphTupleData(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
<mask token>
id: int = sql.Column(sql.Integer, sql.ForeignKey('graph_tuples.id',
onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
sha1: str = sql.Column(sql.String(40), nullable=False, index=True)
pickled_graph_tuple: bytes = sql.Column(sqlutil.ColumnTypes.LargeBinary
(), nullable=False)
<mask token>
class Database(sqlutil.Database):
"""A database of GraphTuples."""
def __init__(self, url: str, must_exist: bool=False, ctx: progress.
ProgressContext=progress.NullContext):
super(Database, self).__init__(url, Base, must_exist=must_exist)
self.ctx = ctx
self._graph_tuple_stats = None
self._splits = None
self._split_counts = None
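    # Note: the @database_statistic decorator is defined elsewhere in the
    # original module; judging from stats_json below, it presumably registers
    # each property in database_statistics_registry so that all statistics
    # can be exported together.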
@database_statistic
def graph_count(self) ->int:
"""The number of non-empty graphs in the database."""
return int(self.graph_tuple_stats.graph_count)
@database_statistic
def ir_count(self) ->int:
"""The number of distinct intermediate representations that the non-empty
graphs are constructed from.
"""
return int(self.graph_tuple_stats.ir_count or 0)
@database_statistic
def split_count(self) ->int:
"""The number of distinct splits in the database."""
return int(self.graph_tuple_stats.split_count or 0)
@database_statistic
def node_count(self) ->int:
"""The total node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count or 0)
@database_statistic
def edge_count(self) ->int:
"""The total edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count or 0)
@database_statistic
def control_edge_count(self) ->int:
"""The total control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count or 0)
@database_statistic
def data_edge_count(self) ->int:
"""The total data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count or 0)
@database_statistic
def call_edge_count(self) ->int:
"""The total call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count or 0)
@database_statistic
def node_count_max(self) ->int:
"""The maximum node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count_max or 0)
@database_statistic
def edge_count_max(self) ->int:
"""The maximum edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count_max or 0)
@database_statistic
def control_edge_count_max(self) ->int:
"""The maximum control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count_max or 0)
@database_statistic
def data_edge_count_max(self) ->int:
"""The maximum data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count_max or 0)
@database_statistic
def call_edge_count_max(self) ->int:
"""The maximum call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count_max or 0)
@database_statistic
def edge_position_max(self) ->int:
"""The maximum edge position in non-empty graphs."""
return int(self.graph_tuple_stats.edge_position_max or 0)
@database_statistic
def node_x_dimensionality(self) ->int:
"""The node x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_x_dimensionality or 0)
@database_statistic
def node_y_dimensionality(self) ->int:
"""The node y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_y_dimensionality or 0)
@database_statistic
def graph_x_dimensionality(self) ->int:
"""The graph x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_x_dimensionality or 0)
@database_statistic
def graph_y_dimensionality(self) ->int:
"""The graph y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_y_dimensionality or 0)
@database_statistic
def graph_data_size(self) ->int:
"""The total size of the non-empty graph data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size or 0)
@database_statistic
def graph_data_size_min(self) ->int:
"""The minimum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_min or 0)
@database_statistic
def graph_data_size_avg(self) ->float:
"""The average size of the non-empty graph tuple data, in bytes."""
return float(self.graph_tuple_stats.graph_data_size_avg or 0)
@database_statistic
def graph_data_size_max(self) ->int:
"""The maximum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_max or 0)
@database_statistic
def has_data_flow(self) ->bool:
"""Return whether the graph database has data flow annotations.
        This is only true if *all* rows have data flow values.
"""
return self.graph_count and not self.data_flow_null_count
@database_statistic
def data_flow_null_count(self) ->int:
"""The number of database rows without data flow information.
If > 0, then has_data_flow is False.
"""
return self.graph_count - int(self.graph_tuple_stats.
data_flow_steps_count or 0)
@database_statistic
def data_flow_steps_min(self) ->Optional[int]:
"""The minimum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_min or 0)
@database_statistic
def data_flow_steps_avg(self) ->Optional[float]:
"""The average data flow steps for non-empty graphs."""
if self.has_data_flow:
return float(self.graph_tuple_stats.data_flow_steps_avg)
@database_statistic
def data_flow_steps_max(self) ->Optional[int]:
"""The maximum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_max or 0)
@database_statistic
def data_flow_positive_node_count_min(self) ->Optional[int]:
"""The minimum data flow positive node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_min or 0)
@database_statistic
def data_flow_positive_node_count_avg(self) ->Optional[int]:
"""The minimum data flow average node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_avg or 0)
@database_statistic
def data_flow_positive_node_count_max(self) ->Optional[int]:
"""The minimum data flow max node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_max or 0)
@database_statistic
def splits(self) ->List[int]:
"""Return a list of unique split values."""
if self._splits is None:
self.RefreshStats()
return self._splits
@database_statistic
def split_counts(self) ->Dict[int, int]:
"""Return a dictionary mapping split to the number of graphs."""
if self._split_counts is None:
self.RefreshStats()
return self._split_counts
def RefreshStats(self):
"""Compute the database stats for access via the instance properties.
Raises:
ValueError: If the database contains invalid entries, e.g. inconsistent
vector dimensionalities.
"""
with self.ctx.Profile(2, lambda t:
f"Computed stats over {humanize.BinaryPrefix(stats.graph_data_size, 'B')} database ({humanize.Plural(stats.graph_count, 'graph')})"
), self.Session() as session:
query = session.query(sql.func.count(GraphTuple.id).label(
'graph_count'), sql.func.count(sql.func.distinct(GraphTuple
.ir_id)).label('ir_count'), sql.func.count(sql.func.
distinct(GraphTuple.split)).label('split_count'), sql.func.
sum(GraphTuple.node_count).label('node_count'), sql.func.
sum(GraphTuple.control_edge_count).label(
'control_edge_count'), sql.func.sum(GraphTuple.
data_edge_count).label('data_edge_count'), sql.func.sum(
GraphTuple.call_edge_count).label('call_edge_count'), sql.
func.sum(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count'), sql.func.max(GraphTuple.node_count).label(
'node_count_max'), sql.func.max(GraphTuple.
control_edge_count).label('control_edge_count_max'), sql.
func.max(GraphTuple.data_edge_count).label(
'data_edge_count_max'), sql.func.max(GraphTuple.
            call_edge_count).label('call_edge_count_max'),
sql.func.max(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count_max'), sql.func.max(GraphTuple.
edge_position_max).label('edge_position_max'), sql.func.
count(sql.func.distinct(GraphTuple.node_x_dimensionality)).
label('node_x_dimensionality_count'), sql.func.count(sql.
func.distinct(GraphTuple.node_y_dimensionality)).label(
'node_y_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_x_dimensionality)).label(
'graph_x_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_y_dimensionality)).label(
'graph_y_dimensionality_count'), sql.func.max(GraphTuple.
node_x_dimensionality).label('node_x_dimensionality'), sql.
func.max(GraphTuple.node_y_dimensionality).label(
'node_y_dimensionality'), sql.func.max(GraphTuple.
graph_x_dimensionality).label('graph_x_dimensionality'),
sql.func.max(GraphTuple.graph_y_dimensionality).label(
'graph_y_dimensionality'), sql.func.sum(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size'), sql.
func.min(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_min'), sql.func.avg(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size_avg'), sql
.func.max(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_max'), sql.func.count(GraphTuple.
data_flow_steps).label('data_flow_steps_count'), sql.func.
min(GraphTuple.data_flow_steps).label('data_flow_steps_min'
), sql.func.avg(GraphTuple.data_flow_steps).label(
'data_flow_steps_avg'), sql.func.max(GraphTuple.
data_flow_steps).label('data_flow_steps_max'), sql.func.min
(GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_min'), sql.func.avg(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_avg'), sql.func.max(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_max'))
query = query.filter(GraphTuple.node_count > 1)
stats = query.one()
if stats.node_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_x_dimensionality_count} distinct node x dimensionalities'
)
if stats.node_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_y_dimensionality_count} distinct node y dimensionalities'
)
if stats.graph_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_x_dimensionality_count} distinct graph x dimensionalities'
)
if stats.graph_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_y_dimensionality_count} distinct graph y dimensionalities'
)
if not (stats.data_flow_steps_count == 0 or stats.
data_flow_steps_count == stats.graph_count):
raise ValueError(
f'{stats.graph_count - stats.data_flow_steps_count} of {stats.graph_count} graphs have no data_flow_steps value'
)
self._graph_tuple_stats = stats
with self.Session() as session:
self._splits = sorted(set([row.split for row in session.
query(GraphTuple.split).filter(GraphTuple.split != None
).group_by(GraphTuple.split)]))
self._split_counts = {split: session.query(sql.func.count(
GraphTuple.id)).filter(GraphTuple.split == split).
scalar() for split in self._splits}
@property
def graph_tuple_stats(self):
"""Fetch aggregate graph tuple stats, or compute them if not set."""
if self._graph_tuple_stats is None:
self.RefreshStats()
return self._graph_tuple_stats
@property
def stats_json(self) ->Dict[str, Any]:
"""Fetch the database statics as a JSON dictionary."""
return {name: function(self) for name, function in
database_statistics_registry}
def __repr__(self) ->str:
return (
f"Database of {humanize.DecimalPrefix(self.graph_count, 'graph')} with dimensionalities: node_x={self.node_x_dimensionality}, node_y={self.node_y_dimensionality}, graph_x={self.graph_x_dimensionality}, graph_y={self.graph_y_dimensionality}."
)
<mask token>
| <mask token>
class GraphTuple(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
<mask token>
id: int = sql.Column(sql.Integer, primary_key=True)
ir_id: int = sql.Column(sql.Integer, nullable=False, index=True)
split: Optional[int] = sql.Column(sql.Integer, nullable=True, index=True)
node_count: int = sql.Column(sql.Integer, nullable=False)
control_edge_count: int = sql.Column(sql.Integer, nullable=False)
data_edge_count: int = sql.Column(sql.Integer, nullable=False)
call_edge_count: int = sql.Column(sql.Integer, nullable=False)
edge_position_max: int = sql.Column(sql.Integer().with_variant(sqlite.
FLOAT(), 'sqlite'), nullable=False)
node_x_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
node_y_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
graph_x_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
graph_y_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
pickled_graph_tuple_size: int = sql.Column(sql.Integer, nullable=False)
data_flow_steps: int = sql.Column(sql.Integer, nullable=True)
data_flow_root_node: int = sql.Column(sql.Integer, nullable=True)
data_flow_positive_node_count: int = sql.Column(sql.Integer, nullable=True)
timestamp: datetime.datetime = sqlutil.ColumnFactory.MillisecondDatetime()
data: 'GraphTupleData' = sql.orm.relationship('GraphTupleData', uselist
=False, cascade='all, delete-orphan')
<mask token>
<mask token>
<mask token>
@decorators.memoized_property
def tuple(self) ->graph_tuple_lib.GraphTuple:
"""Un-pickle the graph tuple and cache the binary results."""
return pickle.loads(self.data.pickled_graph_tuple)
<mask token>
<mask token>
@classmethod
def CreateFromGraphTuple(cls, graph_tuple: graph_tuple_lib.GraphTuple,
ir_id: int, split: Optional[int]=None) ->'GraphTuple':
"""Create a mapped database instance from the given graph tuple.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
graph_tuple: The graph tuple to map.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
pickled_graph_tuple = pickle.dumps(graph_tuple)
return GraphTuple(ir_id=ir_id, split=split, node_count=graph_tuple.
node_count, control_edge_count=graph_tuple.control_edge_count,
data_edge_count=graph_tuple.data_edge_count, call_edge_count=
graph_tuple.call_edge_count, edge_position_max=graph_tuple.
edge_position_max, node_x_dimensionality=graph_tuple.
node_x_dimensionality, node_y_dimensionality=graph_tuple.
node_y_dimensionality, graph_x_dimensionality=graph_tuple.
graph_x_dimensionality, graph_y_dimensionality=graph_tuple.
graph_y_dimensionality, pickled_graph_tuple_size=len(
pickled_graph_tuple), data=GraphTupleData(sha1=crypto.sha1(
pickled_graph_tuple), pickled_graph_tuple=pickled_graph_tuple))
<mask token>
<mask token>
<mask token>
class GraphTupleData(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
"""The pickled graph tuple data. See GraphTuple for the parent table."""
id: int = sql.Column(sql.Integer, sql.ForeignKey('graph_tuples.id',
onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
sha1: str = sql.Column(sql.String(40), nullable=False, index=True)
pickled_graph_tuple: bytes = sql.Column(sqlutil.ColumnTypes.LargeBinary
(), nullable=False)
<mask token>
class Database(sqlutil.Database):
"""A database of GraphTuples."""
def __init__(self, url: str, must_exist: bool=False, ctx: progress.
ProgressContext=progress.NullContext):
super(Database, self).__init__(url, Base, must_exist=must_exist)
self.ctx = ctx
self._graph_tuple_stats = None
self._splits = None
self._split_counts = None
@database_statistic
def graph_count(self) ->int:
"""The number of non-empty graphs in the database."""
return int(self.graph_tuple_stats.graph_count)
@database_statistic
def ir_count(self) ->int:
"""The number of distinct intermediate representations that the non-empty
graphs are constructed from.
"""
return int(self.graph_tuple_stats.ir_count or 0)
@database_statistic
def split_count(self) ->int:
"""The number of distinct splits in the database."""
return int(self.graph_tuple_stats.split_count or 0)
@database_statistic
def node_count(self) ->int:
"""The total node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count or 0)
@database_statistic
def edge_count(self) ->int:
"""The total edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count or 0)
@database_statistic
def control_edge_count(self) ->int:
"""The total control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count or 0)
@database_statistic
def data_edge_count(self) ->int:
"""The total data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count or 0)
@database_statistic
def call_edge_count(self) ->int:
"""The total call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count or 0)
@database_statistic
def node_count_max(self) ->int:
"""The maximum node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count_max or 0)
@database_statistic
def edge_count_max(self) ->int:
"""The maximum edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count_max or 0)
@database_statistic
def control_edge_count_max(self) ->int:
"""The maximum control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count_max or 0)
@database_statistic
def data_edge_count_max(self) ->int:
"""The maximum data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count_max or 0)
@database_statistic
def call_edge_count_max(self) ->int:
"""The maximum call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count_max or 0)
@database_statistic
def edge_position_max(self) ->int:
"""The maximum edge position in non-empty graphs."""
return int(self.graph_tuple_stats.edge_position_max or 0)
@database_statistic
def node_x_dimensionality(self) ->int:
"""The node x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_x_dimensionality or 0)
@database_statistic
def node_y_dimensionality(self) ->int:
"""The node y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_y_dimensionality or 0)
@database_statistic
def graph_x_dimensionality(self) ->int:
"""The graph x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_x_dimensionality or 0)
@database_statistic
def graph_y_dimensionality(self) ->int:
"""The graph y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_y_dimensionality or 0)
@database_statistic
def graph_data_size(self) ->int:
"""The total size of the non-empty graph data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size or 0)
@database_statistic
def graph_data_size_min(self) ->int:
"""The minimum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_min or 0)
@database_statistic
def graph_data_size_avg(self) ->float:
"""The average size of the non-empty graph tuple data, in bytes."""
return float(self.graph_tuple_stats.graph_data_size_avg or 0)
@database_statistic
def graph_data_size_max(self) ->int:
"""The maximum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_max or 0)
@database_statistic
def has_data_flow(self) ->bool:
"""Return whether the graph database has data flow annotations.
        This is only true if *all* rows have data flow values.
"""
return self.graph_count and not self.data_flow_null_count
@database_statistic
def data_flow_null_count(self) ->int:
"""The number of database rows without data flow information.
If > 0, then has_data_flow is False.
"""
return self.graph_count - int(self.graph_tuple_stats.
data_flow_steps_count or 0)
@database_statistic
def data_flow_steps_min(self) ->Optional[int]:
"""The minimum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_min or 0)
@database_statistic
def data_flow_steps_avg(self) ->Optional[float]:
"""The average data flow steps for non-empty graphs."""
if self.has_data_flow:
return float(self.graph_tuple_stats.data_flow_steps_avg)
@database_statistic
def data_flow_steps_max(self) ->Optional[int]:
"""The maximum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_max or 0)
@database_statistic
def data_flow_positive_node_count_min(self) ->Optional[int]:
"""The minimum data flow positive node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_min or 0)
@database_statistic
def data_flow_positive_node_count_avg(self) ->Optional[int]:
"""The minimum data flow average node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_avg or 0)
@database_statistic
def data_flow_positive_node_count_max(self) ->Optional[int]:
"""The minimum data flow max node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_max or 0)
@database_statistic
def splits(self) ->List[int]:
"""Return a list of unique split values."""
if self._splits is None:
self.RefreshStats()
return self._splits
@database_statistic
def split_counts(self) ->Dict[int, int]:
"""Return a dictionary mapping split to the number of graphs."""
if self._split_counts is None:
self.RefreshStats()
return self._split_counts
def RefreshStats(self):
"""Compute the database stats for access via the instance properties.
Raises:
ValueError: If the database contains invalid entries, e.g. inconsistent
vector dimensionalities.
"""
with self.ctx.Profile(2, lambda t:
f"Computed stats over {humanize.BinaryPrefix(stats.graph_data_size, 'B')} database ({humanize.Plural(stats.graph_count, 'graph')})"
), self.Session() as session:
query = session.query(sql.func.count(GraphTuple.id).label(
'graph_count'), sql.func.count(sql.func.distinct(GraphTuple
.ir_id)).label('ir_count'), sql.func.count(sql.func.
distinct(GraphTuple.split)).label('split_count'), sql.func.
sum(GraphTuple.node_count).label('node_count'), sql.func.
sum(GraphTuple.control_edge_count).label(
'control_edge_count'), sql.func.sum(GraphTuple.
data_edge_count).label('data_edge_count'), sql.func.sum(
GraphTuple.call_edge_count).label('call_edge_count'), sql.
func.sum(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count'), sql.func.max(GraphTuple.node_count).label(
'node_count_max'), sql.func.max(GraphTuple.
control_edge_count).label('control_edge_count_max'), sql.
func.max(GraphTuple.data_edge_count).label(
'data_edge_count_max'), sql.func.max(GraphTuple.
            call_edge_count).label('call_edge_count_max'),
sql.func.max(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count_max'), sql.func.max(GraphTuple.
edge_position_max).label('edge_position_max'), sql.func.
count(sql.func.distinct(GraphTuple.node_x_dimensionality)).
label('node_x_dimensionality_count'), sql.func.count(sql.
func.distinct(GraphTuple.node_y_dimensionality)).label(
'node_y_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_x_dimensionality)).label(
'graph_x_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_y_dimensionality)).label(
'graph_y_dimensionality_count'), sql.func.max(GraphTuple.
node_x_dimensionality).label('node_x_dimensionality'), sql.
func.max(GraphTuple.node_y_dimensionality).label(
'node_y_dimensionality'), sql.func.max(GraphTuple.
graph_x_dimensionality).label('graph_x_dimensionality'),
sql.func.max(GraphTuple.graph_y_dimensionality).label(
'graph_y_dimensionality'), sql.func.sum(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size'), sql.
func.min(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_min'), sql.func.avg(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size_avg'), sql
.func.max(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_max'), sql.func.count(GraphTuple.
data_flow_steps).label('data_flow_steps_count'), sql.func.
min(GraphTuple.data_flow_steps).label('data_flow_steps_min'
), sql.func.avg(GraphTuple.data_flow_steps).label(
'data_flow_steps_avg'), sql.func.max(GraphTuple.
data_flow_steps).label('data_flow_steps_max'), sql.func.min
(GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_min'), sql.func.avg(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_avg'), sql.func.max(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_max'))
query = query.filter(GraphTuple.node_count > 1)
stats = query.one()
if stats.node_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_x_dimensionality_count} distinct node x dimensionalities'
)
if stats.node_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_y_dimensionality_count} distinct node y dimensionalities'
)
if stats.graph_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_x_dimensionality_count} distinct graph x dimensionalities'
)
if stats.graph_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_y_dimensionality_count} distinct graph y dimensionalities'
)
if not (stats.data_flow_steps_count == 0 or stats.
data_flow_steps_count == stats.graph_count):
raise ValueError(
f'{stats.graph_count - stats.data_flow_steps_count} of {stats.graph_count} graphs have no data_flow_steps value'
)
self._graph_tuple_stats = stats
with self.Session() as session:
self._splits = sorted(set([row.split for row in session.
query(GraphTuple.split).filter(GraphTuple.split != None
).group_by(GraphTuple.split)]))
self._split_counts = {split: session.query(sql.func.count(
GraphTuple.id)).filter(GraphTuple.split == split).
scalar() for split in self._splits}
@property
def graph_tuple_stats(self):
"""Fetch aggregate graph tuple stats, or compute them if not set."""
if self._graph_tuple_stats is None:
self.RefreshStats()
return self._graph_tuple_stats
@property
def stats_json(self) ->Dict[str, Any]:
"""Fetch the database statics as a JSON dictionary."""
return {name: function(self) for name, function in
database_statistics_registry}
def __repr__(self) ->str:
return (
f"Database of {humanize.DecimalPrefix(self.graph_count, 'graph')} with dimensionalities: node_x={self.node_x_dimensionality}, node_y={self.node_y_dimensionality}, graph_x={self.graph_x_dimensionality}, graph_y={self.graph_y_dimensionality}."
)
<mask token>
| <mask token>
class GraphTuple(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
<mask token>
id: int = sql.Column(sql.Integer, primary_key=True)
ir_id: int = sql.Column(sql.Integer, nullable=False, index=True)
split: Optional[int] = sql.Column(sql.Integer, nullable=True, index=True)
node_count: int = sql.Column(sql.Integer, nullable=False)
control_edge_count: int = sql.Column(sql.Integer, nullable=False)
data_edge_count: int = sql.Column(sql.Integer, nullable=False)
call_edge_count: int = sql.Column(sql.Integer, nullable=False)
edge_position_max: int = sql.Column(sql.Integer().with_variant(sqlite.
FLOAT(), 'sqlite'), nullable=False)
node_x_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
node_y_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
graph_x_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
graph_y_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
pickled_graph_tuple_size: int = sql.Column(sql.Integer, nullable=False)
data_flow_steps: int = sql.Column(sql.Integer, nullable=True)
data_flow_root_node: int = sql.Column(sql.Integer, nullable=True)
data_flow_positive_node_count: int = sql.Column(sql.Integer, nullable=True)
timestamp: datetime.datetime = sqlutil.ColumnFactory.MillisecondDatetime()
data: 'GraphTupleData' = sql.orm.relationship('GraphTupleData', uselist
=False, cascade='all, delete-orphan')
@property
def has_data_flow(self) ->bool:
"""Returns whether graph tuple has data flow columns."""
return self.data_flow_steps is not None
<mask token>
<mask token>
@decorators.memoized_property
def tuple(self) ->graph_tuple_lib.GraphTuple:
"""Un-pickle the graph tuple and cache the binary results."""
return pickle.loads(self.data.pickled_graph_tuple)
def ToFile(self, path: pathlib.Path) ->None:
"""Dump the pickled graph tuple to file.
This is lossy, as the ir_id column is not dumped.
Args:
path: The path of the graph tuple to write.
"""
with open(path, 'wb') as f:
pickle.dump(self.tuple, f)
@classmethod
def FromFile(cls, path: pathlib.Path, ir_id: int):
"""Construct a mapped database instance from a file generated by ToFile().
Args:
path: The path of the file to read.
ir_id: The IR id of the graph tuple.
Returns:
A GraphTuple instance.
"""
with open(path, 'rb') as f:
graph_tuple = pickle.load(f)
return cls.CreateFromGraphTuple(graph_tuple, ir_id)
@classmethod
def CreateFromGraphTuple(cls, graph_tuple: graph_tuple_lib.GraphTuple,
ir_id: int, split: Optional[int]=None) ->'GraphTuple':
"""Create a mapped database instance from the given graph tuple.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
graph_tuple: The graph tuple to map.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
pickled_graph_tuple = pickle.dumps(graph_tuple)
return GraphTuple(ir_id=ir_id, split=split, node_count=graph_tuple.
node_count, control_edge_count=graph_tuple.control_edge_count,
data_edge_count=graph_tuple.data_edge_count, call_edge_count=
graph_tuple.call_edge_count, edge_position_max=graph_tuple.
edge_position_max, node_x_dimensionality=graph_tuple.
node_x_dimensionality, node_y_dimensionality=graph_tuple.
node_y_dimensionality, graph_x_dimensionality=graph_tuple.
graph_x_dimensionality, graph_y_dimensionality=graph_tuple.
graph_y_dimensionality, pickled_graph_tuple_size=len(
pickled_graph_tuple), data=GraphTupleData(sha1=crypto.sha1(
pickled_graph_tuple), pickled_graph_tuple=pickled_graph_tuple))
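    # Hypothetical usage sketch (illustration only): map a graph tuple and
    # add it to a database.
    #   mapped = GraphTuple.CreateFromGraphTuple(graph_tuple, ir_id=1)
    #   with db.Session() as session:
    #       session.add(mapped)
    #       session.commit()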
<mask token>
@classmethod
def CreateEmpty(cls, ir_id: int) ->'GraphTuple':
"""Create an "empty" graph tuple.
An empty graph tuple can be used to signal that the conversion to GraphTuple
failed, and is signalled by a node_count of 0. An empty graph tuple has
no corresponding GraphTupleData row.
"""
return GraphTuple(ir_id=ir_id, node_count=0, control_edge_count=0,
data_edge_count=0, call_edge_count=0, edge_position_max=0,
pickled_graph_tuple_size=0)
<mask token>
class GraphTupleData(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
"""The pickled graph tuple data. See GraphTuple for the parent table."""
id: int = sql.Column(sql.Integer, sql.ForeignKey('graph_tuples.id',
onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
sha1: str = sql.Column(sql.String(40), nullable=False, index=True)
pickled_graph_tuple: bytes = sql.Column(sqlutil.ColumnTypes.LargeBinary
(), nullable=False)
<mask token>
class Database(sqlutil.Database):
"""A database of GraphTuples."""
def __init__(self, url: str, must_exist: bool=False, ctx: progress.
ProgressContext=progress.NullContext):
super(Database, self).__init__(url, Base, must_exist=must_exist)
self.ctx = ctx
self._graph_tuple_stats = None
self._splits = None
self._split_counts = None
@database_statistic
def graph_count(self) ->int:
"""The number of non-empty graphs in the database."""
return int(self.graph_tuple_stats.graph_count)
@database_statistic
def ir_count(self) ->int:
"""The number of distinct intermediate representations that the non-empty
graphs are constructed from.
"""
return int(self.graph_tuple_stats.ir_count or 0)
@database_statistic
def split_count(self) ->int:
"""The number of distinct splits in the database."""
return int(self.graph_tuple_stats.split_count or 0)
@database_statistic
def node_count(self) ->int:
"""The total node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count or 0)
@database_statistic
def edge_count(self) ->int:
"""The total edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count or 0)
@database_statistic
def control_edge_count(self) ->int:
"""The total control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count or 0)
@database_statistic
def data_edge_count(self) ->int:
"""The total data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count or 0)
@database_statistic
def call_edge_count(self) ->int:
"""The total call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count or 0)
@database_statistic
def node_count_max(self) ->int:
"""The maximum node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count_max or 0)
@database_statistic
def edge_count_max(self) ->int:
"""The maximum edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count_max or 0)
@database_statistic
def control_edge_count_max(self) ->int:
"""The maximum control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count_max or 0)
@database_statistic
def data_edge_count_max(self) ->int:
"""The maximum data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count_max or 0)
@database_statistic
def call_edge_count_max(self) ->int:
"""The maximum call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count_max or 0)
@database_statistic
def edge_position_max(self) ->int:
"""The maximum edge position in non-empty graphs."""
return int(self.graph_tuple_stats.edge_position_max or 0)
@database_statistic
def node_x_dimensionality(self) ->int:
"""The node x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_x_dimensionality or 0)
@database_statistic
def node_y_dimensionality(self) ->int:
"""The node y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_y_dimensionality or 0)
@database_statistic
def graph_x_dimensionality(self) ->int:
"""The graph x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_x_dimensionality or 0)
@database_statistic
def graph_y_dimensionality(self) ->int:
"""The graph y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_y_dimensionality or 0)
@database_statistic
def graph_data_size(self) ->int:
"""The total size of the non-empty graph data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size or 0)
@database_statistic
def graph_data_size_min(self) ->int:
"""The minimum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_min or 0)
@database_statistic
def graph_data_size_avg(self) ->float:
"""The average size of the non-empty graph tuple data, in bytes."""
return float(self.graph_tuple_stats.graph_data_size_avg or 0)
@database_statistic
def graph_data_size_max(self) ->int:
"""The maximum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_max or 0)
@database_statistic
def has_data_flow(self) ->bool:
"""Return whether the graph database has data flow annotations.
        This is only true if *all* rows have data flow values.
"""
return self.graph_count and not self.data_flow_null_count
@database_statistic
def data_flow_null_count(self) ->int:
"""The number of database rows without data flow information.
If > 0, then has_data_flow is False.
"""
return self.graph_count - int(self.graph_tuple_stats.
data_flow_steps_count or 0)
@database_statistic
def data_flow_steps_min(self) ->Optional[int]:
"""The minimum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_min or 0)
@database_statistic
def data_flow_steps_avg(self) ->Optional[float]:
"""The average data flow steps for non-empty graphs."""
if self.has_data_flow:
return float(self.graph_tuple_stats.data_flow_steps_avg)
@database_statistic
def data_flow_steps_max(self) ->Optional[int]:
"""The maximum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_max or 0)
@database_statistic
def data_flow_positive_node_count_min(self) ->Optional[int]:
"""The minimum data flow positive node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_min or 0)
@database_statistic
def data_flow_positive_node_count_avg(self) ->Optional[int]:
"""The minimum data flow average node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_avg or 0)
@database_statistic
def data_flow_positive_node_count_max(self) ->Optional[int]:
"""The minimum data flow max node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_max or 0)
@database_statistic
def splits(self) ->List[int]:
"""Return a list of unique split values."""
if self._splits is None:
self.RefreshStats()
return self._splits
@database_statistic
def split_counts(self) ->Dict[int, int]:
"""Return a dictionary mapping split to the number of graphs."""
if self._split_counts is None:
self.RefreshStats()
return self._split_counts
def RefreshStats(self):
"""Compute the database stats for access via the instance properties.
Raises:
ValueError: If the database contains invalid entries, e.g. inconsistent
vector dimensionalities.
"""
with self.ctx.Profile(2, lambda t:
f"Computed stats over {humanize.BinaryPrefix(stats.graph_data_size, 'B')} database ({humanize.Plural(stats.graph_count, 'graph')})"
), self.Session() as session:
query = session.query(sql.func.count(GraphTuple.id).label(
'graph_count'), sql.func.count(sql.func.distinct(GraphTuple
.ir_id)).label('ir_count'), sql.func.count(sql.func.
distinct(GraphTuple.split)).label('split_count'), sql.func.
sum(GraphTuple.node_count).label('node_count'), sql.func.
sum(GraphTuple.control_edge_count).label(
'control_edge_count'), sql.func.sum(GraphTuple.
data_edge_count).label('data_edge_count'), sql.func.sum(
GraphTuple.call_edge_count).label('call_edge_count'), sql.
func.sum(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count'), sql.func.max(GraphTuple.node_count).label(
'node_count_max'), sql.func.max(GraphTuple.
control_edge_count).label('control_edge_count_max'), sql.
func.max(GraphTuple.data_edge_count).label(
'data_edge_count_max'), sql.func.max(GraphTuple.
call_edge_count).label('call_edge_count_max'),
sql.func.max(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count_max'), sql.func.max(GraphTuple.
edge_position_max).label('edge_position_max'), sql.func.
count(sql.func.distinct(GraphTuple.node_x_dimensionality)).
label('node_x_dimensionality_count'), sql.func.count(sql.
func.distinct(GraphTuple.node_y_dimensionality)).label(
'node_y_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_x_dimensionality)).label(
'graph_x_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_y_dimensionality)).label(
'graph_y_dimensionality_count'), sql.func.max(GraphTuple.
node_x_dimensionality).label('node_x_dimensionality'), sql.
func.max(GraphTuple.node_y_dimensionality).label(
'node_y_dimensionality'), sql.func.max(GraphTuple.
graph_x_dimensionality).label('graph_x_dimensionality'),
sql.func.max(GraphTuple.graph_y_dimensionality).label(
'graph_y_dimensionality'), sql.func.sum(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size'), sql.
func.min(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_min'), sql.func.avg(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size_avg'), sql
.func.max(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_max'), sql.func.count(GraphTuple.
data_flow_steps).label('data_flow_steps_count'), sql.func.
min(GraphTuple.data_flow_steps).label('data_flow_steps_min'
), sql.func.avg(GraphTuple.data_flow_steps).label(
'data_flow_steps_avg'), sql.func.max(GraphTuple.
data_flow_steps).label('data_flow_steps_max'), sql.func.min
(GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_min'), sql.func.avg(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_avg'), sql.func.max(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_max'))
query = query.filter(GraphTuple.node_count > 1)
stats = query.one()
if stats.node_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_x_dimensionality_count} distinct node x dimensionalities'
)
if stats.node_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_y_dimensionality_count} distinct node y dimensionalities'
)
if stats.graph_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_x_dimensionality_count} distinct graph x dimensionalities'
)
if stats.graph_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_y_dimensionality_count} distinct graph y dimensionalities'
)
if not (stats.data_flow_steps_count == 0 or stats.
data_flow_steps_count == stats.graph_count):
raise ValueError(
f'{stats.graph_count - stats.data_flow_steps_count} of {stats.graph_count} graphs have no data_flow_steps value'
)
self._graph_tuple_stats = stats
with self.Session() as session:
self._splits = sorted(set([row.split for row in session.
query(GraphTuple.split).filter(GraphTuple.split != None
).group_by(GraphTuple.split)]))
self._split_counts = {split: session.query(sql.func.count(
GraphTuple.id)).filter(GraphTuple.split == split).
scalar() for split in self._splits}
@property
def graph_tuple_stats(self):
"""Fetch aggregate graph tuple stats, or compute them if not set."""
if self._graph_tuple_stats is None:
self.RefreshStats()
return self._graph_tuple_stats
@property
def stats_json(self) ->Dict[str, Any]:
"""Fetch the database statics as a JSON dictionary."""
return {name: function(self) for name, function in
database_statistics_registry}
def __repr__(self) ->str:
return (
f"Database of {humanize.DecimalPrefix(self.graph_count, 'graph')} with dimensionalities: node_x={self.node_x_dimensionality}, node_y={self.node_y_dimensionality}, graph_x={self.graph_x_dimensionality}, graph_y={self.graph_y_dimensionality}."
)
<mask token>
| <mask token>
class Meta(Base, sqlutil.TablenameFromClassNameMixin):
<mask token>
id: int = sql.Column(sql.Integer, primary_key=True)
run_id: str = run_id.RunId.SqlStringColumn()
timestamp: datetime.datetime = sqlutil.ColumnFactory.MillisecondDatetime()
key: str = sql.Column(sql.String(128), index=True)
pickled_value: bytes = sql.Column(sqlutil.ColumnTypes.LargeBinary(),
nullable=False)
<mask token>
<mask token>
class GraphTuple(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
"""A table of graph tuples.
For every GraphTuple, there should be a corresponding GraphTupleData row
containing the pickled graph tuple as a binary blob. The reason for dividing
the data horizontally across two tables is to enable fast scanning
of graph metadata, without needing to churn through a table of pickled binary
blobs.
"""
id: int = sql.Column(sql.Integer, primary_key=True)
ir_id: int = sql.Column(sql.Integer, nullable=False, index=True)
split: Optional[int] = sql.Column(sql.Integer, nullable=True, index=True)
node_count: int = sql.Column(sql.Integer, nullable=False)
control_edge_count: int = sql.Column(sql.Integer, nullable=False)
data_edge_count: int = sql.Column(sql.Integer, nullable=False)
call_edge_count: int = sql.Column(sql.Integer, nullable=False)
edge_position_max: int = sql.Column(sql.Integer().with_variant(sqlite.
FLOAT(), 'sqlite'), nullable=False)
node_x_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
node_y_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
graph_x_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
graph_y_dimensionality: int = sql.Column(sql.Integer, default=0,
nullable=False)
pickled_graph_tuple_size: int = sql.Column(sql.Integer, nullable=False)
data_flow_steps: int = sql.Column(sql.Integer, nullable=True)
data_flow_root_node: int = sql.Column(sql.Integer, nullable=True)
data_flow_positive_node_count: int = sql.Column(sql.Integer, nullable=True)
timestamp: datetime.datetime = sqlutil.ColumnFactory.MillisecondDatetime()
data: 'GraphTupleData' = sql.orm.relationship('GraphTupleData', uselist
=False, cascade='all, delete-orphan')
@property
def has_data_flow(self) ->bool:
"""Returns whether graph tuple has data flow columns."""
return self.data_flow_steps is not None
@property
def edge_count(self) ->int:
return (self.control_edge_count + self.data_edge_count + self.
call_edge_count)
@property
def sha1(self) ->str:
"""Return the sha1 of the graph tuple."""
return self.data.sha1
@decorators.memoized_property
def tuple(self) ->graph_tuple_lib.GraphTuple:
"""Un-pickle the graph tuple and cache the binary results."""
return pickle.loads(self.data.pickled_graph_tuple)
def ToFile(self, path: pathlib.Path) ->None:
"""Dump the pickled graph tuple to file.
This is lossy, as the ir_id column is not dumped.
Args:
path: The path of the graph tuple to write.
"""
with open(path, 'wb') as f:
pickle.dump(self.tuple, f)
@classmethod
def FromFile(cls, path: pathlib.Path, ir_id: int):
"""Construct a mapped database instance from a file generated by ToFile().
Args:
path: The path of the file to read.
ir_id: The IR id of the graph tuple.
Returns:
A GraphTuple instance.
"""
with open(path, 'rb') as f:
graph_tuple = pickle.load(f)
return cls.CreateFromGraphTuple(graph_tuple, ir_id)
@classmethod
def CreateFromGraphTuple(cls, graph_tuple: graph_tuple_lib.GraphTuple,
ir_id: int, split: Optional[int]=None) ->'GraphTuple':
"""Create a mapped database instance from the given graph tuple.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
graph_tuple: The graph tuple to map.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
pickled_graph_tuple = pickle.dumps(graph_tuple)
return GraphTuple(ir_id=ir_id, split=split, node_count=graph_tuple.
node_count, control_edge_count=graph_tuple.control_edge_count,
data_edge_count=graph_tuple.data_edge_count, call_edge_count=
graph_tuple.call_edge_count, edge_position_max=graph_tuple.
edge_position_max, node_x_dimensionality=graph_tuple.
node_x_dimensionality, node_y_dimensionality=graph_tuple.
node_y_dimensionality, graph_x_dimensionality=graph_tuple.
graph_x_dimensionality, graph_y_dimensionality=graph_tuple.
graph_y_dimensionality, pickled_graph_tuple_size=len(
pickled_graph_tuple), data=GraphTupleData(sha1=crypto.sha1(
pickled_graph_tuple), pickled_graph_tuple=pickled_graph_tuple))
@classmethod
def CreateFromNetworkX(cls, g: nx.MultiDiGraph, ir_id: int, split:
Optional[int]=None) ->'GraphTuple':
"""Create a mapped database instance from the given networkx graph.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
g: The networkx graph.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
graph_tuple = graph_tuple_lib.GraphTuple.CreateFromNetworkX(g)
mapped = cls.CreateFromGraphTuple(graph_tuple, ir_id=ir_id, split=split
)
mapped.data_flow_steps = g.graph.get('data_flow_steps')
mapped.data_flow_root_node = g.graph.get('data_flow_root_node')
mapped.data_flow_positive_node_count = g.graph.get(
'data_flow_positive_node_count')
return mapped
@classmethod
def CreateEmpty(cls, ir_id: int) ->'GraphTuple':
"""Create an "empty" graph tuple.
An empty graph tuple can be used to signal that the conversion to GraphTuple
failed, and is signalled by a node_count of 0. An empty graph tuple has
no corresponding GraphTupleData row.
"""
return GraphTuple(ir_id=ir_id, node_count=0, control_edge_count=0,
data_edge_count=0, call_edge_count=0, edge_position_max=0,
pickled_graph_tuple_size=0)
@classmethod
def CreateFromProgramGraph(cls, program_graph: programl_pb2.
ProgramGraph, ir_id: int, split: Optional[int]=None) ->'GraphTuple':
"""Create a mapped database instance from the given annotated graph.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
program_graph: A ProgramGraph proto instance.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
graph_tuple = graph_tuple_lib.GraphTuple.CreateFromProgramGraph(
program_graph)
mapped = cls.CreateFromGraphTuple(graph_tuple, ir_id, split)
mapped.data_flow_steps = program_graph.data_flow_steps
mapped.data_flow_root_node = program_graph.data_flow_root_node
mapped.data_flow_positive_node_count = (program_graph.
data_flow_positive_node_count)
return mapped
class GraphTupleData(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
"""The pickled graph tuple data. See GraphTuple for the parent table."""
id: int = sql.Column(sql.Integer, sql.ForeignKey('graph_tuples.id',
onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
sha1: str = sql.Column(sql.String(40), nullable=False, index=True)
pickled_graph_tuple: bytes = sql.Column(sqlutil.ColumnTypes.LargeBinary
(), nullable=False)
<mask token>
class Database(sqlutil.Database):
"""A database of GraphTuples."""
def __init__(self, url: str, must_exist: bool=False, ctx: progress.
ProgressContext=progress.NullContext):
super(Database, self).__init__(url, Base, must_exist=must_exist)
self.ctx = ctx
self._graph_tuple_stats = None
self._splits = None
self._split_counts = None
@database_statistic
def graph_count(self) ->int:
"""The number of non-empty graphs in the database."""
return int(self.graph_tuple_stats.graph_count)
@database_statistic
def ir_count(self) ->int:
"""The number of distinct intermediate representations that the non-empty
graphs are constructed from.
"""
return int(self.graph_tuple_stats.ir_count or 0)
@database_statistic
def split_count(self) ->int:
"""The number of distinct splits in the database."""
return int(self.graph_tuple_stats.split_count or 0)
@database_statistic
def node_count(self) ->int:
"""The total node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count or 0)
@database_statistic
def edge_count(self) ->int:
"""The total edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count or 0)
@database_statistic
def control_edge_count(self) ->int:
"""The total control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count or 0)
@database_statistic
def data_edge_count(self) ->int:
"""The total data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count or 0)
@database_statistic
def call_edge_count(self) ->int:
"""The total call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count or 0)
@database_statistic
def node_count_max(self) ->int:
"""The maximum node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count_max or 0)
@database_statistic
def edge_count_max(self) ->int:
"""The maximum edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count_max or 0)
@database_statistic
def control_edge_count_max(self) ->int:
"""The maximum control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count_max or 0)
@database_statistic
def data_edge_count_max(self) ->int:
"""The maximum data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count_max or 0)
@database_statistic
def call_edge_count_max(self) ->int:
"""The maximum call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count_max or 0)
@database_statistic
def edge_position_max(self) ->int:
"""The maximum edge position in non-empty graphs."""
return int(self.graph_tuple_stats.edge_position_max or 0)
@database_statistic
def node_x_dimensionality(self) ->int:
"""The node x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_x_dimensionality or 0)
@database_statistic
def node_y_dimensionality(self) ->int:
"""The node y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_y_dimensionality or 0)
@database_statistic
def graph_x_dimensionality(self) ->int:
"""The graph x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_x_dimensionality or 0)
@database_statistic
def graph_y_dimensionality(self) ->int:
"""The graph y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_y_dimensionality or 0)
@database_statistic
def graph_data_size(self) ->int:
"""The total size of the non-empty graph data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size or 0)
@database_statistic
def graph_data_size_min(self) ->int:
"""The minimum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_min or 0)
@database_statistic
def graph_data_size_avg(self) ->float:
"""The average size of the non-empty graph tuple data, in bytes."""
return float(self.graph_tuple_stats.graph_data_size_avg or 0)
@database_statistic
def graph_data_size_max(self) ->int:
"""The maximum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_max or 0)
@database_statistic
def has_data_flow(self) ->bool:
"""Return whether the graph database has data flow annotations.
This is only true if *all* rows have data flow values.
"""
return bool(self.graph_count and not self.data_flow_null_count)
@database_statistic
def data_flow_null_count(self) ->int:
"""The number of database rows without data flow information.
If > 0, then has_data_flow is False.
"""
return self.graph_count - int(self.graph_tuple_stats.
data_flow_steps_count or 0)
@database_statistic
def data_flow_steps_min(self) ->Optional[int]:
"""The minimum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_min or 0)
@database_statistic
def data_flow_steps_avg(self) ->Optional[float]:
"""The average data flow steps for non-empty graphs."""
if self.has_data_flow:
return float(self.graph_tuple_stats.data_flow_steps_avg)
@database_statistic
def data_flow_steps_max(self) ->Optional[int]:
"""The maximum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_max or 0)
@database_statistic
def data_flow_positive_node_count_min(self) ->Optional[int]:
"""The minimum data flow positive node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_min or 0)
@database_statistic
def data_flow_positive_node_count_avg(self) ->Optional[int]:
"""The minimum data flow average node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_avg or 0)
@database_statistic
def data_flow_positive_node_count_max(self) ->Optional[int]:
"""The minimum data flow max node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.
data_flow_positive_node_count_max or 0)
@database_statistic
def splits(self) ->List[int]:
"""Return a list of unique split values."""
if self._splits is None:
self.RefreshStats()
return self._splits
@database_statistic
def split_counts(self) ->Dict[int, int]:
"""Return a dictionary mapping split to the number of graphs."""
if self._split_counts is None:
self.RefreshStats()
return self._split_counts
def RefreshStats(self):
"""Compute the database stats for access via the instance properties.
Raises:
ValueError: If the database contains invalid entries, e.g. inconsistent
vector dimensionalities.
"""
with self.ctx.Profile(2, lambda t:
f"Computed stats over {humanize.BinaryPrefix(stats.graph_data_size, 'B')} database ({humanize.Plural(stats.graph_count, 'graph')})"
), self.Session() as session:
query = session.query(sql.func.count(GraphTuple.id).label(
'graph_count'), sql.func.count(sql.func.distinct(GraphTuple
.ir_id)).label('ir_count'), sql.func.count(sql.func.
distinct(GraphTuple.split)).label('split_count'), sql.func.
sum(GraphTuple.node_count).label('node_count'), sql.func.
sum(GraphTuple.control_edge_count).label(
'control_edge_count'), sql.func.sum(GraphTuple.
data_edge_count).label('data_edge_count'), sql.func.sum(
GraphTuple.call_edge_count).label('call_edge_count'), sql.
func.sum(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count'), sql.func.max(GraphTuple.node_count).label(
'node_count_max'), sql.func.max(GraphTuple.
control_edge_count).label('control_edge_count_max'), sql.
func.max(GraphTuple.data_edge_count).label(
'data_edge_count_max'), sql.func.max(GraphTuple.
call_edge_count).label('call_edge_count_max'),
sql.func.max(GraphTuple.control_edge_count + GraphTuple.
data_edge_count + GraphTuple.call_edge_count).label(
'edge_count_max'), sql.func.max(GraphTuple.
edge_position_max).label('edge_position_max'), sql.func.
count(sql.func.distinct(GraphTuple.node_x_dimensionality)).
label('node_x_dimensionality_count'), sql.func.count(sql.
func.distinct(GraphTuple.node_y_dimensionality)).label(
'node_y_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_x_dimensionality)).label(
'graph_x_dimensionality_count'), sql.func.count(sql.func.
distinct(GraphTuple.graph_y_dimensionality)).label(
'graph_y_dimensionality_count'), sql.func.max(GraphTuple.
node_x_dimensionality).label('node_x_dimensionality'), sql.
func.max(GraphTuple.node_y_dimensionality).label(
'node_y_dimensionality'), sql.func.max(GraphTuple.
graph_x_dimensionality).label('graph_x_dimensionality'),
sql.func.max(GraphTuple.graph_y_dimensionality).label(
'graph_y_dimensionality'), sql.func.sum(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size'), sql.
func.min(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_min'), sql.func.avg(GraphTuple.
pickled_graph_tuple_size).label('graph_data_size_avg'), sql
.func.max(GraphTuple.pickled_graph_tuple_size).label(
'graph_data_size_max'), sql.func.count(GraphTuple.
data_flow_steps).label('data_flow_steps_count'), sql.func.
min(GraphTuple.data_flow_steps).label('data_flow_steps_min'
), sql.func.avg(GraphTuple.data_flow_steps).label(
'data_flow_steps_avg'), sql.func.max(GraphTuple.
data_flow_steps).label('data_flow_steps_max'), sql.func.min
(GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_min'), sql.func.avg(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_avg'), sql.func.max(
GraphTuple.data_flow_positive_node_count).label(
'data_flow_positive_node_count_max'))
query = query.filter(GraphTuple.node_count > 1)
stats = query.one()
if stats.node_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_x_dimensionality_count} distinct node x dimensionalities'
)
if stats.node_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.node_y_dimensionality_count} distinct node y dimensionalities'
)
if stats.graph_x_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_x_dimensionality_count} distinct graph x dimensionalities'
)
if stats.graph_y_dimensionality_count > 1:
raise ValueError(
f'Database contains {stats.graph_y_dimensionality_count} distinct graph y dimensionalities'
)
if not (stats.data_flow_steps_count == 0 or stats.
data_flow_steps_count == stats.graph_count):
raise ValueError(
f'{stats.graph_count - stats.data_flow_steps_count} of {stats.graph_count} graphs have no data_flow_steps value'
)
self._graph_tuple_stats = stats
with self.Session() as session:
self._splits = sorted(set([row.split for row in session.
query(GraphTuple.split).filter(GraphTuple.split != None
).group_by(GraphTuple.split)]))
self._split_counts = {split: session.query(sql.func.count(
GraphTuple.id)).filter(GraphTuple.split == split).
scalar() for split in self._splits}
@property
def graph_tuple_stats(self):
"""Fetch aggregate graph tuple stats, or compute them if not set."""
if self._graph_tuple_stats is None:
self.RefreshStats()
return self._graph_tuple_stats
@property
def stats_json(self) ->Dict[str, Any]:
"""Fetch the database statics as a JSON dictionary."""
return {name: function(self) for name, function in
database_statistics_registry}
def __repr__(self) ->str:
return (
f"Database of {humanize.DecimalPrefix(self.graph_count, 'graph')} with dimensionalities: node_x={self.node_x_dimensionality}, node_y={self.node_y_dimensionality}, graph_x={self.graph_x_dimensionality}, graph_y={self.graph_y_dimensionality}."
)
<mask token>
| # Copyright 2019-2020 the ProGraML authors.
#
# Contact Chris Cummins <[email protected]>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module defines a database for storing graph tuples."""
import datetime
import pathlib
import pickle
from typing import Any
from typing import Callable
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
import networkx as nx
import sqlalchemy as sql
from sqlalchemy.dialects import sqlite
from deeplearning.ml4pl import run_id
from deeplearning.ml4pl.graphs import programl_pb2
from deeplearning.ml4pl.graphs.labelled import graph_tuple as graph_tuple_lib
from labm8.py import app
from labm8.py import crypto
from labm8.py import decorators
from labm8.py import humanize
from labm8.py import jsonutil
from labm8.py import progress
from labm8.py import sqlutil
FLAGS = app.FLAGS
# Note we declare a graph_db flag at the bottom of this file, after declaring
# the Database class.
Base = sql.ext.declarative.declarative_base()
class Meta(Base, sqlutil.TablenameFromClassNameMixin):
"""A key-value database metadata store, with additional run ID."""
# Unused integer ID for this row.
id: int = sql.Column(sql.Integer, primary_key=True)
# The run ID that generated this <key,value> pair.
run_id: str = run_id.RunId.SqlStringColumn()
timestamp: datetime.datetime = sqlutil.ColumnFactory.MillisecondDatetime()
# The <key,value> pair.
key: str = sql.Column(sql.String(128), index=True)
pickled_value: bytes = sql.Column(
sqlutil.ColumnTypes.LargeBinary(), nullable=False
)
@property
def value(self) -> Any:
"""De-pickle the column value."""
return pickle.loads(self.pickled_value)
@classmethod
def Create(cls, key: str, value: Any):
"""Construct a table entry."""
return Meta(key=key, pickled_value=pickle.dumps(value))
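# Usage sketch for the Meta key-value store (shown as a comment; the key name
# and the `session` variable below are illustrative assumptions, not part of
# this module):
#
#   with db.Session(commit=True) as session:
#     session.add(Meta.Create(key="schema_version", value=2))
#     stored = session.query(Meta).filter(Meta.key == "schema_version").first()
#     assert stored.value == 2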
class GraphTuple(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
"""A table of graph tuples.
For every GraphTuple, there should be a corresponding GraphTupleData row
containing the pickled graph tuple as a binary blob. The reason for dividing
the data horizontally across two tables is to enable fast scanning
of graph metadata, without needing to churn through a table of pickled binary
blobs.
"""
id: int = sql.Column(sql.Integer, primary_key=True)
# A reference to the 'id' column of a
# deeplearning.ml4pl.ir.ir_database.IntermediateRepresentationFile database
# row. There is no foreign key relationship here because they are separate
# databases.
ir_id: int = sql.Column(sql.Integer, nullable=False, index=True)
# An integer used to split a database of graphs into separate sets, e.g.
# train/val/test split.
split: Optional[int] = sql.Column(sql.Integer, nullable=True, index=True)
# The size of the program graph.
node_count: int = sql.Column(sql.Integer, nullable=False)
control_edge_count: int = sql.Column(sql.Integer, nullable=False)
data_edge_count: int = sql.Column(sql.Integer, nullable=False)
call_edge_count: int = sql.Column(sql.Integer, nullable=False)
# The maximum value of the 'position' attribute of edges.
# Although this is an integral value, we store it as a float when using sqlite
# backend because for an unknown reason, sql.func.max(edge_position_max)
# returns a byte array when aggregating over sqlite backend.
edge_position_max: int = sql.Column(
sql.Integer().with_variant(sqlite.FLOAT(), "sqlite"), nullable=False
)
# The dimensionality of node-level features and labels.
node_x_dimensionality: int = sql.Column(
sql.Integer, default=0, nullable=False
)
node_y_dimensionality: int = sql.Column(
sql.Integer, default=0, nullable=False
)
# The dimensionality of graph-level features and labels.
graph_x_dimensionality: int = sql.Column(
sql.Integer, default=0, nullable=False
)
graph_y_dimensionality: int = sql.Column(
sql.Integer, default=0, nullable=False
)
# The size of the pickled graph tuple in bytes.
pickled_graph_tuple_size: int = sql.Column(sql.Integer, nullable=False)
# A copy of attributes from the
# deeplearning.ml4pl.graphs.labelled.data_flow_graphs.DataFlowAnnotatedGraph
# tuple for storing metadata of data flow analysis graphs. If not relevant,
# these columns may be null.
data_flow_steps: int = sql.Column(sql.Integer, nullable=True)
data_flow_root_node: int = sql.Column(sql.Integer, nullable=True)
data_flow_positive_node_count: int = sql.Column(sql.Integer, nullable=True)
timestamp: datetime.datetime = sqlutil.ColumnFactory.MillisecondDatetime()
# Create the one-to-one relationship from GraphTuple to GraphTupleData.
data: "GraphTupleData" = sql.orm.relationship(
"GraphTupleData", uselist=False, cascade="all, delete-orphan"
)
@property
def has_data_flow(self) -> bool:
"""Returns whether graph tuple has data flow columns."""
return self.data_flow_steps is not None
@property
def edge_count(self) -> int:
return self.control_edge_count + self.data_edge_count + self.call_edge_count
# Joined table accessors:
@property
def sha1(self) -> str:
"""Return the sha1 of the graph tuple."""
return self.data.sha1
@decorators.memoized_property
def tuple(self) -> graph_tuple_lib.GraphTuple:
"""Un-pickle the graph tuple and cache the binary results."""
return pickle.loads(self.data.pickled_graph_tuple)
def ToFile(self, path: pathlib.Path) -> None:
"""Dump the pickled graph tuple to file.
This is lossy, as the ir_id column is not dumped.
Args:
path: The path of the graph tuple to write.
"""
with open(path, "wb") as f:
pickle.dump(self.tuple, f)
# Factory methods:
@classmethod
def FromFile(cls, path: pathlib.Path, ir_id: int):
"""Construct a mapped database instance from a file generated by ToFile().
Args:
path: The path of the file to read.
ir_id: The IR id of the graph tuple.
Returns:
A GraphTuple instance.
"""
with open(path, "rb") as f:
graph_tuple = pickle.load(f)
return cls.CreateFromGraphTuple(graph_tuple, ir_id)
@classmethod
def CreateFromGraphTuple(
cls,
graph_tuple: graph_tuple_lib.GraphTuple,
ir_id: int,
split: Optional[int] = None,
) -> "GraphTuple":
"""Create a mapped database instance from the given graph tuple.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
graph_tuple: The graph tuple to map.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
pickled_graph_tuple = pickle.dumps(graph_tuple)
return GraphTuple(
ir_id=ir_id,
split=split,
node_count=graph_tuple.node_count,
control_edge_count=graph_tuple.control_edge_count,
data_edge_count=graph_tuple.data_edge_count,
call_edge_count=graph_tuple.call_edge_count,
edge_position_max=graph_tuple.edge_position_max,
node_x_dimensionality=graph_tuple.node_x_dimensionality,
node_y_dimensionality=graph_tuple.node_y_dimensionality,
graph_x_dimensionality=graph_tuple.graph_x_dimensionality,
graph_y_dimensionality=graph_tuple.graph_y_dimensionality,
pickled_graph_tuple_size=len(pickled_graph_tuple),
data=GraphTupleData(
sha1=crypto.sha1(pickled_graph_tuple),
pickled_graph_tuple=pickled_graph_tuple,
),
)
@classmethod
def CreateFromNetworkX(
cls, g: nx.MultiDiGraph, ir_id: int, split: Optional[int] = None,
) -> "GraphTuple":
"""Create a mapped database instance from the given networkx graph.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
g: The networkx graph.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
graph_tuple = graph_tuple_lib.GraphTuple.CreateFromNetworkX(g)
mapped = cls.CreateFromGraphTuple(graph_tuple, ir_id=ir_id, split=split)
mapped.data_flow_steps = g.graph.get("data_flow_steps")
mapped.data_flow_root_node = g.graph.get("data_flow_root_node")
mapped.data_flow_positive_node_count = g.graph.get(
"data_flow_positive_node_count"
)
return mapped
@classmethod
def CreateEmpty(cls, ir_id: int) -> "GraphTuple":
"""Create an "empty" graph tuple.
An empty graph tuple can be used to signal that the conversion to GraphTuple
failed, and is signalled by a node_count of 0. An empty graph tuple has
no corresponding GraphTupleData row.
"""
return GraphTuple(
ir_id=ir_id,
node_count=0,
control_edge_count=0,
data_edge_count=0,
call_edge_count=0,
edge_position_max=0,
pickled_graph_tuple_size=0,
)
@classmethod
def CreateFromProgramGraph(
cls,
program_graph: programl_pb2.ProgramGraph,
ir_id: int,
split: Optional[int] = None,
) -> "GraphTuple":
"""Create a mapped database instance from the given annotated graph.
This is the preferred method of populating databases of graph tuples, as
it contains the boilerplate to extract and set the metadata columns, and
handles the join between the two data/metadata tables invisibly.
Args:
program_graph: A ProgramGraph proto instance.
ir_id: The intermediate representation ID.
split: The split value of this graph.
Returns:
A GraphTuple instance.
"""
graph_tuple = graph_tuple_lib.GraphTuple.CreateFromProgramGraph(
program_graph
)
mapped = cls.CreateFromGraphTuple(graph_tuple, ir_id, split)
mapped.data_flow_steps = program_graph.data_flow_steps
mapped.data_flow_root_node = program_graph.data_flow_root_node
mapped.data_flow_positive_node_count = (
program_graph.data_flow_positive_node_count
)
return mapped
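# Population sketch: the docstrings above call CreateFromGraphTuple and its
# siblings "the preferred method of populating databases". A minimal loop,
# assuming `db` is a Database instance and `graphs` yields
# (graph_tuple, ir_id) pairs (both names are illustrative):
#
#   with db.Session(commit=True) as session:
#     for graph_tuple, ir_id in graphs:
#       session.add(GraphTuple.CreateFromGraphTuple(graph_tuple, ir_id))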
class GraphTupleData(Base, sqlutil.PluralTablenameFromCamelCapsClassNameMixin):
"""The pickled graph tuple data. See GraphTuple for the parent table."""
id: int = sql.Column(
sql.Integer,
sql.ForeignKey("graph_tuples.id", onupdate="CASCADE", ondelete="CASCADE"),
primary_key=True,
)
# The sha1sum of the 'pickled_graph_tuple' column. There is no requirement
# that graph tuples be unique, but, should you wish to enforce this,
# you can group by this sha1 column and prune the duplicates.
sha1: str = sql.Column(sql.String(40), nullable=False, index=True)
# The pickled GraphTuple data.
pickled_graph_tuple: bytes = sql.Column(
sqlutil.ColumnTypes.LargeBinary(), nullable=False
)
# A registry of database statistics, where each entry is a <name, property> tuple.
database_statistics_registry: List[Tuple[str, Callable[["Database"], Any]]] = []
def database_statistic(func):
"""A decorator to mark a method on a Database as a database static.
Database statistics can be accessed using Database.stats_json property to
retrieve a <name, vale> dictionary.
"""
global database_statistics_registry
database_statistics_registry.append((func.__name__, func))
return property(func)
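# Illustration of the decorator above (kept as a comment because executing it
# would append a toy entry to the module-level registry): decorating a method
# registers its name and makes it a read-only property, so stats_json can
# later enumerate every statistic by name.
#
#   class Example:
#     @database_statistic
#     def answer(self) -> int:
#       return 42
#
#   # database_statistics_registry now ends with ("answer", <function answer>).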
class Database(sqlutil.Database):
"""A database of GraphTuples."""
def __init__(
self,
url: str,
must_exist: bool = False,
ctx: progress.ProgressContext = progress.NullContext,
):
super(Database, self).__init__(url, Base, must_exist=must_exist)
self.ctx = ctx
# Lazily evaluated attributes.
self._graph_tuple_stats = None
self._splits = None
self._split_counts = None
##############################################################################
# Database stats. These are evaluated lazily and the results cached. There is
# no cache invalidation strategy - after modifying the database, you must
# manually call RefreshStats() to ensure that stale stats are re-computed.
##############################################################################
@database_statistic
def graph_count(self) -> int:
"""The number of non-empty graphs in the database."""
return int(self.graph_tuple_stats.graph_count)
@database_statistic
def ir_count(self) -> int:
"""The number of distinct intermediate representations that the non-empty
graphs are constructed from.
"""
return int(self.graph_tuple_stats.ir_count or 0)
@database_statistic
def split_count(self) -> int:
"""The number of distinct splits in the database."""
return int(self.graph_tuple_stats.split_count or 0)
@database_statistic
def node_count(self) -> int:
"""The total node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count or 0)
@database_statistic
def edge_count(self) -> int:
"""The total edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count or 0)
@database_statistic
def control_edge_count(self) -> int:
"""The total control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count or 0)
@database_statistic
def data_edge_count(self) -> int:
"""The total data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count or 0)
@database_statistic
def call_edge_count(self) -> int:
"""The total call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count or 0)
@database_statistic
def node_count_max(self) -> int:
"""The maximum node count in non-empty graphs."""
return int(self.graph_tuple_stats.node_count_max or 0)
@database_statistic
def edge_count_max(self) -> int:
"""The maximum edge count in non-empty graphs."""
return int(self.graph_tuple_stats.edge_count_max or 0)
@database_statistic
def control_edge_count_max(self) -> int:
"""The maximum control edge count in non-empty graphs."""
return int(self.graph_tuple_stats.control_edge_count_max or 0)
@database_statistic
def data_edge_count_max(self) -> int:
"""The maximum data edge count in non-empty graphs."""
return int(self.graph_tuple_stats.data_edge_count_max or 0)
@database_statistic
def call_edge_count_max(self) -> int:
"""The maximum call edge count in non-empty graphs."""
return int(self.graph_tuple_stats.call_edge_count_max or 0)
@database_statistic
def edge_position_max(self) -> int:
"""The maximum edge position in non-empty graphs."""
return int(self.graph_tuple_stats.edge_position_max or 0)
@database_statistic
def node_x_dimensionality(self) -> int:
"""The node x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_x_dimensionality or 0)
@database_statistic
def node_y_dimensionality(self) -> int:
"""The node y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.node_y_dimensionality or 0)
@database_statistic
def graph_x_dimensionality(self) -> int:
"""The graph x dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_x_dimensionality or 0)
@database_statistic
def graph_y_dimensionality(self) -> int:
"""The graph y dimensionality of all non-empty graphs."""
return int(self.graph_tuple_stats.graph_y_dimensionality or 0)
@database_statistic
def graph_data_size(self) -> int:
"""The total size of the non-empty graph data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size or 0)
@database_statistic
def graph_data_size_min(self) -> int:
"""The minimum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_min or 0)
@database_statistic
def graph_data_size_avg(self) -> float:
"""The average size of the non-empty graph tuple data, in bytes."""
return float(self.graph_tuple_stats.graph_data_size_avg or 0)
@database_statistic
def graph_data_size_max(self) -> int:
"""The maximum size of the non-empty graph tuple data, in bytes."""
return int(self.graph_tuple_stats.graph_data_size_max or 0)
@database_statistic
def has_data_flow(self) -> bool:
"""Return whether the graph database has data flow annotations.
This is only true if *all* rows have data flow values.
"""
return bool(self.graph_count and not self.data_flow_null_count)
@database_statistic
def data_flow_null_count(self) -> int:
"""The number of database rows without data flow information.
If > 0, then has_data_flow is False.
"""
return self.graph_count - int(
self.graph_tuple_stats.data_flow_steps_count or 0
)
@database_statistic
def data_flow_steps_min(self) -> Optional[int]:
"""The minimum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_min or 0)
@database_statistic
def data_flow_steps_avg(self) -> Optional[float]:
"""The average data flow steps for non-empty graphs."""
if self.has_data_flow:
return float(self.graph_tuple_stats.data_flow_steps_avg)
@database_statistic
def data_flow_steps_max(self) -> Optional[int]:
"""The maximum data flow steps for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_steps_max or 0)
@database_statistic
def data_flow_positive_node_count_min(self) -> Optional[int]:
"""The minimum data flow positive node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_positive_node_count_min or 0)
@database_statistic
def data_flow_positive_node_count_avg(self) -> Optional[int]:
"""The minimum data flow average node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_positive_node_count_avg or 0)
@database_statistic
def data_flow_positive_node_count_max(self) -> Optional[int]:
"""The minimum data flow max node count for non-empty graphs."""
if self.has_data_flow:
return int(self.graph_tuple_stats.data_flow_positive_node_count_max or 0)
@database_statistic
def splits(self) -> List[int]:
"""Return a list of unique split values."""
if self._splits is None:
self.RefreshStats()
return self._splits
@database_statistic
def split_counts(self) -> Dict[int, int]:
"""Return a dictionary mapping split to the number of graphs."""
if self._split_counts is None:
self.RefreshStats()
return self._split_counts
def RefreshStats(self):
"""Compute the database stats for access via the instance properties.
Raises:
ValueError: If the database contains invalid entries, e.g. inconsistent
vector dimensionalities.
"""
with self.ctx.Profile(
2,
lambda t: (
"Computed stats over "
f"{humanize.BinaryPrefix(stats.graph_data_size, 'B')} database "
f"({humanize.Plural(stats.graph_count, 'graph')})"
),
), self.Session() as session:
query = session.query(
# Graph and IR counts.
sql.func.count(GraphTuple.id).label("graph_count"),
sql.func.count(sql.func.distinct(GraphTuple.ir_id)).label("ir_count"),
sql.func.count(sql.func.distinct(GraphTuple.split)).label(
"split_count"
),
# Node and edge attribute sums.
sql.func.sum(GraphTuple.node_count).label("node_count"),
sql.func.sum(GraphTuple.control_edge_count).label("control_edge_count"),
sql.func.sum(GraphTuple.data_edge_count).label("data_edge_count"),
sql.func.sum(GraphTuple.call_edge_count).label("call_edge_count"),
sql.func.sum(
GraphTuple.control_edge_count
+ GraphTuple.data_edge_count
+ GraphTuple.call_edge_count
).label("edge_count"),
# Node and edge attribute maximums.
sql.func.max(GraphTuple.node_count).label("node_count_max"),
sql.func.max(GraphTuple.control_edge_count).label(
"control_edge_count_max"
),
sql.func.max(GraphTuple.data_edge_count).label("data_edge_count_max"),
sql.func.max(GraphTuple.call_edge_count).label("call_edge_count_max"),
sql.func.max(
GraphTuple.control_edge_count
+ GraphTuple.data_edge_count
+ GraphTuple.call_edge_count
).label("edge_count_max"),
sql.func.max(GraphTuple.edge_position_max).label("edge_position_max"),
# Feature and label dimensionality counts. Each of these columns
# should be one, showing that there is a single value for all graph
# tuples.
sql.func.count(
sql.func.distinct(GraphTuple.node_x_dimensionality)
).label("node_x_dimensionality_count"),
sql.func.count(
sql.func.distinct(GraphTuple.node_y_dimensionality)
).label("node_y_dimensionality_count"),
sql.func.count(
sql.func.distinct(GraphTuple.graph_x_dimensionality)
).label("graph_x_dimensionality_count"),
sql.func.count(
sql.func.distinct(GraphTuple.graph_y_dimensionality)
).label("graph_y_dimensionality_count"),
# Feature and label dimensionalities.
sql.func.max(GraphTuple.node_x_dimensionality).label(
"node_x_dimensionality"
),
sql.func.max(GraphTuple.node_y_dimensionality).label(
"node_y_dimensionality"
),
sql.func.max(GraphTuple.graph_x_dimensionality).label(
"graph_x_dimensionality"
),
sql.func.max(GraphTuple.graph_y_dimensionality).label(
"graph_y_dimensionality"
),
# Graph tuple sizes.
sql.func.sum(GraphTuple.pickled_graph_tuple_size).label(
"graph_data_size"
),
sql.func.min(GraphTuple.pickled_graph_tuple_size).label(
"graph_data_size_min"
),
sql.func.avg(GraphTuple.pickled_graph_tuple_size).label(
"graph_data_size_avg"
),
sql.func.max(GraphTuple.pickled_graph_tuple_size).label(
"graph_data_size_max"
),
# Data flow column null counts.
sql.func.count(GraphTuple.data_flow_steps).label(
"data_flow_steps_count"
),
# Data flow step counts.
sql.func.min(GraphTuple.data_flow_steps).label("data_flow_steps_min"),
sql.func.avg(GraphTuple.data_flow_steps).label("data_flow_steps_avg"),
sql.func.max(GraphTuple.data_flow_steps).label("data_flow_steps_max"),
# Data flow positive node count.
sql.func.min(GraphTuple.data_flow_positive_node_count).label(
"data_flow_positive_node_count_min"
),
sql.func.avg(GraphTuple.data_flow_positive_node_count).label(
"data_flow_positive_node_count_avg"
),
sql.func.max(GraphTuple.data_flow_positive_node_count).label(
"data_flow_positive_node_count_max"
),
)
# Ignore "empty" graphs.
query = query.filter(GraphTuple.node_count > 1)
# Compute the stats.
stats = query.one()
# Check that databases have a consistent value for dimensionalities.
if stats.node_x_dimensionality_count > 1:
raise ValueError(
f"Database contains {stats.node_x_dimensionality_count} "
"distinct node x dimensionalities"
)
if stats.node_y_dimensionality_count > 1:
raise ValueError(
f"Database contains {stats.node_y_dimensionality_count} "
"distinct node y dimensionalities"
)
if stats.graph_x_dimensionality_count > 1:
raise ValueError(
f"Database contains {stats.graph_x_dimensionality_count} "
"distinct graph x dimensionalities"
)
if stats.graph_y_dimensionality_count > 1:
raise ValueError(
f"Database contains {stats.graph_y_dimensionality_count} "
"distinct graph y dimensionalities"
)
# Check that every graph has data flow attributes, or none of them do.
if not (
stats.data_flow_steps_count == 0
or stats.data_flow_steps_count == stats.graph_count
):
raise ValueError(
f"{stats.graph_count - stats.data_flow_steps_count} of "
f"{stats.graph_count} graphs have no data_flow_steps "
"value"
)
self._graph_tuple_stats = stats
with self.Session() as session:
self._splits = sorted(
set(
[
row.split
for row in session.query(GraphTuple.split)
.filter(GraphTuple.split != None)
.group_by(GraphTuple.split)
]
)
)
self._split_counts = {
split: session.query(sql.func.count(GraphTuple.id))
.filter(GraphTuple.split == split)
.scalar()
for split in self._splits
}
@property
def graph_tuple_stats(self):
"""Fetch aggregate graph tuple stats, or compute them if not set."""
if self._graph_tuple_stats is None:
self.RefreshStats()
return self._graph_tuple_stats
@property
def stats_json(self) -> Dict[str, Any]:
"""Fetch the database statics as a JSON dictionary."""
return {
name: function(self) for name, function in database_statistics_registry
}
def __repr__(self) -> str:
return (
f"Database of {humanize.DecimalPrefix(self.graph_count, 'graph')} with "
f"dimensionalities: node_x={self.node_x_dimensionality}, "
f"node_y={self.node_y_dimensionality}, "
f"graph_x={self.graph_x_dimensionality}, "
f"graph_y={self.graph_y_dimensionality}."
)
# Deferred declaration of flags because we need to reference Database class.
app.DEFINE_database(
"graph_db", Database, None, "The database to read graph tuples from.",
)
def Main():
"""Main entry point."""
graph_db = FLAGS.graph_db()
print(jsonutil.format_json(graph_db.stats_json))
if __name__ == "__main__":
app.Run(Main)
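# Usage sketch without flags (the database URL below is a made-up example
# path; any existing graph tuple database URL works):
def _stats_example():
  """Print aggregate statistics of an existing database as JSON."""
  db = Database("sqlite:////tmp/graph_tuples.db", must_exist=True)
  print(jsonutil.format_json(db.stats_json))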
| [
40,
44,
48,
54,
63
] |
582 | be1bfa3e366d715d32613284924cf79abde06d41 | <mask token>
class QueueManager(BaseManager):
pass
def start_request():
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')
server_add = '127.0.0.1'
print('Connect to server %s...' % server_add)
manager = QueueManager(address=(server_add, 5000), authkey=b'abc')
manager.connect()
task = manager.get_task_queue()
result = manager.get_result_queue()
for i in range(10):
try:
n = task.get(timeout=1)
print('run task %d * %d...' % (n, n))
r = '%d * %d = %d' % (n, n, n * n)
time.sleep(5)
result.put(r)
except queue.Empty:
print('task queue is empty!')
print('worker exit..')
<mask token>
| <mask token>
class QueueManager(BaseManager):
pass
def start_request():
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')
server_add = '127.0.0.1'
print('Connect to server %s...' % server_add)
manager = QueueManager(address=(server_add, 5000), authkey=b'abc')
manager.connect()
task = manager.get_task_queue()
result = manager.get_result_queue()
for i in range(10):
try:
n = task.get(timeout=1)
print('run task %d * %d...' % (n, n))
r = '%d * %d = %d' % (n, n, n * n)
time.sleep(5)
result.put(r)
except queue.Empty:
print('task queue is empty!')
print('worker exit..')
if __name__ == '__main__':
start_request()
| <mask token>
__author__ = 'Edwin'
<mask token>
class QueueManager(BaseManager):
pass
def start_request():
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')
server_add = '127.0.0.1'
print('Connect to server %s...' % server_add)
manager = QueueManager(address=(server_add, 5000), authkey=b'abc')
manager.connect()
task = manager.get_task_queue()
result = manager.get_result_queue()
for i in range(10):
try:
n = task.get(timeout=1)
print('run task %d * %d...' % (n, n))
r = '%d * %d = %d' % (n, n, n * n)
time.sleep(5)
result.put(r)
except queue.Empty:
print('task queue is empty!')
print('worker exit..')
if __name__ == '__main__':
start_request()
| <mask token>
__author__ = 'Edwin'
import queue
import time
from multiprocessing.managers import BaseManager
class QueueManager(BaseManager):
pass
def start_request():
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')
server_add = '127.0.0.1'
print('Connect to server %s...' % server_add)
manager = QueueManager(address=(server_add, 5000), authkey=b'abc')
manager.connect()
task = manager.get_task_queue()
result = manager.get_result_queue()
for i in range(10):
try:
n = task.get(timeout=1)
print('run task %d * %d...' % (n, n))
r = '%d * %d = %d' % (n, n, n * n)
time.sleep(5)
result.put(r)
except queue.Empty:
print('task queue is empty!')
print('worker exit..')
if __name__ == '__main__':
start_request()
| # -*- coding: utf-8 -*-
"""
-----------------------------------------
IDEA Name : PyCharm
Project Name : HelloWorld
-----------------------------------------
File Name : task_worker
Description :
Author : Edwin
Date : 2018/1/4 23:38
-----------------------------------------
Changer : Edwin
Date : 2018/1/4 23:38
Description :
-----------------------------------------
"""
__author__ = 'Edwin'
import queue
import time
from multiprocessing.managers import BaseManager
# Create a similar QueueManager:
class QueueManager(BaseManager):
pass
def start_request():
# Since this QueueManager only fetches Queues over the network, register by name only:
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')
# Connect to the server, i.e. the machine running task_master.py:
server_add = '127.0.0.1'
print('Connect to server %s...' % server_add)
# Keep the port and authkey exactly the same as configured in task_master.py:
manager = QueueManager(address=(server_add, 5000), authkey=b'abc')
# Connect over the network:
manager.connect()
# Get the Queue objects:
task = manager.get_task_queue()
result = manager.get_result_queue()
# Fetch tasks from the task queue and write results to the result queue:
for i in range(10):
try:
n = task.get(timeout=1)
print('run task %d * %d...' % (n, n))
r = '%d * %d = %d' % (n, n, n * n)
time.sleep(5)
result.put(r)
except queue.Empty:
print('task queue is empty!')
# Results processed.
print('worker exit..')
if __name__ == '__main__':
start_request()
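# For reference, a minimal sketch of the matching master process implied by
# the comments above (an assumption reconstructed from this worker, not code
# from the original project; registering lambdas as callables only works
# where the manager process is started via fork, e.g. on Unix):
def start_master():
    task_q, result_q = queue.Queue(), queue.Queue()
    # Register callables that hand out the real queues:
    QueueManager.register('get_task_queue', callable=lambda: task_q)
    QueueManager.register('get_result_queue', callable=lambda: result_q)
    manager = QueueManager(address=('', 5000), authkey=b'abc')
    manager.start()
    task, result = manager.get_task_queue(), manager.get_result_queue()
    for n in range(10):
        task.put(n)  # enqueue work for the workers
    for _ in range(10):
        print('Result: %s' % result.get(timeout=60))
    manager.shutdown()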
| [
2,
3,
4,
5,
6
] |
583 | 63d9a0fa0d0747762e65f6f1e85e53090035454c | <mask token>
class Category(Document):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class Category(Document):
<mask token>
<mask token>
<mask token>
<mask token>
@classmethod
def get_category_by_text(cls, category_text: str) ->'Category':
try:
category = cls.objects.get(Q(name=category_text) | Q(aliases=
category_text.lower()))
except Category.DoesNotExist:
raise exceptions.InvalidCategory(
'No such category matching that name or alias')
return category
| <mask token>
class Category(Document):
id = StringField(primary_key=True)
name = StringField()
is_base_expenses = BooleanField(default=False)
aliases = ListField(StringField())
@classmethod
def get_category_by_text(cls, category_text: str) ->'Category':
try:
category = cls.objects.get(Q(name=category_text) | Q(aliases=
category_text.lower()))
except Category.DoesNotExist:
raise exceptions.InvalidCategory(
'No such category matching that name or alias')
return category
| from mongoengine import Document, StringField, BooleanField, ListField, Q
import exceptions
class Category(Document):
id = StringField(primary_key=True)
name = StringField()
is_base_expenses = BooleanField(default=False)
aliases = ListField(StringField())
@classmethod
def get_category_by_text(cls, category_text: str) ->'Category':
try:
category = cls.objects.get(Q(name=category_text) | Q(aliases=
category_text.lower()))
except Category.DoesNotExist:
raise exceptions.InvalidCategory(
'No such category matching that name or alias')
return category
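# Usage sketch (kept as a comment since it needs a configured mongoengine
# connection; the category name below is illustrative):
#
#   food = Category.get_category_by_text('groceries')  # by name or alias
#   if food.is_base_expenses:
#       print(food.name)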
| null | [
1,
2,
3,
4
] |
584 | 7163be250ae3a22931de037cb6896c2e6d5f00a8 | <mask token>
| '''
Find the highest value, the fastest, the slowest:
that is what optimization algorithms are for.
For that we must think of a function that we want to maximize or minimize.
They are applied above all at companies like Despegar, where good businesses can be built.
Companies turn to optimization.
#############################################
Traveling Salesman
What is the most efficient route to visit all the cities?
Solving the traveling salesman problem
Turing Prize
'''
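# A minimal brute-force sketch of the traveling salesman idea described above
# (illustrative only; exhaustive enumeration is O(n!) and is exactly what the
# optimization algorithms mentioned here try to avoid):
from itertools import permutations

def shortest_tour(dist):
    """Return (cost, tour) minimizing total cycle distance over all orderings."""
    cities = range(len(dist))
    def tour_cost(tour):
        # Sum consecutive legs, including the return leg to the start.
        return sum(dist[a][b] for a, b in zip(tour, tour[1:] + (tour[0],)))
    return min((tour_cost(t), t) for t in permutations(cities))

# Example: shortest_tour([[0, 2, 9], [2, 0, 6], [9, 6, 0]]) -> (17, (0, 1, 2))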
| null | null | null | [
0,
1
] |
585 | b5275fc068526063fd8baf13210052971b05503f | <mask token>
| <mask token>
assert ec.scale(4, ec.order) == 0
<mask token>
print('Factoring...')
<mask token>
for i in range(2, 2 ** 24):
if x % i == 0:
if x % (i * i) != 0:
factors.append(i)
x = pp(x, i)
print('Getting remainders...')
<mask token>
for f in factors:
u = 0
while u == 0:
while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):
u = randint(1, ec.prime - 1)
u = ec.scale(u, pp(twist_ord, f))
while ec.scale(u, f) != 0:
u = ec.scale(u, f)
shared = ec.scale(u, aPriv)
for i in range(f):
if ec.scale(u, i) == shared:
print('\tSolved mod %d' % f)
rems.append(i)
break
print('Correcting parities...')
for i in range(len(factors)):
if rems[i] != 0:
break
<mask token>
for i in range(len(factors)):
if i == fixed:
continue
u = 0
while u == 0:
while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):
u = randint(1, ec.prime - 1)
u = ec.scale(u, pp(pp(twist_ord, factors[fixed]), factors[i]))
if ec.scale(u, factors[fixed]) == 0:
u = 0
elif ec.scale(u, factors[i]) == 0:
u = 0
shared = ec.scale(u, aPriv)
r, _ = crt([rems[fixed], rems[i]], [factors[fixed], factors[i]])
if ec.scale(u, r) != shared:
rems[i] = -rems[i] % factors[i]
| <mask token>
ec = EC_M(233970423115425145524320034830162017933, 534, 1, 4, order=
233970423115425145498902418297807005944)
assert ec.scale(4, ec.order) == 0
aPriv = randint(1, ec.order - 1)
aPub = ec.scale(4, aPriv)
print('Factoring...')
twist_ord = 2 * ec.prime + 2 - ec.order
factors = []
x = twist_ord
for i in range(2, 2 ** 24):
if x % i == 0:
if x % (i * i) != 0:
factors.append(i)
x = pp(x, i)
print('Getting remainders...')
rems = []
for f in factors:
u = 0
while u == 0:
while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):
u = randint(1, ec.prime - 1)
u = ec.scale(u, pp(twist_ord, f))
while ec.scale(u, f) != 0:
u = ec.scale(u, f)
shared = ec.scale(u, aPriv)
for i in range(f):
if ec.scale(u, i) == shared:
print('\tSolved mod %d' % f)
rems.append(i)
break
print('Correcting parities...')
for i in range(len(factors)):
if rems[i] != 0:
break
fixed = i
for i in range(len(factors)):
if i == fixed:
continue
u = 0
while u == 0:
while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):
u = randint(1, ec.prime - 1)
u = ec.scale(u, pp(pp(twist_ord, factors[fixed]), factors[i]))
if ec.scale(u, factors[fixed]) == 0:
u = 0
elif ec.scale(u, factors[i]) == 0:
u = 0
shared = ec.scale(u, aPriv)
r, _ = crt([rems[fixed], rems[i]], [factors[fixed], factors[i]])
if ec.scale(u, r) != shared:
rems[i] = -rems[i] % factors[i]
| from matasano import *
ec = EC_M(233970423115425145524320034830162017933, 534, 1, 4, order=
233970423115425145498902418297807005944)
assert ec.scale(4, ec.order) == 0
aPriv = randint(1, ec.order - 1)
aPub = ec.scale(4, aPriv)
print('Factoring...')
twist_ord = 2 * ec.prime + 2 - ec.order
factors = []
x = twist_ord
for i in range(2, 2 ** 24):
if x % i == 0:
if x % (i * i) != 0:
factors.append(i)
x = pp(x, i)
print('Getting remainders...')
rems = []
for f in factors:
u = 0
while u == 0:
while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):
u = randint(1, ec.prime - 1)
u = ec.scale(u, pp(twist_ord, f))
while ec.scale(u, f) != 0:
u = ec.scale(u, f)
shared = ec.scale(u, aPriv)
for i in range(f):
if ec.scale(u, i) == shared:
print('\tSolved mod %d' % f)
rems.append(i)
break
print('Correcting parities...')
for i in range(len(factors)):
if rems[i] != 0:
break
fixed = i
for i in range(len(factors)):
if i == fixed:
continue
u = 0
while u == 0:
while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):
u = randint(1, ec.prime - 1)
u = ec.scale(u, pp(pp(twist_ord, factors[fixed]), factors[i]))
if ec.scale(u, factors[fixed]) == 0:
u = 0
elif ec.scale(u, factors[i]) == 0:
u = 0
shared = ec.scale(u, aPriv)
r, _ = crt([rems[fixed], rems[i]], [factors[fixed], factors[i]])
if ec.scale(u, r) != shared:
rems[i] = -rems[i] % factors[i]
| from matasano import *
ec = EC_M(233970423115425145524320034830162017933,534,1,4,order=233970423115425145498902418297807005944)
assert(ec.scale(4,ec.order) == 0)
aPriv = randint(1,ec.order-1)
aPub = ec.scale(4,aPriv)
print("Factoring...")
twist_ord = 2*ec.prime+2 - ec.order
factors = []
x = twist_ord
for i in range(2,2**24):
if x%i == 0:
if x%(i*i) != 0:
factors.append(i)
x = pp(x,i)
print("Getting remainders...")
rems = []
for f in factors:
u = 0
while u == 0:
while isQRes((u**3+ec.A*u**2+u)%ec.prime,ec.prime):
u = randint(1,ec.prime-1)
u = ec.scale(u,pp(twist_ord,f))
while ec.scale(u,f) != 0:
u = ec.scale(u,f)
shared = ec.scale(u,aPriv) #Not generating the MAC this time
for i in range(f):
if ec.scale(u,i) == shared:
print("\tSolved mod %d"%f)
rems.append(i)
break
#Now aPriv is +-rems[i] mod factors[i]
#Do them 2 at a time to get down to 2 values mod Prod factors[i]
print("Correcting parities...")
for i in range(len(factors)):
if rems[i] != 0:
break
fixed = i
for i in range(len(factors)):
if i == fixed:
continue
u = 0
while u == 0:
while isQRes((u**3+ec.A*u**2+u)%ec.prime,ec.prime):
u = randint(1,ec.prime-1)
u = ec.scale(u,pp(pp(twist_ord,factors[fixed]),factors[i]))
if ec.scale(u,factors[fixed]) == 0:
u = 0
elif ec.scale(u,factors[i]) == 0:
u = 0
shared = ec.scale(u,aPriv)
r,_ = crt([rems[fixed],rems[i]],[factors[fixed],factors[i]])
if ec.scale(u,r) != shared:
rems[i] = (-rems[i])%factors[i]
#Now I need to run down the remaining bits
| [
0,
1,
2,
3,
4
] |
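# The twist attack above learns aPriv only up to sign modulo each small factor
# of the twist order, then pins the signs pairwise and stitches the residues
# together with the Chinese Remainder Theorem. A self-contained CRT sketch
# follows; the sample's matasano.crt helper presumably behaves like this, but
# its exact implementation is not shown here and is assumed.
def crt_demo(remainders, moduli):
    """Return (x, M) with x = remainders[i] (mod moduli[i]); moduli must be
    pairwise coprime, as the distinct prime factors above are."""
    M = 1
    for m in moduli:
        M *= m
    x = 0
    for r, m in zip(remainders, moduli):
        Mi = M // m
        x += r * Mi * pow(Mi, -1, m)    # modular inverse (Python 3.8+)
    return x % M, M

assert crt_demo([2, 3], [3, 5]) == (8, 15)   # 8 is 2 mod 3 and 3 mod 5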
586 | aa4fd27382119e3b10d2b57c9b87deff32b5c1ab | from final import getMood
import pickle
def get_mood(username_t,username_i):
mapping={'sadness':'0,0,255','angry':'255,0,0','happy':'0,255,0','surprise':'139,69,19','neutral':'189,183,107','fear':'255,165,0'}
#Sad: Blue, Angry: Red, Happy: Green, Surprise: Brown, Neutral:Yellow,Fear:Orange
mood=getMood(username_i,username_t)
value=mood[0][0]
print value
with open('colorfile', 'wb') as fp:
pickle.dump(mapping[value], fp)
return mood
| null | null | null | null | [
0
] |
587 | dee1ab3adb7f627680410c774be44ae196f63f6c | <mask token>
def Get_Attachments(service, userId, msg_id, store_dir):
"""Get and store attachment from Message with given id.
Args:
service: Authorized Gmail API service instance.
userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
msg_id: ID of Message containing attachment.
store_dir: The directory used to store attachments.
"""
try:
message = service.users().messages().get(userId=userId, id=msg_id
).execute()
parts = [message['payload']]
while parts:
part = parts.pop()
if part.get('parts'):
parts.extend(part['parts'])
if part.get('filename'):
if 'data' in part['body']:
file_data = base64.urlsafe_b64decode(part['body'][
'data'].encode('UTF-8'))
elif 'attachmentId' in part['body']:
attachment = service.users().messages().attachments().get(
userId=userId, messageId=message['id'], id=part[
'body']['attachmentId']).execute()
file_data = base64.urlsafe_b64decode(attachment['data']
.encode('UTF-8'))
else:
file_data = None
if file_data:
path = ''.join([store_dir, part['filename']])
with open(path, 'wb') as f:
f.write(file_data)
except errors.HttpError as error:
print('An error occurred: %s' % error)
<mask token>
def Delete_Message(service, userId, message_id):
"""Permanently delete message.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
service.users().messages().delete(userId=userId, id=message_id).execute()
| <mask token>
def Get_Attachments(service, userId, msg_id, store_dir):
"""Get and store attachment from Message with given id.
Args:
service: Authorized Gmail API service instance.
userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
msg_id: ID of Message containing attachment.
store_dir: The directory used to store attachments.
"""
try:
message = service.users().messages().get(userId=userId, id=msg_id
).execute()
parts = [message['payload']]
while parts:
part = parts.pop()
if part.get('parts'):
parts.extend(part['parts'])
if part.get('filename'):
if 'data' in part['body']:
file_data = base64.urlsafe_b64decode(part['body'][
'data'].encode('UTF-8'))
elif 'attachmentId' in part['body']:
attachment = service.users().messages().attachments().get(
userId=userId, messageId=message['id'], id=part[
'body']['attachmentId']).execute()
file_data = base64.urlsafe_b64decode(attachment['data']
.encode('UTF-8'))
else:
file_data = None
if file_data:
path = ''.join([store_dir, part['filename']])
with open(path, 'wb') as f:
f.write(file_data)
except errors.HttpError as error:
print('An error occurred: %s' % error)
def Reply_With_Attchment(service, userId, receiver, subject, message,
attachments, threadId, message_id):
"""Reply to message with the new pdf attached.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
receiver: Email address of who to send to.
subject: Email subject.
message: Email message, plain text
attachments: 'new_pdf.pdf' Name can be changed in pdf.combine_pdfs
threadId: Used to match reply with message thread
message_id: Identifies specific message to interact with.
"""
emailMsg = message
mimeMessage = MIMEMultipart()
mimeMessage['to'] = receiver
mimeMessage['subject'] = subject
mimeMessage['threadId'] = threadId
mimeMessage['In-Reply-To'] = message_id
mimeMessage['References'] = message_id
mimeMessage.attach(MIMEText(emailMsg, 'plain'))
if attachments != None:
attachment = attachments
content_type = mimetypes.guess_type(attachment)
main_type, sub_type = content_type[0].split('/', 1)
file_name = os.path.basename(attachment)
f = open(attachment, 'rb')
myFile = MIMEBase(main_type, sub_type)
myFile.set_payload(f.read())
myFile.add_header('Content-Disposition', 'attachment', filename=
file_name)
encoders.encode_base64(myFile)
f.close()
mimeMessage.attach(myFile)
raw_string = {'raw': base64.urlsafe_b64encode(mimeMessage.as_bytes()).
decode()}
raw_string['threadId'] = threadId
message = service.users().messages().send(userId=userId, body=raw_string
).execute()
<mask token>
def Get_Message_Info(service, userId, message_id):
"""Retrieves received message info, returns tuple.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
message_info = service.users().messages().get(userId=userId, id=message_id
).execute()
ID = message_info['id']
thread_id = message_info['threadId']
header_info = message_info['payload']['headers']
for header in header_info:
if header['name'] == 'Message-ID':
message_id = header['value']
if header['name'] == 'From':
sender = header['value']
if header['name'] == 'Subject':
subject = header['value']
attachment_info = message_info['payload']['parts']
attachment_list = []
for attachment in attachment_info:
if attachment['mimeType'] == 'application/pdf':
attachment_list.append(attachment['filename'])
info = sender, subject, thread_id, message_id, attachment_list, ID
return info
def Delete_Message(service, userId, message_id):
"""Permanently delete message.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
service.users().messages().delete(userId=userId, id=message_id).execute()
| <mask token>
def Get_Attachments(service, userId, msg_id, store_dir):
"""Get and store attachment from Message with given id.
Args:
service: Authorized Gmail API service instance.
userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
msg_id: ID of Message containing attachment.
store_dir: The directory used to store attachments.
"""
try:
message = service.users().messages().get(userId=userId, id=msg_id
).execute()
parts = [message['payload']]
while parts:
part = parts.pop()
if part.get('parts'):
parts.extend(part['parts'])
if part.get('filename'):
if 'data' in part['body']:
file_data = base64.urlsafe_b64decode(part['body'][
'data'].encode('UTF-8'))
elif 'attachmentId' in part['body']:
attachment = service.users().messages().attachments().get(
userId=userId, messageId=message['id'], id=part[
'body']['attachmentId']).execute()
file_data = base64.urlsafe_b64decode(attachment['data']
.encode('UTF-8'))
else:
file_data = None
if file_data:
path = ''.join([store_dir, part['filename']])
with open(path, 'wb') as f:
f.write(file_data)
except errors.HttpError as error:
print('An error occurred: %s' % error)
def Reply_With_Attchment(service, userId, receiver, subject, message,
attachments, threadId, message_id):
"""Reply to message with the new pdf attached.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
receiver: Email address of who to send to.
subject: Email subject.
message: Email message, plain text
attachments: 'new_pdf.pdf' Name can be changed in pdf.combine_pdfs
threadId: Used to match reply with message thread
message_id: Identifies specific message to interact with.
"""
emailMsg = message
mimeMessage = MIMEMultipart()
mimeMessage['to'] = receiver
mimeMessage['subject'] = subject
mimeMessage['threadId'] = threadId
mimeMessage['In-Reply-To'] = message_id
mimeMessage['References'] = message_id
mimeMessage.attach(MIMEText(emailMsg, 'plain'))
if attachments != None:
attachment = attachments
content_type = mimetypes.guess_type(attachment)
main_type, sub_type = content_type[0].split('/', 1)
file_name = os.path.basename(attachment)
f = open(attachment, 'rb')
myFile = MIMEBase(main_type, sub_type)
myFile.set_payload(f.read())
myFile.add_header('Content-Disposition', 'attachment', filename=
file_name)
encoders.encode_base64(myFile)
f.close()
mimeMessage.attach(myFile)
raw_string = {'raw': base64.urlsafe_b64encode(mimeMessage.as_bytes()).
decode()}
raw_string['threadId'] = threadId
message = service.users().messages().send(userId=userId, body=raw_string
).execute()
def Get_Unread_Messages(service, userId):
"""Retrieves all unread messages with attachments, returns list of message ids.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
"""
message_list = []
message_ids = service.users().messages().list(userId=userId, labelIds=
'INBOX', alt='json', q='is:unread has:attachment').execute()
if message_ids['resultSizeEstimate'] > 0:
for message in message_ids['messages']:
message_list.append(message['id'])
return message_list
def Get_Message_Info(service, userId, message_id):
"""Retrieves received message info, returns tuple.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
message_info = service.users().messages().get(userId=userId, id=message_id
).execute()
ID = message_info['id']
thread_id = message_info['threadId']
header_info = message_info['payload']['headers']
for header in header_info:
if header['name'] == 'Message-ID':
message_id = header['value']
if header['name'] == 'From':
sender = header['value']
if header['name'] == 'Subject':
subject = header['value']
attachment_info = message_info['payload']['parts']
attachment_list = []
for attachment in attachment_info:
if attachment['mimeType'] == 'application/pdf':
attachment_list.append(attachment['filename'])
info = sender, subject, thread_id, message_id, attachment_list, ID
return info
def Delete_Message(service, userId, message_id):
"""Permanently delete message.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
service.users().messages().delete(userId=userId, id=message_id).execute()
| import base64
from apiclient import errors
import os
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
import mimetypes
def Get_Attachments(service, userId, msg_id, store_dir):
"""Get and store attachment from Message with given id.
Args:
service: Authorized Gmail API service instance.
userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
msg_id: ID of Message containing attachment.
store_dir: The directory used to store attachments.
"""
try:
message = service.users().messages().get(userId=userId, id=msg_id
).execute()
parts = [message['payload']]
while parts:
part = parts.pop()
if part.get('parts'):
parts.extend(part['parts'])
if part.get('filename'):
if 'data' in part['body']:
file_data = base64.urlsafe_b64decode(part['body'][
'data'].encode('UTF-8'))
elif 'attachmentId' in part['body']:
attachment = service.users().messages().attachments().get(
userId=userId, messageId=message['id'], id=part[
'body']['attachmentId']).execute()
file_data = base64.urlsafe_b64decode(attachment['data']
.encode('UTF-8'))
else:
file_data = None
if file_data:
path = ''.join([store_dir, part['filename']])
with open(path, 'wb') as f:
f.write(file_data)
except errors.HttpError as error:
print('An error occurred: %s' % error)
def Reply_With_Attchment(service, userId, receiver, subject, message,
attachments, threadId, message_id):
"""Reply to message with the new pdf attached.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
receiver: Email address of who to send to.
subject: Email subject.
message: Email message, plain text
attachments: 'new_pdf.pdf' Name can be changed in pdf.combine_pdfs
threadId: Used to match reply with message thread
message_id: Identifies specific message to interact with.
"""
emailMsg = message
mimeMessage = MIMEMultipart()
mimeMessage['to'] = receiver
mimeMessage['subject'] = subject
mimeMessage['threadId'] = threadId
mimeMessage['In-Reply-To'] = message_id
mimeMessage['References'] = message_id
mimeMessage.attach(MIMEText(emailMsg, 'plain'))
if attachments != None:
attachment = attachments
content_type = mimetypes.guess_type(attachment)
main_type, sub_type = content_type[0].split('/', 1)
file_name = os.path.basename(attachment)
f = open(attachment, 'rb')
myFile = MIMEBase(main_type, sub_type)
myFile.set_payload(f.read())
myFile.add_header('Content-Disposition', 'attachment', filename=
file_name)
encoders.encode_base64(myFile)
f.close()
mimeMessage.attach(myFile)
raw_string = {'raw': base64.urlsafe_b64encode(mimeMessage.as_bytes()).
decode()}
raw_string['threadId'] = threadId
message = service.users().messages().send(userId=userId, body=raw_string
).execute()
def Get_Unread_Messages(service, userId):
"""Retrieves all unread messages with attachments, returns list of message ids.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
"""
message_list = []
message_ids = service.users().messages().list(userId=userId, labelIds=
'INBOX', alt='json', q='is:unread has:attachment').execute()
if message_ids['resultSizeEstimate'] > 0:
for message in message_ids['messages']:
message_list.append(message['id'])
return message_list
def Get_Message_Info(service, userId, message_id):
"""Retrieves received message info, returns tuple.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
message_info = service.users().messages().get(userId=userId, id=message_id
).execute()
ID = message_info['id']
thread_id = message_info['threadId']
header_info = message_info['payload']['headers']
for header in header_info:
if header['name'] == 'Message-ID':
message_id = header['value']
if header['name'] == 'From':
sender = header['value']
if header['name'] == 'Subject':
subject = header['value']
attachment_info = message_info['payload']['parts']
attachment_list = []
for attachment in attachment_info:
if attachment['mimeType'] == 'application/pdf':
attachment_list.append(attachment['filename'])
info = sender, subject, thread_id, message_id, attachment_list, ID
return info
def Delete_Message(service, userId, message_id):
"""Permanently delete message.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
service.users().messages().delete(userId=userId, id=message_id).execute()
| #!/usr/bin/env python3
import base64
from apiclient import errors
import os
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
import mimetypes
def Get_Attachments(service, userId, msg_id, store_dir):
"""Get and store attachment from Message with given id.
Args:
service: Authorized Gmail API service instance.
userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
msg_id: ID of Message containing attachment.
store_dir: The directory used to store attachments.
"""
try:
message = service.users().messages().get(userId=userId, id=msg_id).execute()
parts = [message['payload']]
while parts:
part = parts.pop()
if part.get('parts'):
parts.extend(part['parts'])
if part.get('filename'):
if 'data' in part['body']:
file_data = base64.urlsafe_b64decode(part['body']['data'].encode('UTF-8'))
#self.stdout.write('FileData for %s, %s found! size: %s' % (message['id'], part['filename'], part['size']))
elif 'attachmentId' in part['body']:
attachment = service.users().messages().attachments().get(
userId=userId, messageId=message['id'], id=part['body']['attachmentId']
).execute()
file_data = base64.urlsafe_b64decode(attachment['data'].encode('UTF-8'))
#self.stdout.write('FileData for %s, %s found! size: %s' % (message['id'], part['filename'], attachment['size']))
else:
file_data = None
if file_data:
#do some staff, e.g.
path = ''.join([store_dir, part['filename']])
with open(path, 'wb') as f:
f.write(file_data)
except errors.HttpError as error:
print('An error occurred: %s' % error)
def Reply_With_Attchment(service, userId, receiver, subject, message, attachments, threadId, message_id):
"""Reply to message with the new pdf attached.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
receiver: Email address of who to send to.
subject: Email subject.
message: Email message, plain text
attachments: 'new_pdf.pdf' Name can be changed in pdf.combine_pdfs
threadId: Used to match reply with message thread
message_id: Identifies specific message to interact with.
"""
# Create email message
emailMsg = message
mimeMessage = MIMEMultipart()
mimeMessage['to'] = receiver
mimeMessage['subject'] = subject
mimeMessage['threadId'] = threadId
mimeMessage['In-Reply-To'] = message_id
mimeMessage['References'] = message_id
mimeMessage.attach(MIMEText(emailMsg, 'plain'))
# Attach files
if attachments != None:
attachment = attachments
content_type = mimetypes.guess_type(attachment)
main_type, sub_type = content_type[0].split('/', 1)
file_name = os.path.basename(attachment)
f = open(attachment, 'rb')
myFile = MIMEBase(main_type, sub_type)
myFile.set_payload(f.read())
myFile.add_header('Content-Disposition', 'attachment', filename=file_name)
encoders.encode_base64(myFile)
f.close()
mimeMessage.attach(myFile)
raw_string = {'raw':base64.urlsafe_b64encode(mimeMessage.as_bytes()).decode()}
raw_string['threadId']=threadId
message = service.users().messages().send(userId=userId, body=raw_string).execute()
def Get_Unread_Messages(service, userId):
"""Retrieves all unread messages with attachments, returns list of message ids.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
"""
message_list = []
message_ids = service.users().messages().list(userId=userId, labelIds='INBOX', alt="json", q='is:unread has:attachment').execute()
if message_ids['resultSizeEstimate'] > 0:
for message in message_ids['messages']:
message_list.append(message['id'])
return message_list
def Get_Message_Info(service, userId, message_id):
"""Retrieves received message info, returns tuple.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
message_info = service.users().messages().get(userId=userId, id=message_id).execute()
ID = message_info['id']
thread_id = message_info['threadId']
header_info = message_info['payload']['headers']
for header in header_info:
if header['name']=='Message-ID':
message_id=header['value']
if header['name']=='From':
sender=header['value']
if header['name']=='Subject':
subject=header['value']
attachment_info = message_info['payload']['parts']
attachment_list = []
for attachment in attachment_info:
if attachment['mimeType'] == 'application/pdf':
attachment_list.append(attachment['filename'])
info = (sender, subject, thread_id, message_id, attachment_list, ID)
return info
def Delete_Message(service, userId, message_id):
"""Permanently delete message.
Args:
service: Authorized Gmail API service instance.
    userId: User's email address. The special value "me"
can be used to indicate the authenticated user.
message_id: Identifies specific message to interact with.
"""
service.users().messages().delete(userId=userId, id=message_id).execute() | [
2,
4,
5,
6,
7
] |
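# A hypothetical driver chaining the Gmail helpers above; `service` is assumed
# to be an authorized Gmail API client built elsewhere, and the PDF-merging
# step is stubbed out since that code is not part of this sample.
def process_inbox(service, store_dir='./attachments/'):
    for msg_id in Get_Unread_Messages(service, 'me'):
        sender, subject, thread_id, message_id, pdfs, ID = Get_Message_Info(
            service, 'me', msg_id)
        if not pdfs:
            continue                    # only handle messages carrying PDFs
        Get_Attachments(service, 'me', msg_id, store_dir)
        # ...merge the downloaded PDFs into 'new_pdf.pdf' here...
        Reply_With_Attchment(service, 'me', sender, 'Re: ' + subject,
                             'Merged PDF attached.', 'new_pdf.pdf',
                             thread_id, message_id)
        Delete_Message(service, 'me', ID)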
588 | c4b9fdba9e9eeccc52999dab9232302f159c882a | <mask token>
| if sm.hasItem(4310100, 1):
sm.setSpeakerID(9390220)
sm.sendSayOkay(
"You can't start your voyage until you finish the tutorial quest!")
else:
sm.setSpeakerID(9390220)
sm.sendNext(
'What? You threw away the coins without finishing the tutorial? (Sighs) I suppose I can give you some more coins so that you can complete the tutorial.'
)
sm.setSpeakerID(9390220)
sm.sendSay("Just remember, you can't trade without gold!")
sm.giveItem(4310100, 10)
sm.setSpeakerID(9390220)
    sm.sendPrev('Check to make sure that you have coins in your inventory.')
| # Created by MechAviv
# [Maestra Fiametta] | [9390220]
# Commerci Republic : San Commerci
if sm.hasItem(4310100, 1):
sm.setSpeakerID(9390220)
sm.sendSayOkay("You can't start your voyage until you finish the tutorial quest!")
else:
sm.setSpeakerID(9390220)
sm.sendNext("What? You threw away the coins without finishing the tutorial? (Sighs) I suppose I can give you some more coins so that you can complete the tutorial.")
sm.setSpeakerID(9390220)
sm.sendSay("Just remember, you can't trade without gold!")
sm.giveItem(4310100, 10)
sm.setSpeakerID(9390220)
sm.sendPrev("Check to make sure there you have coins in your inventory.") | null | null | [
0,
1,
2
] |
589 | 5a103a4f72b9cd3ea3911aeefeeb2194c8ad7df0 | <mask token>
class BloomFilter:
def __init__(self):
bit_array = bitarray(BIT_SIZE)
bit_array.setall(0)
self.bit_array = bit_array
def add(self, val):
point_list = self.get_postions(val)
for b in point_list:
self.bit_array[b] = 1
<mask token>
def is_contains(self, val):
point_list = self.get_postions(val)
result = True
for b in point_list:
result = result and self.bit_array[b]
return result
<mask token>
| <mask token>
class BloomFilter:
def __init__(self):
bit_array = bitarray(BIT_SIZE)
bit_array.setall(0)
self.bit_array = bit_array
def add(self, val):
point_list = self.get_postions(val)
for b in point_list:
self.bit_array[b] = 1
def get_postions(self, val):
point1 = mmh3.hash(val, 5) % BIT_SIZE
point2 = mmh3.hash(val, 7) % BIT_SIZE
point3 = mmh3.hash(val, 11) % BIT_SIZE
point4 = mmh3.hash(val, 13) % BIT_SIZE
point7 = mmh3.hash(val, 19) % BIT_SIZE
point5 = mmh3.hash(val, 23) % BIT_SIZE
point6 = mmh3.hash(val, 31) % BIT_SIZE
return [point1, point2, point3, point4, point5, point6]
def is_contains(self, val):
point_list = self.get_postions(val)
result = True
for b in point_list:
result = result and self.bit_array[b]
return result
<mask token>
| <mask token>
class BloomFilter:
def __init__(self):
bit_array = bitarray(BIT_SIZE)
bit_array.setall(0)
self.bit_array = bit_array
def add(self, val):
point_list = self.get_postions(val)
for b in point_list:
self.bit_array[b] = 1
def get_postions(self, val):
point1 = mmh3.hash(val, 5) % BIT_SIZE
point2 = mmh3.hash(val, 7) % BIT_SIZE
point3 = mmh3.hash(val, 11) % BIT_SIZE
point4 = mmh3.hash(val, 13) % BIT_SIZE
point7 = mmh3.hash(val, 19) % BIT_SIZE
point5 = mmh3.hash(val, 23) % BIT_SIZE
point6 = mmh3.hash(val, 31) % BIT_SIZE
return [point1, point2, point3, point4, point5, point6]
def is_contains(self, val):
point_list = self.get_postions(val)
result = True
for b in point_list:
result = result and self.bit_array[b]
return result
if __name__ == '__main__':
bf = BloomFilter()
if bf.is_contains('zqw'):
print('exists')
else:
print('not exists')
bf.add('zqw')
if bf.is_contains('shooter'):
print('exists')
else:
bf.add('shooter')
if bf.is_contains('zqw'):
print('exists')
else:
bf.add('zqw')
| <mask token>
BIT_SIZE = 1 << 30
class BloomFilter:
def __init__(self):
bit_array = bitarray(BIT_SIZE)
bit_array.setall(0)
self.bit_array = bit_array
def add(self, val):
point_list = self.get_postions(val)
for b in point_list:
self.bit_array[b] = 1
def get_postions(self, val):
point1 = mmh3.hash(val, 5) % BIT_SIZE
point2 = mmh3.hash(val, 7) % BIT_SIZE
point3 = mmh3.hash(val, 11) % BIT_SIZE
point4 = mmh3.hash(val, 13) % BIT_SIZE
point7 = mmh3.hash(val, 19) % BIT_SIZE
point5 = mmh3.hash(val, 23) % BIT_SIZE
point6 = mmh3.hash(val, 31) % BIT_SIZE
return [point1, point2, point3, point4, point5, point6]
def is_contains(self, val):
point_list = self.get_postions(val)
result = True
for b in point_list:
result = result and self.bit_array[b]
return result
if __name__ == '__main__':
bf = BloomFilter()
if bf.is_contains('zqw'):
print('exists')
else:
print('not exists')
bf.add('zqw')
if bf.is_contains('shooter'):
print('exists')
else:
bf.add('shooter')
if bf.is_contains('zqw'):
print('exists')
else:
bf.add('zqw')
| #coding: utf-8
import mmh3
from bitarray import bitarray
BIT_SIZE = 1 << 30
class BloomFilter:
def __init__(self):
# Initialize bloom filter, set size and all bits to 0
bit_array = bitarray(BIT_SIZE)
bit_array.setall(0)
self.bit_array = bit_array
def add(self, val):
point_list = self.get_postions(val)
for b in point_list:
self.bit_array[b] = 1
def get_postions(self, val):
# Get points positions in bit vector.
        # Different hash seeds yield independent hash functions; prime seeds work best
point1 = mmh3.hash(val, 5) % BIT_SIZE
point2 = mmh3.hash(val, 7) % BIT_SIZE
point3 = mmh3.hash(val, 11) % BIT_SIZE
point4 = mmh3.hash(val, 13) % BIT_SIZE
point7 = mmh3.hash(val, 19) % BIT_SIZE
point5 = mmh3.hash(val, 23) % BIT_SIZE
point6 = mmh3.hash(val, 31) % BIT_SIZE
return [point1, point2, point3, point4, point5, point6]
def is_contains(self, val):
point_list = self.get_postions(val)
result = True
for b in point_list:
result = result and self.bit_array[b]
return result
if __name__ == '__main__':
bf = BloomFilter()
    # On the first run this prints "not exists"
if bf.is_contains('zqw'):
print('exists')
else:
print('not exists')
bf.add('zqw')
if bf.is_contains('shooter'):
print('exists')
else:
bf.add('shooter')
if bf.is_contains('zqw'):
print('exists')
else:
bf.add('zqw') | [
4,
5,
6,
7,
9
] |
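# Why six hash seeds over a 2**30-bit array is comfortable: the standard
# Bloom-filter false-positive estimate is (1 - e**(-k*n/m))**k. The item
# count below is an assumed example, not a figure from the sample.
import math

def bloom_fp_rate(n_items, m_bits=1 << 30, k_hashes=6):
    return (1 - math.exp(-k_hashes * n_items / m_bits)) ** k_hashes

print(bloom_fp_rate(10_000_000))   # about 3e-8 for ten million items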
590 | 59d04ebd9a45c6a179a2da1f88f728ba2af91c05 | <mask token>
| <mask token>
admin.site.register(Persona)
| from django.contrib import admin
from pharma_models.personas.models import Persona
admin.site.register(Persona)
| null | null | [
0,
1,
2
] |
591 | ed5653455062cb3468c232cf0fa3f1d18793626a | <mask token>
| <mask token>
CustomLogger.init_log()
<mask token>
CustomLogger.info('[main]', log_str)
| <mask token>
CustomLogger.init_log()
log_str = '%s/%s/%s\n' % ('demo1', 'demo2', 'demo3')
CustomLogger.info('[main]', log_str)
| from python_logging.Demo_CustomLogger import CustomLogger
CustomLogger.init_log()
log_str = '%s/%s/%s\n' % ('demo1', 'demo2', 'demo3')
CustomLogger.info('[main]', log_str)
| from python_logging.Demo_CustomLogger import CustomLogger
CustomLogger.init_log()
# CustomLogger.info()
log_str = '%s/%s/%s\n' % ("demo1", "demo2", "demo3")
CustomLogger.info('[main]', log_str)
| [
0,
1,
2,
3,
4
] |
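# The python_logging.Demo_CustomLogger module imported above is not included
# in this sample; one plausible shape matching the two calls it receives,
# init_log() and info(tag, msg), built on the stdlib logging module:
import logging

class CustomLogger:
    _log = logging.getLogger('demo')

    @classmethod
    def init_log(cls):
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s %(levelname)s %(message)s')

    @classmethod
    def info(cls, tag, msg):
        cls._log.info('%s %s', tag, msg)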
592 | a9f3d5f11a9f2781571029b54d54b41d9f1f83b3 | <mask token>
class ProfileInline(admin.StackedInline):
<mask token>
<mask token>
<mask token>
class UserAdmin(BaseUserAdmin):
inlines = ProfileInline,
<mask token>
| <mask token>
class ProfileInline(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'profile'
class UserAdmin(BaseUserAdmin):
inlines = ProfileInline,
<mask token>
| <mask token>
class ProfileInline(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'profile'
class UserAdmin(BaseUserAdmin):
inlines = ProfileInline,
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(Thread)
admin.site.register(Comment)
admin.site.register(Experience)
admin.site.register(ThreadTag)
admin.site.register(ExperienceTag)
admin.site.register(UserProfile)
admin.site.register(ExperiencesLike)
admin.site.register(ExperiencesDislike)
admin.site.register(Like)
admin.site.register(Dislike)
admin.site.register(Toolbox)
admin.site.register(ToolboxUser)
admin.site.register(Question)
admin.site.register(Answer)
| from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
from app.models import *
class ProfileInline(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'profile'
class UserAdmin(BaseUserAdmin):
inlines = ProfileInline,
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(Thread)
admin.site.register(Comment)
admin.site.register(Experience)
admin.site.register(ThreadTag)
admin.site.register(ExperienceTag)
admin.site.register(UserProfile)
admin.site.register(ExperiencesLike)
admin.site.register(ExperiencesDislike)
admin.site.register(Like)
admin.site.register(Dislike)
admin.site.register(Toolbox)
admin.site.register(ToolboxUser)
admin.site.register(Question)
admin.site.register(Answer)
| from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
from app.models import *
# Register your models here.
class ProfileInline(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'profile'
class UserAdmin(BaseUserAdmin):
inlines = (ProfileInline, )
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(Thread)
admin.site.register(Comment)
admin.site.register(Experience)
admin.site.register(ThreadTag)
admin.site.register(ExperienceTag)
admin.site.register(UserProfile)
admin.site.register(ExperiencesLike)
admin.site.register(ExperiencesDislike)
admin.site.register(Like)
admin.site.register(Dislike)
admin.site.register(Toolbox)
admin.site.register(ToolboxUser)
admin.site.register(Question)
admin.site.register(Answer) | [
3,
4,
5,
6,
7
] |
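# The ProfileInline/UserAdmin pattern above requires UserProfile to link back
# to User one-to-one; a minimal sketch of the model shape it assumes (the bio
# field is invented for illustration, only the user link is structurally needed):
from django.contrib.auth.models import User
from django.db import models

class UserProfile(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    bio = models.TextField(blank=True)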
593 | a9b2a4d4924dcdd6e146ea346e71bf42c0259846 | <mask token>
class GregerUpdateAgent(Thread):
<mask token>
<mask token>
@property
def localRevisionRecord(self):
"""
Get local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Getting local revision record...')
revisionRecordPath = self.localRevisionRecordPath
localLog.debug('Attemption to get record from file...')
try:
with open(revisionRecordPath, 'r') as f:
localRecord = f.read()
localLog.debug('Local revision record: ' + str(localRecord))
except Exception as e:
self.log.warning('Failed to open file! - ' + str(e))
self.localRevisionRecord = 0
localRecord = self.localRevisionRecord
return localRecord
<mask token>
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
            self.log.warning('Setting guaSWSource not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
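    # The parser above expects `svn proplist -v -R --revprop -r HEAD <URI>`
    # output of roughly this shape (illustrative values; exact formatting can
    # vary between svn versions):
    #
    #   Unversioned properties on revision 42:
    #     svn:author
    #       someuser
    #     svn:date
    #       2018-01-01T12:00:00.000000Z
    #     svn:log
    #       Commit message text
    #     git-commit
    #       3f2a1b...
    #
    # The first line yields the revision number ('42:' stripped to '42') and
    # each property name is followed by its value on the next line.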
def updateSoftware(self, swRev='HEAD'):
"""
        Get and update software from the server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
            self.log.warning('Setting guaSWSource not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
            'Identifying old files to remove (<all_files> - <new_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
            self.log.debug('Dir: ' + os.path.join(r, dir))
def run(self):
"""
Run Greger Update Agent.
"""
localLog = logging.getLogger(self.logPath + '.run')
self.log.info('Starting Greger Update Agent (GUA)...')
localLog.debug('Wait for Greger Client Module to start...')
self.ready.wait()
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
)
allThreads.update({thr.__class__.__name__: thr})
if thr.__class__.__name__ == 'GregerClientModule':
localLog.debug('Greger Client Module thread found! ' +
allThreads['GregerClientModule'].name)
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug('Checking for updates (' + str(loopCount) + ')...')
localLog.debug('Getting local revision record...')
localRevision = self.localRevisionRecord
localLog.debug('Getting latest software info...')
softwareInfo = self.getSoftwareInfo()
self.log.info('Revision check done! (' + str(localRevision) + ')')
if int(localRevision) == int(softwareInfo['revision']):
self.log.info('No new revision found.')
else:
self.log.info('New revision found!')
localLog.debug('Attempting to update software...')
self.updateSoftware()
localLog.debug(
'Attempting to update server with software info...')
allThreads['GregerDatabase'].update('about', softwareInfo)
self.log.info(
                    'Attempting to stop all execution before restarting...')
allThreads['GregerClientModule'].stopAll(GUA=True)
                self.log.info('Attempting to restart application...')
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
'value']
else:
delayTime = 10
self.log.warning('Settings not defined! (using default=10)')
self.log.info('Waiting ' + str(delayTime) + 's...')
self.stopExecution.wait(delayTime)
self.log.info('Greger Update Agent (GUA) execution stopped!')
| <mask token>
class GregerUpdateAgent(Thread):
<mask token>
<mask token>
@property
def localRevisionRecord(self):
"""
Get local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Getting local revision record...')
revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attempting to get record from file...')
try:
with open(revisionRecordPath, 'r') as f:
localRecord = f.read()
localLog.debug('Local revision record: ' + str(localRecord))
except Exception as e:
self.log.warning('Failed to open file! - ' + str(e))
self.localRevisionRecord = 0
localRecord = self.localRevisionRecord
return localRecord
@localRevisionRecord.setter
def localRevisionRecord(self, newRevision):
"""
Set local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Setting local revision record (.gcm) to ' + str(
newRevision) + '...')
revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attempting to write "' + str(newRevision) +
'" to file...')
with open(revisionRecordPath, 'w') as f:
f.write(str(newRevision))
self.log.info('Local revision record set: ' + str(newRevision))
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
            self.log.warning('Setting guaSWSource not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
"""
        Get and update software from the server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
            self.log.warning('Setting guaSWSource not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
            'Identifying old files to remove (<all_files> - <new_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
            self.log.debug('Dir: ' + os.path.join(r, dir))
def run(self):
"""
Run Greger Update Agent.
"""
localLog = logging.getLogger(self.logPath + '.run')
self.log.info('Starting Greger Update Agent (GUA)...')
localLog.debug('Wait for Greger Client Module to start...')
self.ready.wait()
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
)
allThreads.update({thr.__class__.__name__: thr})
if thr.__class__.__name__ == 'GregerClientModule':
localLog.debug('Greger Client Module thread found! ' +
allThreads['GregerClientModule'].name)
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug('Checking for updates (' + str(loopCount) + ')...')
localLog.debug('Getting local revision record...')
localRevision = self.localRevisionRecord
localLog.debug('Getting latest software info...')
softwareInfo = self.getSoftwareInfo()
self.log.info('Revision check done! (' + str(localRevision) + ')')
if int(localRevision) == int(softwareInfo['revision']):
self.log.info('No new revision found.')
else:
self.log.info('New revision found!')
localLog.debug('Attempting to update software...')
self.updateSoftware()
localLog.debug(
'Attempting to update server with software info...')
allThreads['GregerDatabase'].update('about', softwareInfo)
self.log.info(
                    'Attempting to stop all execution before restarting...')
allThreads['GregerClientModule'].stopAll(GUA=True)
                self.log.info('Attempting to restart application...')
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
'value']
else:
delayTime = 10
self.log.warning('Settings not defined! (using default=10)')
self.log.info('Waiting ' + str(delayTime) + 's...')
self.stopExecution.wait(delayTime)
self.log.info('Greger Update Agent (GUA) execution stopped!')
| <mask token>
class GregerUpdateAgent(Thread):
<mask token>
def __init__(self, ready=None):
"""
Initialize the main class
"""
Thread.__init__(self)
self.ready = ready
self.logPath = 'root.GUA'
self.log = logging.getLogger(self.logPath)
localLog = logging.getLogger(self.logPath + '.__init__')
localLog.debug('Initiating Greger Update Agent (GUA)...')
self.stopExecution = Event()
self._location = os.path.abspath(__file__)
        self._location = self._location[:-15]  # drop the last 15 characters of the path
localLog.debug('Local path: ' + self._location)
localLog.debug('Getting configuration parameters from file...')
config = getLocalConfig()
self.localRevisionRecordPath = config.get('greger_update_agent',
'local_revision_path')
localLog.debug('Parameter: (localRevisionRecordPath) ' + self.
localRevisionRecordPath)
self.log.info('Greger Update Agent (GUA) successfully initiated!')
@property
def localRevisionRecord(self):
"""
Get local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Getting local revision record...')
revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attempting to get record from file...')
try:
with open(revisionRecordPath, 'r') as f:
localRecord = f.read()
localLog.debug('Local revision record: ' + str(localRecord))
except Exception as e:
self.log.warning('Failed to open file! - ' + str(e))
self.localRevisionRecord = 0
localRecord = self.localRevisionRecord
return localRecord
@localRevisionRecord.setter
def localRevisionRecord(self, newRevision):
"""
Set local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Setting local revision record (.gcm) to ' + str(
newRevision) + '...')
revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attempting to write "' + str(newRevision) +
'" to file...')
with open(revisionRecordPath, 'w') as f:
f.write(str(newRevision))
self.log.info('Local revision record set: ' + str(newRevision))
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
            self.log.warning('Setting guaSWSource not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
"""
        Get and update software from the server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
            self.log.warning('Setting guaSWSource not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
            'Identifying old files to remove (<all_files> - <new_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
            self.log.debug('Dir: ' + os.path.join(r, dir))
def run(self):
"""
Run Greger Update Agent.
"""
localLog = logging.getLogger(self.logPath + '.run')
self.log.info('Starting Greger Update Agent (GUA)...')
localLog.debug('Wait for Greger Client Module to start...')
self.ready.wait()
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
)
allThreads.update({thr.__class__.__name__: thr})
if thr.__class__.__name__ == 'GregerClientModule':
localLog.debug('Greger Client Module thread found! ' +
allThreads['GregerClientModule'].name)
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug('Checking for updates (' + str(loopCount) + ')...')
localLog.debug('Getting local revision record...')
localRevision = self.localRevisionRecord
localLog.debug('Getting latest software info...')
softwareInfo = self.getSoftwareInfo()
self.log.info('Revision check done! (' + str(localRevision) + ')')
if int(localRevision) == int(softwareInfo['revision']):
self.log.info('No new revision found.')
else:
self.log.info('New revision found!')
localLog.debug('Attempting to update software...')
self.updateSoftware()
localLog.debug(
'Attempting to update server with software info...')
allThreads['GregerDatabase'].update('about', softwareInfo)
self.log.info(
                    'Attempting to stop all execution before restarting...')
allThreads['GregerClientModule'].stopAll(GUA=True)
                self.log.info('Attempting to restart application...')
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
'value']
else:
delayTime = 10
self.log.warning('Settings not defined! (using default=10)')
self.log.info('Waiting ' + str(delayTime) + 's...')
self.stopExecution.wait(delayTime)
self.log.info('Greger Update Agent (GUA) execution stopped!')
| <mask token>
class GregerUpdateAgent(Thread):
"""
Main class which holds the main sequence of the application.
"""
def __init__(self, ready=None):
"""
Initialize the main class
"""
Thread.__init__(self)
self.ready = ready
self.logPath = 'root.GUA'
self.log = logging.getLogger(self.logPath)
localLog = logging.getLogger(self.logPath + '.__init__')
localLog.debug('Initiating Greger Update Agent (GUA)...')
self.stopExecution = Event()
self._location = os.path.abspath(__file__)
        self._location = self._location[:-15]  # drop the last 15 characters of the path
localLog.debug('Local path: ' + self._location)
localLog.debug('Getting configuration parameters from file...')
config = getLocalConfig()
self.localRevisionRecordPath = config.get('greger_update_agent',
'local_revision_path')
localLog.debug('Parameter: (localRevisionRecordPath) ' + self.
localRevisionRecordPath)
self.log.info('Greger Update Agent (GUA) successfully initiated!')
@property
def localRevisionRecord(self):
"""
Get local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Getting local revision record...')
revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attempting to get record from file...')
try:
with open(revisionRecordPath, 'r') as f:
localRecord = f.read()
localLog.debug('Local revision record: ' + str(localRecord))
except Exception as e:
self.log.warning('Failed to open file! - ' + str(e))
self.localRevisionRecord = 0
localRecord = self.localRevisionRecord
return localRecord
@localRevisionRecord.setter
def localRevisionRecord(self, newRevision):
"""
Set local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Setting local revision record (.gcm) to ' + str(
newRevision) + '...')
revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attempting to write "' + str(newRevision) +
'" to file...')
with open(revisionRecordPath, 'w') as f:
f.write(str(newRevision))
self.log.info('Local revision record set: ' + str(newRevision))
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
            self.log.warning('Setting guaSWSource not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
"""
Get and updating software from server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
            self.log.warning('Setting guaSWSource not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
'Identifying old files to remove (<new_files> - <all_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
self.log.debug('Dir: ' + os.path.join(r, file))
def run(self):
"""
Run Greger Update Agent.
"""
localLog = logging.getLogger(self.logPath + '.run')
self.log.info('Starting Greger Update Agent (GUA)...')
localLog.debug('Wait for Greger Client Module to start...')
self.ready.wait()
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
)
allThreads.update({thr.__class__.__name__: thr})
if thr.__class__.__name__ == 'GregerClientModule':
localLog.debug('Greger Client Module thread found! ' +
allThreads['GregerClientModule'].name)
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug('Checking for updates (' + str(loopCount) + ')...')
localLog.debug('Getting local revision record...')
localRevision = self.localRevisionRecord
localLog.debug('Getting latest software info...')
softwareInfo = self.getSoftwareInfo()
self.log.info('Revision check done! (' + str(localRevision) + ')')
if int(localRevision) == int(softwareInfo['revision']):
self.log.info('No new revision found.')
else:
self.log.info('New revision found!')
localLog.debug('Attempting to update software...')
self.updateSoftware()
localLog.debug(
'Attempting to update server with software info...')
allThreads['GregerDatabase'].update('about', softwareInfo)
self.log.info(
                'Attempting to stop all execution before restarting...')
allThreads['GregerClientModule'].stopAll(GUA=True)
            self.log.info('Attempting to restart application...')
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
'value']
else:
delayTime = 10
self.log.warning('Settings not defined! (using default=10)')
self.log.info('Waiting ' + str(delayTime) + 's...')
self.stopExecution.wait(delayTime)
self.log.info('Greger Update Agent (GUA) execution stopped!')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Greger Update Agent (GUA) module for the Greger Client Module
"""
__author__ = "Eric Sandbling"
__license__ = 'MIT'
__status__ = 'Development'
# System modules
import os, sys
import shutil
import logging
import subprocess
from threading import Event
from threading import Thread
from threading import enumerate
# Local Modules
from common import getLocalConfig
from common import restart_program
from gdb import GregerDatabase
# from gcm import GregerClientModule
class GregerUpdateAgent(Thread):
"""
Main class which holds the main sequence of the application.
"""
def __init__(self, ready=None):
'''
Initialize the main class
'''
Thread.__init__(self)
self.ready = ready
# Setup logging
self.logPath = "root.GUA"
self.log = logging.getLogger(self.logPath)
localLog = logging.getLogger(self.logPath + ".__init__")
localLog.debug("Initiating Greger Update Agent (GUA)...")
# Stop execution handler
self.stopExecution = Event()
# Get local path
self._location = os.path.abspath(__file__)
self._location = self._location[:-15] # Trim gcm/__main__.py from path to get at location of application
localLog.debug("Local path: " + self._location)
# Get Local Configuration Parameters
localLog.debug("Getting configuration parameters from file...")
config = getLocalConfig()
# Locally relevant parameters
self.localRevisionRecordPath = config.get("greger_update_agent", "local_revision_path")
localLog.debug("Parameter: (localRevisionRecordPath) " + self.localRevisionRecordPath)
self.log.info("Greger Update Agent (GUA) successfully initiated!")
@property
def localRevisionRecord(self):
'''
Get local revision record (.gcm)
'''
# Logging
localLog = logging.getLogger(self.logPath + ".localRevisionRecord")
localLog.debug("Getting local revision record...")
# Local parameters
# revisionRecordPath = os.path.join(self._location, ".gcm")
revisionRecordPath = self.localRevisionRecordPath
localLog.debug("Attemption to get record from file...")
try:
with open(revisionRecordPath,"r") as f:
localRecord = f.read()
localLog.debug("Local revision record: " + str(localRecord))
except Exception as e:
self.log.warning("Failed to open file! - " + str(e))
self.localRevisionRecord = 0
localRecord = self.localRevisionRecord
return localRecord
@localRevisionRecord.setter
def localRevisionRecord(self, newRevision):
'''
Set local revision record (.gcm)
'''
# Logging
localLog = logging.getLogger(self.logPath + ".localRevisionRecord")
localLog.debug("Setting local revision record (.gcm) to " + str(newRevision) + "...")
# Local parameters
# revisionRecordPath = os.path.join(self._location, ".gcm")
revisionRecordPath = self.localRevisionRecordPath
localLog.debug("Attemption to write \"" + str(newRevision) + "\" to file...")
with open(revisionRecordPath,"w") as f:
f.write(str(newRevision))
self.log.info("Local revision record set: " + str(newRevision))
def getSoftwareInfo(self, rev='HEAD'):
'''
Retrieve information about a revision available on server.
'''
# Logging
localLog = logging.getLogger(self.logPath + ".getSoftwareInfo")
localLog.debug("Attempting to retrieve software revision info...")
# Locally relevant parameters
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
self.log.warning("Setting " + str(guaSWSource) + " not defined!")
return
moduleReturn = {
'revision': "",
'revision_SHA' : "",
'revision_author' : "",
'revision_date' : "",
'revision_comment' : ""
}
# Get server revision info
localLog.debug("Attempting to retrieve info from server... " + guaSWServerURI)
pCmd = "svn proplist -v -R --revprop -r " + rev
pCmd += " " + guaSWServerURI
localLog.debug(pCmd)
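        # Note: the parsing below assumes the first line of output ends with the
        # revision number plus one trailing character (stripped via revStr[:-1]),
        # and that each property name ('git-commit', 'svn:author', 'svn:date',
        # 'svn:log') is immediately followed by its value on the next line.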
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
# Create list of output and remove extra white spaces
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
# Get revision from output
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug("Revision: " + revStr[:-1])
# Get SHA
shaStr = outputList[outputList.index('git-commit')+1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug("Revision SHA: " + shaStr)
# Get revision author
authorStr = outputList[outputList.index('svn:author')+1]
moduleReturn['revision_author'] = authorStr
localLog.debug("Revision author: " + authorStr)
# Get revision date
dateStr = outputList[outputList.index('svn:date')+1]
moduleReturn['revision_date'] = dateStr
localLog.debug("Revision date: " + dateStr)
# Get revision comment
commentStr = outputList[outputList.index('svn:log')+1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug("Revision Comment: " + commentStr)
if err is not None:
localLog.debug("Error message: " + str(err))
except Exception as e:
self.log.error("Oops! Something went wrong - " + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
'''
Get and updating software from server
'''
# Logging
localLog = logging.getLogger(self.logPath + ".updateSoftware")
localLog.debug("Getting software revision " + str(swRev) + " from server and updating local client...")
# Locally relevant parameters
localLog.debug("Constructing target path for new software...")
targetRoot = self._location
targetDir = "gcm"
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug("Target path: " + targetPath)
localLog.debug("Retrieving relevant parameters from server...")
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug("Parameter: (guaSWSource) " + guaSWServerURI)
else:
self.log.warning("Setting " + str(guaSWSource) + " not defined!")
return
# Get software files from server
localLog.debug("Getting software files from server...")
# Compile download command
pCmd = "svn export --force -r " + str(swRev)
pCmd += " " + guaSWServerURI
pCmd += " " + targetPath
localLog.debug(pCmd)
# Execute command
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
if err is not None:
self.log.warning("Error message: " + str(err))
else:
self.log.info("Download successful!")
# Print output
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error("Oops! Something went wrong - " + str(e))
# Read revision text
localLog.debug("Reading downloaded revision from \"" + output.splitlines()[-1] + "\"...")
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug("Downloaded Revision: " + revText)
# Update local revision record
self.localRevisionRecord = revText
# Get downloaded files text
localLog.debug("Listing downloaded files...")
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1])
downloadedFiles.append(file)
localLog.debug("File: " + file)
# List files in directory
self.log.debug("Getting all files in local directory (after update)...")
allFiles = []
# r=root, d=directories, f = files
for r, d, f in os.walk(targetPath):
for file in f:
# allFiles.append(os.path.abspath(file))
allFiles.append(os.path.join(r, file))
localLog.debug("File: " + allFiles[-1])
# localLog.debug("File: " + os.path.join(r, file))
for dir in d:
# allFiles.append(os.path.abspath(dir))
allFiles.append(os.path.join(r, dir))
localLog.debug("Dir: " + allFiles[-1])
# localLog.debug("Dir: " + os.path.join(r, dir))
self.log.info("Identifying old files to remove (<new_files> - <all_files>)...")
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info("Removing: " + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning("Oops! Something went wrong! - " + str(e))
# List files in directory
self.log.debug("Re-getting all files in local directory...")
allFiles = []
# r=root, d=directories, f = files
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug("File: " + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
self.log.debug("Dir: " + os.path.join(r, file))
def run(self):
'''
Run Greger Update Agent.
'''
# Logging
localLog = logging.getLogger(self.logPath + ".run")
self.log.info("Starting Greger Update Agent (GUA)...")
# Wait for Greger Client Module to start...
localLog.debug("Wait for Greger Client Module to start...")
self.ready.wait()
# Get all active threads!
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + " " + thr.__class__.__name__ +" active!")
allThreads.update({thr.__class__.__name__ : thr})
if thr.__class__.__name__ == "GregerClientModule":
localLog.debug("Greger Client Module thread found! " +
allThreads['GregerClientModule'].name)
# Start checking for updates
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug("Checking for updates (" + str(loopCount) + ")...")
# Get local revision record
localLog.debug("Getting local revision record...")
localRevision = self.localRevisionRecord
# Get server revision...
localLog.debug("Getting latest software info...")
softwareInfo = self.getSoftwareInfo()
self.log.info("Revision check done! (" + str(localRevision) + ")")
if int(localRevision) == int(softwareInfo['revision']):
self.log.info("No new revision found.")
else:
self.log.info("New revision found!")
# Do update!!
localLog.debug("Attempting to update software...")
self.updateSoftware()
# Update server with updated software
localLog.debug("Attempting to update server with software info...")
allThreads['GregerDatabase'].update('about', softwareInfo)
                # Tell GCM to stop all threads (except GUA)...
                self.log.info("Attempting to stop all execution before restarting...")
allThreads['GregerClientModule'].stopAll(GUA=True)
# Restart Application
self.log.info("Attemption to restart application...")
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay']['value']
else:
delayTime = 10
self.log.warning("Settings not defined! (using default=10)")
# Wait update delay
self.log.info("Waiting " + str(delayTime) + "s...")
self.stopExecution.wait(delayTime)
self.log.info("Greger Update Agent (GUA) execution stopped!")
| [
5,
6,
7,
8,
11
] |
594 | 9bd659bb3bf812e48710f625bb65a848d3a8d074 | <mask token>
def author():
return ''
def student_id():
return ''
<mask token>
def find_words(pattern, words, scoring_f, minlen, maxlen):
patternCopy = pattern
bestWord = '', 0
bestState = [('', 0), [], []]
toConsider = ''
possibleWords = []
length = minlen
wordDict = {}
beg_point = 0
states = []
if len(pattern) < minlen:
return pattern
for w in words:
if len(w) in wordDict:
wordDict[len(w)] += [w]
else:
wordDict[len(w)] = [w]
while len(patternCopy) > 1:
if length in wordDict:
for w in wordDict[length]:
snip = patternCopy[:length]
for p in range(len(snip)):
if patternCopy[p] != '-' and patternCopy[p] != w[p]:
toConsider = ''
break
toConsider = w
try:
if (patternCopy[len(toConsider)] == '-' and toConsider !=
'' and toConsider not in possibleWords):
if scoring_f(toConsider) > bestWord[1]:
bestWord = toConsider, scoring_f(toConsider)
except:
break
if length == maxlen:
patternCopy = patternCopy[1:]
leftHalf = pattern[:beg_point]
rightHalf = pattern[len(leftHalf) + len(bestWord[0]):]
beg_point += 1
if len(leftHalf) > 0 and leftHalf[-1] == '-':
states.append([bestWord, leftHalf, rightHalf])
bestWord = '', 0
length = minlen
length += 1
for s in states:
if s[0][1] > bestState[0][1]:
bestState = s
leftState = fill_words(bestState[1], words, scoring_f, minlen, maxlen)
rightState = fill_words(bestState[2], words, scoring_f, minlen, maxlen)
if len(leftState) == 0:
leftState = ''
if len(rightState) == 0:
rightState = ''
return leftState + bestState[0][0] + rightState
<mask token>
def length_squared(word):
if minlen <= len(word) <= maxlen:
return len(word) ** 2
else:
return 0
def scoring_f(w):
return length_squared(w) + scrabble_value(w)
<mask token>
| <mask token>
def author():
return ''
def student_id():
return ''
def fill_words(pattern, words, scoring_f, minlen, maxlen):
foundWords = find_words(pattern, words, scoring_f, minlen, maxlen)
foundWords = foundWords + pattern[len(foundWords):]
return foundWords
def find_words(pattern, words, scoring_f, minlen, maxlen):
patternCopy = pattern
bestWord = '', 0
bestState = [('', 0), [], []]
toConsider = ''
possibleWords = []
length = minlen
wordDict = {}
beg_point = 0
states = []
if len(pattern) < minlen:
return pattern
for w in words:
if len(w) in wordDict:
wordDict[len(w)] += [w]
else:
wordDict[len(w)] = [w]
while len(patternCopy) > 1:
if length in wordDict:
for w in wordDict[length]:
snip = patternCopy[:length]
for p in range(len(snip)):
if patternCopy[p] != '-' and patternCopy[p] != w[p]:
toConsider = ''
break
toConsider = w
try:
if (patternCopy[len(toConsider)] == '-' and toConsider !=
'' and toConsider not in possibleWords):
if scoring_f(toConsider) > bestWord[1]:
bestWord = toConsider, scoring_f(toConsider)
except:
break
if length == maxlen:
patternCopy = patternCopy[1:]
leftHalf = pattern[:beg_point]
rightHalf = pattern[len(leftHalf) + len(bestWord[0]):]
beg_point += 1
if len(leftHalf) > 0 and leftHalf[-1] == '-':
states.append([bestWord, leftHalf, rightHalf])
bestWord = '', 0
length = minlen
length += 1
for s in states:
if s[0][1] > bestState[0][1]:
bestState = s
leftState = fill_words(bestState[1], words, scoring_f, minlen, maxlen)
rightState = fill_words(bestState[2], words, scoring_f, minlen, maxlen)
if len(leftState) == 0:
leftState = ''
if len(rightState) == 0:
rightState = ''
return leftState + bestState[0][0] + rightState
<mask token>
def scrabble_value(word):
if minlen <= len(word) <= maxlen:
return sum(letter_values.get(c, 0) for c in word)
else:
return 0
def length_squared(word):
if minlen <= len(word) <= maxlen:
return len(word) ** 2
else:
return 0
def scoring_f(w):
return length_squared(w) + scrabble_value(w)
<mask token>
| <mask token>
def author():
return ''
def student_id():
return ''
def fill_words(pattern, words, scoring_f, minlen, maxlen):
foundWords = find_words(pattern, words, scoring_f, minlen, maxlen)
foundWords = foundWords + pattern[len(foundWords):]
return foundWords
def find_words(pattern, words, scoring_f, minlen, maxlen):
patternCopy = pattern
bestWord = '', 0
bestState = [('', 0), [], []]
toConsider = ''
possibleWords = []
length = minlen
wordDict = {}
beg_point = 0
states = []
if len(pattern) < minlen:
return pattern
for w in words:
if len(w) in wordDict:
wordDict[len(w)] += [w]
else:
wordDict[len(w)] = [w]
while len(patternCopy) > 1:
if length in wordDict:
for w in wordDict[length]:
snip = patternCopy[:length]
for p in range(len(snip)):
if patternCopy[p] != '-' and patternCopy[p] != w[p]:
toConsider = ''
break
toConsider = w
try:
if (patternCopy[len(toConsider)] == '-' and toConsider !=
'' and toConsider not in possibleWords):
if scoring_f(toConsider) > bestWord[1]:
bestWord = toConsider, scoring_f(toConsider)
except:
break
if length == maxlen:
patternCopy = patternCopy[1:]
leftHalf = pattern[:beg_point]
rightHalf = pattern[len(leftHalf) + len(bestWord[0]):]
beg_point += 1
if len(leftHalf) > 0 and leftHalf[-1] == '-':
states.append([bestWord, leftHalf, rightHalf])
bestWord = '', 0
length = minlen
length += 1
for s in states:
if s[0][1] > bestState[0][1]:
bestState = s
leftState = fill_words(bestState[1], words, scoring_f, minlen, maxlen)
rightState = fill_words(bestState[2], words, scoring_f, minlen, maxlen)
if len(leftState) == 0:
leftState = ''
if len(rightState) == 0:
rightState = ''
return leftState + bestState[0][0] + rightState
<mask token>
with open('words_sorted.txt', 'r', encoding='utf-8') as f:
words = [x.strip() for x in f]
words = [x for x in words if minlen <= len(x) <= maxlen]
wordset = set(words)
for word in words:
for c in word:
letter_counts[c] += 1
def scrabble_value(word):
if minlen <= len(word) <= maxlen:
return sum(letter_values.get(c, 0) for c in word)
else:
return 0
def length_squared(word):
if minlen <= len(word) <= maxlen:
return len(word) ** 2
else:
return 0
def scoring_f(w):
return length_squared(w) + scrabble_value(w)
<mask token>
| alphabet = 'abcdefghijklmnopqrstuvwxyz'
def author():
return ''
def student_id():
return ''
def fill_words(pattern, words, scoring_f, minlen, maxlen):
foundWords = find_words(pattern, words, scoring_f, minlen, maxlen)
foundWords = foundWords + pattern[len(foundWords):]
return foundWords
def find_words(pattern, words, scoring_f, minlen, maxlen):
patternCopy = pattern
bestWord = '', 0
bestState = [('', 0), [], []]
toConsider = ''
possibleWords = []
length = minlen
wordDict = {}
beg_point = 0
states = []
if len(pattern) < minlen:
return pattern
for w in words:
if len(w) in wordDict:
wordDict[len(w)] += [w]
else:
wordDict[len(w)] = [w]
while len(patternCopy) > 1:
if length in wordDict:
for w in wordDict[length]:
snip = patternCopy[:length]
for p in range(len(snip)):
if patternCopy[p] != '-' and patternCopy[p] != w[p]:
toConsider = ''
break
toConsider = w
try:
if (patternCopy[len(toConsider)] == '-' and toConsider !=
'' and toConsider not in possibleWords):
if scoring_f(toConsider) > bestWord[1]:
bestWord = toConsider, scoring_f(toConsider)
except:
break
if length == maxlen:
patternCopy = patternCopy[1:]
leftHalf = pattern[:beg_point]
rightHalf = pattern[len(leftHalf) + len(bestWord[0]):]
beg_point += 1
if len(leftHalf) > 0 and leftHalf[-1] == '-':
states.append([bestWord, leftHalf, rightHalf])
bestWord = '', 0
length = minlen
length += 1
for s in states:
if s[0][1] > bestState[0][1]:
bestState = s
leftState = fill_words(bestState[1], words, scoring_f, minlen, maxlen)
rightState = fill_words(bestState[2], words, scoring_f, minlen, maxlen)
if len(leftState) == 0:
leftState = ''
if len(rightState) == 0:
rightState = ''
return leftState + bestState[0][0] + rightState
minlen, maxlen = 4, 30
letter_values = {'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2,
'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1, 'o': 1, 'p': 3,
'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4,
'z': 10}
letter_counts = {c: (0) for c in letter_values}
with open('words_sorted.txt', 'r', encoding='utf-8') as f:
words = [x.strip() for x in f]
words = [x for x in words if minlen <= len(x) <= maxlen]
wordset = set(words)
for word in words:
for c in word:
letter_counts[c] += 1
def scrabble_value(word):
if minlen <= len(word) <= maxlen:
return sum(letter_values.get(c, 0) for c in word)
else:
return 0
def length_squared(word):
if minlen <= len(word) <= maxlen:
return len(word) ** 2
else:
return 0
def scoring_f(w):
return length_squared(w) + scrabble_value(w)
pattern = '-l-h--i-o--w--s--u--g-d-u-n-c-c--b--c-t-'
| #THIS BUILD WORKS, BUT IS VERY SLOW. CURRENTLY YIELDS A DECENT SCORE, NOT GREAT
alphabet = "abcdefghijklmnopqrstuvwxyz"
def author():
return ""
def student_id():
return ""
def fill_words(pattern,words,scoring_f,minlen,maxlen):
foundWords = find_words(pattern,words,scoring_f,minlen,maxlen)
foundWords = foundWords + pattern[len(foundWords):]
return foundWords
def find_words(pattern,words,scoring_f,minlen,maxlen):
patternCopy = pattern
bestWord=("",0)
bestState=[("",0),[],[]]
toConsider = ""
possibleWords=[]
length = minlen
wordDict = {}
beg_point = 0
states = []
if len(pattern) < minlen:
return pattern
for w in words:
if len(w) in wordDict:
wordDict[len(w)] += [w]
else:
wordDict[len(w)] = [w]
while len(patternCopy) > 1:
if length in wordDict:
for w in wordDict[length]:
snip = patternCopy[:length]
for p in range(len(snip)):
if patternCopy[p] != "-" and patternCopy[p] != w[p]:
toConsider = ""
break
toConsider = w
try:
if patternCopy[len(toConsider)] == "-" and toConsider != "" and toConsider not in possibleWords:
if scoring_f(toConsider) > bestWord[1]:
bestWord = (toConsider, scoring_f(toConsider))
except:
break
if length == maxlen:
patternCopy = patternCopy[1:]
leftHalf = pattern[:beg_point]
rightHalf = pattern[len(leftHalf) + len(bestWord[0]):]
beg_point += 1
if len(leftHalf) > 0 and leftHalf[-1] == "-":
states.append([bestWord,leftHalf,rightHalf])
bestWord = ("",0)
length = minlen
length+=1
for s in states:
if s[0][1] > bestState[0][1]:
bestState = s
leftState = fill_words(bestState[1],words,scoring_f,minlen,maxlen)
rightState = fill_words(bestState[2],words,scoring_f,minlen,maxlen)
if len(leftState) == 0:
leftState = ""
if len(rightState) == 0:
rightState = ""
return leftState + bestState[0][0] + rightState
minlen, maxlen = 4, 30
letter_values = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2,
'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1,
'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1,
'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}
letter_counts = {c: 0 for c in letter_values}
with open('words_sorted.txt', 'r', encoding='utf-8') as f:
words = [x.strip() for x in f]
words = [x for x in words if minlen <= len(x) <= maxlen]
wordset = set(words)
for word in words:
for c in word:
letter_counts[c] += 1
def scrabble_value(word):
if minlen <= len(word) <= maxlen:
return sum(letter_values.get(c, 0) for c in word)
else:
return 0
def length_squared(word):
if minlen <= len(word) <= maxlen:
return len(word) ** 2
else:
return 0
def scoring_f(w):
return length_squared(w) + scrabble_value(w)
pattern = "-l-h--i-o--w--s--u--g-d-u-n-c-c--b--c-t-"
# print(pattern)
# print(fill_words(pattern,words,scoring_f,minlen,maxlen)) | [
5,
7,
8,
9,
10
] |
595 | 3212bb7df990ad7d075b8ca49a99e1072eab2a90 | <mask token>
| <mask token>
def flask_adapter(request: any, api_route: Type[Route]) ->any:
"""Adapter pattern for Flask
:param - Flask Request
:api_route: Composite Routes
"""
try:
query_string_params = request.args.to_dict()
if 'account_id' in query_string_params.keys():
body = None
query_string_params['account_id'] = int(query_string_params[
'account_id'])
else:
body = request.json
except:
http_error = HttpErrors.error_400()
return HttpResponse(status_code=http_error['status_code'], body=
http_error['body'])
http_request = HttpRequest(header=request.headers, body=body, query=
query_string_params)
try:
response = api_route.route(http_request)
except IntegrityError:
http_error = HttpErrors.error_400()
return HttpResponse(status_code=http_error['status_code'], body=
http_error['body'])
except Exception as exc:
print(exc)
http_error = HttpErrors.error_500()
return HttpResponse(status_code=http_error['status_code'], body=
http_error['body'])
return response
| from typing import Type
from sqlalchemy.exc import IntegrityError
from src.main.interface import RouteInterface as Route
from src.presenters.helpers import HttpRequest, HttpResponse
from src.presenters.errors import HttpErrors
def flask_adapter(request: any, api_route: Type[Route]) ->any:
"""Adapter pattern for Flask
:param - Flask Request
:api_route: Composite Routes
"""
try:
query_string_params = request.args.to_dict()
if 'account_id' in query_string_params.keys():
body = None
query_string_params['account_id'] = int(query_string_params[
'account_id'])
else:
body = request.json
except:
http_error = HttpErrors.error_400()
return HttpResponse(status_code=http_error['status_code'], body=
http_error['body'])
http_request = HttpRequest(header=request.headers, body=body, query=
query_string_params)
try:
response = api_route.route(http_request)
except IntegrityError:
http_error = HttpErrors.error_400()
return HttpResponse(status_code=http_error['status_code'], body=
http_error['body'])
except Exception as exc:
print(exc)
http_error = HttpErrors.error_500()
return HttpResponse(status_code=http_error['status_code'], body=
http_error['body'])
return response
| from typing import Type
from sqlalchemy.exc import IntegrityError
from src.main.interface import RouteInterface as Route
from src.presenters.helpers import HttpRequest, HttpResponse
from src.presenters.errors import HttpErrors
def flask_adapter(request: any, api_route: Type[Route]) -> any:
"""Adapter pattern for Flask
:param - Flask Request
:api_route: Composite Routes
"""
try:
query_string_params = request.args.to_dict()
if "account_id" in query_string_params.keys():
body = None
query_string_params["account_id"] = int(query_string_params["account_id"])
else:
body = request.json
except:
http_error = HttpErrors.error_400()
return HttpResponse(
status_code=http_error["status_code"], body=http_error["body"]
)
http_request = HttpRequest(
header=request.headers, body=body, query=query_string_params
)
try:
response = api_route.route(http_request)
except IntegrityError:
http_error = HttpErrors.error_400()
return HttpResponse(
status_code=http_error["status_code"], body=http_error["body"]
)
except Exception as exc:
print(exc)
http_error = HttpErrors.error_500()
return HttpResponse(
status_code=http_error["status_code"], body=http_error["body"]
)
return response
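
# A hypothetical wiring sketch (RegisterAccountRoute and the URL are assumptions,
# not part of this module) showing how a Flask view could hand a request to the adapter:
#
#   from flask import Flask, request
#   app = Flask(__name__)
#
#   @app.route('/api/accounts', methods=['GET', 'POST'])
#   def accounts():
#       response = flask_adapter(request=request, api_route=RegisterAccountRoute())
#       return response.body, response.status_code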
| null | [
0,
1,
2,
3
] |
596 | 3a65565af4c55fa5479e323a737c48f7f2fdb8ce | <mask token>
| <mask token>
print(f.read())
<mask token>
| <mask token>
f = open('1.txt', 'r', encoding='utf-8')
print(f.read())
<mask token>
| '''
Python's open() function opens a file and creates a file object; only then can the related methods be called to read and write it.
For more file operations, see: Python File I/O.
Function syntax
open(name[, mode[, buffering]])
Parameter description:
name : a string value containing the name of the file you want to access.
mode : mode determines how the file is opened: read-only, write, append, and so on. The complete list of possible values is given below. This parameter is optional; the default access mode is read-only (r).
buffering : if buffering is set to 0, no buffering takes place. If buffering is set to 1, lines are buffered when the file is accessed. If buffering is set to an integer greater than 1, that integer is the size of the buffer. A negative value means the system default buffer size is used.
Complete list of file-opening modes:
Mode   Description
r      Open the file for reading only. The file pointer is placed at the beginning of the file. This is the default mode.
rb     Open a file in binary format for reading only. The file pointer is placed at the beginning of the file. This is the default mode.
r+     Open a file for reading and writing. The file pointer is placed at the beginning of the file.
rb+    Open a file in binary format for reading and writing. The file pointer is placed at the beginning of the file.
w      Open a file for writing only. If the file exists, it is opened and edited from the beginning, i.e. the existing content is deleted. If the file does not exist, a new file is created.
wb     Open a file in binary format for writing only. If the file exists, it is opened and edited from the beginning, i.e. the existing content is deleted. If the file does not exist, a new file is created.
w+     Open a file for reading and writing. If the file exists, it is opened and edited from the beginning, i.e. the existing content is deleted. If the file does not exist, a new file is created.
wb+    Open a file in binary format for reading and writing. If the file exists, it is opened and edited from the beginning, i.e. the existing content is deleted. If the file does not exist, a new file is created.
a      Open a file for appending. If the file exists, the file pointer is placed at the end of the file, so new content is written after the existing content. If the file does not exist, a new file is created for writing.
ab     Open a file in binary format for appending. If the file exists, the file pointer is placed at the end of the file, so new content is written after the existing content. If the file does not exist, a new file is created for writing.
a+     Open a file for reading and writing. If the file exists, the file pointer is placed at the end of the file; the file is opened in append mode. If the file does not exist, a new file is created for reading and writing.
ab+    Open a file in binary format for appending. If the file exists, the file pointer is placed at the end of the file. If the file does not exist, a new file is created for reading and writing.
file object methods
file.read([size]): if size is unspecified, returns the whole file (problematic if the file is larger than about twice the available memory); f.read() returns "" (an empty string) at end of file.
file.readline(): returns one line.
file.readlines([size]): returns a list of size lines; if size is unspecified, all lines are returned.
for line in f: print line : access the file through its iterator.
f.write("hello\n"): to write data other than strings, convert it to a string first.
f.tell(): returns an integer indicating the current position of the file pointer (the number of bytes from the start of the file).
f.seek(offset, [whence]): moves the file pointer.
offset: in bytes; may be positive or negative
whence: 0 - start of file (the default); 1 - current position; 2 - end of file
f.close() closes the file
open(filename [, mode [, bufsize]])
Opens a file and returns a file object. Raises an IOError if the file cannot be opened.
open() should be used instead of calling the file type's constructor directly.
The filename parameter is the path string of the file to be opened;
the mode parameter is the opening mode; the most common modes are 'r' for reading text, 'w' for writing a text file, and 'a' for appending to a file.
The default value of mode is 'r'.
When operating on binary files, simply add 'b' to the mode value; this improves the program's portability.
The optional bufsize parameter defines the size of the file buffer. 0 means unbuffered; 1 means line-buffered; any other positive number means a buffer of that size;
a negative number means the system default buffer size is used, which is usually line buffering for tty devices and full buffering for other files. If the parameter is omitted,
the system default is used.
'''
f=open('1.txt','r',encoding='utf-8')
print(f.read())
'''
Output...
ltf
zhongguo
shanxi
yuncheng
男
20
'''
# Reference blog: https://www.cnblogs.com/Devilf/p/8006663.html
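# A minimal runnable sketch of the modes and pointer methods described above
# ('demo.txt' is a hypothetical scratch file; assumes a writable working directory):
with open('demo.txt', 'w+', encoding='utf-8') as demo:
    demo.write('hello\n')   # write() needs string data; convert other types first
    demo.seek(0)            # move the file pointer back to the start (whence 0 = file head)
    print(demo.read())      # reads the whole file -> 'hello\n'
    print(demo.tell())      # pointer position in bytes from the start of the file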
| null | [
0,
1,
2,
3
] |
597 | c0348fc5f51e6f7a191fea6d0e3cb84c60b03e22 | '''
Problem statement
Given two numbers a and b, print the hypotenuse of the right triangle whose legs are a and b.
'''
import math
a = int(input())
b = int(input())
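# e.g. legs 3 and 4 give sqrt(3*3 + 4*4) = sqrt(25) = 5.0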
print(math.sqrt(a * a + b * b)) | null | null | null | null | [
0
] |
598 | 8ac84aa29e9e4f3b85f1b3c27819feb5f41e8d8e | <mask token>
def freqModAvgFunc(dirName):
fullList = factorStatFileCreator.directoryFreq(dirName)
UA = dirName.split('/')[1]
avgList = []
sum = 0
i = 0
while i <= len(fullList) - 2:
diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])
if diff == None:
i += 1
else:
avgList.append(int(diff))
i += 1
for item in avgList:
sum += item
if len(avgList) != 0:
if UA not in freqAgentDic.keys():
freqAgentDic[UA] = [sum / len(avgList)]
else:
agentList = freqAgentDic[UA]
agentList.append(sum / len(avgList))
freqAgentDic[UA] = agentList
<mask token>
def printFreqDiff():
finalFreqFunc(dirName)
finalFreqFunc(dirName2)
for keys, vals in freqAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
freqAgentDic[keys] = score
return freqAgentDic
<mask token>
def finalLenFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
avgModFunc(file)
def printLenDiff():
finalLenFunc(dirName)
finalLenFunc(dirName2)
for keys, vals in lenAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[1] / vals[0]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
lenAgentDic[keys] = score
return lenAgentDic
def directoryModCont(directory):
contentSet = set()
newSet = set()
listHolder = []
numofReq = 0
UA = directory.split('/')[1]
for filename in os.listdir(directory):
file = directory + '/' + filename
listHolder = factorStatFileCreator.contentCommand(file)
newSet = listHolder[0]
numofReq += len(listHolder[1])
contentSet = contentSet | newSet
newSet = set()
if UA not in contAgentDic.keys():
contAgentDic[UA] = [numofReq]
else:
agentList = contAgentDic[UA]
agentList.append(numofReq)
contAgentDic[UA] = agentList
return contentSet, numofReq
def finalContFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
directoryModCont(file)
def printContDiff():
finalContFunc(dirName)
finalContFunc(dirName2)
for keys, vals in contAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
contAgentDic[keys] = score
return contAgentDic
| <mask token>
def freqModAvgFunc(dirName):
fullList = factorStatFileCreator.directoryFreq(dirName)
UA = dirName.split('/')[1]
avgList = []
sum = 0
i = 0
while i <= len(fullList) - 2:
diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])
if diff == None:
i += 1
else:
avgList.append(int(diff))
i += 1
for item in avgList:
sum += item
if len(avgList) != 0:
if UA not in freqAgentDic.keys():
freqAgentDic[UA] = [sum / len(avgList)]
else:
agentList = freqAgentDic[UA]
agentList.append(sum / len(avgList))
freqAgentDic[UA] = agentList
def finalFreqFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
freqModAvgFunc(file)
def printFreqDiff():
finalFreqFunc(dirName)
finalFreqFunc(dirName2)
for keys, vals in freqAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
freqAgentDic[keys] = score
return freqAgentDic
<mask token>
def finalLenFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
avgModFunc(file)
def printLenDiff():
finalLenFunc(dirName)
finalLenFunc(dirName2)
for keys, vals in lenAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[1] / vals[0]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
lenAgentDic[keys] = score
return lenAgentDic
def directoryModCont(directory):
contentSet = set()
newSet = set()
listHolder = []
numofReq = 0
UA = directory.split('/')[1]
for filename in os.listdir(directory):
file = directory + '/' + filename
listHolder = factorStatFileCreator.contentCommand(file)
newSet = listHolder[0]
numofReq += len(listHolder[1])
contentSet = contentSet | newSet
newSet = set()
if UA not in contAgentDic.keys():
contAgentDic[UA] = [numofReq]
else:
agentList = contAgentDic[UA]
agentList.append(numofReq)
contAgentDic[UA] = agentList
return contentSet, numofReq
def finalContFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
directoryModCont(file)
def printContDiff():
finalContFunc(dirName)
finalContFunc(dirName2)
for keys, vals in contAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
contAgentDic[keys] = score
return contAgentDic
| <mask token>
def freqModAvgFunc(dirName):
fullList = factorStatFileCreator.directoryFreq(dirName)
UA = dirName.split('/')[1]
avgList = []
sum = 0
i = 0
while i <= len(fullList) - 2:
diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])
if diff == None:
i += 1
else:
avgList.append(int(diff))
i += 1
for item in avgList:
sum += item
if len(avgList) != 0:
if UA not in freqAgentDic.keys():
freqAgentDic[UA] = [sum / len(avgList)]
else:
agentList = freqAgentDic[UA]
agentList.append(sum / len(avgList))
freqAgentDic[UA] = agentList
def finalFreqFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
freqModAvgFunc(file)
def printFreqDiff():
finalFreqFunc(dirName)
finalFreqFunc(dirName2)
for keys, vals in freqAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
freqAgentDic[keys] = score
return freqAgentDic
def avgModFunc(directory):
sum = 0
UA = directory.split('/')[1]
byteList = factorStatFileCreator.directoryLen(directory)
for item in byteList:
sum += item
if len(byteList) != 0:
if UA not in lenAgentDic.keys():
lenAgentDic[UA] = [sum / len(byteList)]
else:
agentList = lenAgentDic[UA]
agentList.append(sum / len(byteList))
lenAgentDic[UA] = agentList
def finalLenFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
avgModFunc(file)
def printLenDiff():
finalLenFunc(dirName)
finalLenFunc(dirName2)
for keys, vals in lenAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[1] / vals[0]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
lenAgentDic[keys] = score
return lenAgentDic
def directoryModCont(directory):
contentSet = set()
newSet = set()
listHolder = []
numofReq = 0
UA = directory.split('/')[1]
for filename in os.listdir(directory):
file = directory + '/' + filename
listHolder = factorStatFileCreator.contentCommand(file)
newSet = listHolder[0]
numofReq += len(listHolder[1])
contentSet = contentSet | newSet
newSet = set()
if UA not in contAgentDic.keys():
contAgentDic[UA] = [numofReq]
else:
agentList = contAgentDic[UA]
agentList.append(numofReq)
contAgentDic[UA] = agentList
return contentSet, numofReq
def finalContFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
directoryModCont(file)
def printContDiff():
finalContFunc(dirName)
finalContFunc(dirName2)
for keys, vals in contAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
contAgentDic[keys] = score
return contAgentDic
| <mask token>
dirName = 'NoPerms/'
dirName2 = 'AllPerms/'
freqAgentDic = dict()
lenAgentDic = dict()
contAgentDic = dict()
def freqModAvgFunc(dirName):
fullList = factorStatFileCreator.directoryFreq(dirName)
UA = dirName.split('/')[1]
avgList = []
sum = 0
i = 0
while i <= len(fullList) - 2:
diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])
if diff == None:
i += 1
else:
avgList.append(int(diff))
i += 1
for item in avgList:
sum += item
if len(avgList) != 0:
if UA not in freqAgentDic.keys():
freqAgentDic[UA] = [sum / len(avgList)]
else:
agentList = freqAgentDic[UA]
agentList.append(sum / len(avgList))
freqAgentDic[UA] = agentList
def finalFreqFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
freqModAvgFunc(file)
def printFreqDiff():
finalFreqFunc(dirName)
finalFreqFunc(dirName2)
for keys, vals in freqAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
freqAgentDic[keys] = score
return freqAgentDic
def avgModFunc(directory):
sum = 0
UA = directory.split('/')[1]
byteList = factorStatFileCreator.directoryLen(directory)
for item in byteList:
sum += item
if len(byteList) != 0:
if UA not in lenAgentDic.keys():
lenAgentDic[UA] = [sum / len(byteList)]
else:
agentList = lenAgentDic[UA]
agentList.append(sum / len(byteList))
lenAgentDic[UA] = agentList
def finalLenFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
avgModFunc(file)
def printLenDiff():
finalLenFunc(dirName)
finalLenFunc(dirName2)
for keys, vals in lenAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[1] / vals[0]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
lenAgentDic[keys] = score
return lenAgentDic
def directoryModCont(directory):
contentSet = set()
newSet = set()
listHolder = []
numofReq = 0
UA = directory.split('/')[1]
for filename in os.listdir(directory):
file = directory + '/' + filename
listHolder = factorStatFileCreator.contentCommand(file)
newSet = listHolder[0]
numofReq += len(listHolder[1])
contentSet = contentSet | newSet
newSet = set()
if UA not in contAgentDic.keys():
contAgentDic[UA] = [numofReq]
else:
agentList = contAgentDic[UA]
agentList.append(numofReq)
contAgentDic[UA] = agentList
return contentSet, numofReq
def finalContFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
directoryModCont(file)
def printContDiff():
finalContFunc(dirName)
finalContFunc(dirName2)
for keys, vals in contAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print('{:<15}: {:.2f}'.format(keys, score))
else:
score = 'N/A'
print('{:<15}: {}'.format(keys, score))
contAgentDic[keys] = score
return contAgentDic
| import os
import factorStatFileCreator
dirName = 'NoPerms/'
dirName2 = 'AllPerms/'
freqAgentDic = dict()
lenAgentDic = dict()
contAgentDic = dict()
def freqModAvgFunc(dirName):
fullList = factorStatFileCreator.directoryFreq(dirName)
UA = dirName.split("/")[1]
avgList = []
sum = 0
i = 0
while i <= len(fullList) - 2:
diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i+1])
if diff == None:
i+=1
else:
avgList.append(int(diff))
i+=1
for item in avgList:
sum += item
if len(avgList) != 0:
if UA not in freqAgentDic.keys():
freqAgentDic[UA] = [sum/len(avgList)]
else:
agentList = freqAgentDic[UA]
agentList.append(sum/len(avgList))
freqAgentDic[UA] = agentList
def finalFreqFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
freqModAvgFunc(file)
def printFreqDiff():
finalFreqFunc(dirName)
finalFreqFunc(dirName2)
#print (freqAgentDic)
for keys, vals in freqAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print ("{:<15}: {:.2f}".format(keys,score))
else:
score = "N/A"
print ("{:<15}: {}".format(keys,score))
freqAgentDic[keys] = score
return (freqAgentDic)
def avgModFunc(directory):
sum = 0
UA = directory.split("/")[1]
byteList = factorStatFileCreator.directoryLen(directory)
for item in byteList:
sum += item
if len(byteList) != 0:
if UA not in lenAgentDic.keys():
lenAgentDic[UA] = [sum/len(byteList)]
else:
agentList = lenAgentDic[UA]
agentList.append(sum/len(byteList))
lenAgentDic[UA] = agentList
def finalLenFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
avgModFunc(file)
def printLenDiff():
finalLenFunc(dirName)
finalLenFunc(dirName2)
for keys, vals in lenAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[1] / vals[0]
print ("{:<15}: {:.2f}".format(keys,score))
else:
score = "N/A"
print ("{:<15}: {}".format(keys,score))
lenAgentDic[keys] = score
return lenAgentDic
def directoryModCont(directory):
contentSet = set()
newSet = set()
listHolder = []
numofReq = 0
UA = directory.split("/")[1]
for filename in os.listdir(directory):
file = directory + '/' + filename
listHolder = factorStatFileCreator.contentCommand(file)
#print(newSet)
newSet = listHolder[0]
numofReq += len(listHolder[1])
contentSet = contentSet|newSet
newSet = set()
if UA not in contAgentDic.keys():
contAgentDic[UA] = [numofReq]
else:
agentList = contAgentDic[UA]
agentList.append(numofReq)
contAgentDic[UA] = agentList
return contentSet, numofReq
def finalContFunc(dirName):
for filename in os.listdir(dirName):
file = dirName + filename
directoryModCont(file)
def printContDiff():
finalContFunc(dirName)
finalContFunc(dirName2)
for keys, vals in contAgentDic.items():
if len(vals) > 1 and vals[1] > 0:
score = vals[0] / vals[1]
print ("{:<15}: {:.2f}".format(keys,score))
else:
score = "N/A"
print ("{:<15}: {}".format(keys,score))
contAgentDic[keys] = score
return contAgentDic
| [
7,
8,
9,
10,
12
] |
599 | f733885eed5d1cbf6e49db0997655ad627c9d795 | <mask token>
| <mask token>
@register.filter(name='range')
def filter_range(start, end=None):
if end is None:
return range(start)
else:
return range(start, end)
| <mask token>
register = template.Library()
@register.filter(name='range')
def filter_range(start, end=None):
if end is None:
return range(start)
else:
return range(start, end)
| from django import template
register = template.Library()
@register.filter(name='range')
def filter_range(start, end=None):
if end is None:
return range(start)
else:
return range(start, end)
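
# Template usage sketch (module name is an assumption; this file must live in an
# app's templatetags package and be loaded under its module name):
#
#   {% load range_filters %}
#   {% for i in 5|range %}{{ i }}{% endfor %}      {# yields 0 1 2 3 4 #}
#   {% for i in 2|range:7 %}{{ i }}{% endfor %}    {# yields 2 3 4 5 6 #}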
| null | [
0,
1,
2,
3
] |