| file_name (string, 4-140 chars) | prefix (string, 0-12.1k chars) | suffix (string, 0-12k chars) | middle (string, 0-7.51k chars) | fim_type (string, 4 classes) |
---|---|---|---|---|
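Each row pairs a `prefix`, `suffix`, and `middle` cut from one source file, and `fim_type` labels the kind of span held in `middle` (the values visible in this preview are `identifier_name`, `identifier_body`, `conditional_block`, and `random_line_split`). As a minimal sketch, assuming the usual fill-in-the-middle convention that the three pieces concatenate back into the original file and a hypothetical dict-like row object, reconstruction looks like:

```python
def reconstruct(row: dict) -> str:
    # The middle is the span a FIM model is asked to fill in,
    # given the surrounding prefix and suffix as context.
    return row["prefix"] + row["middle"] + row["suffix"]
```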
application.py | ():
return make_response(render_template("index.html"))
@application.route("/getGraph", methods=["POST", "GET"])
def getgraph():
#POST method: responsible for obtaining the data in JSON format from the server.
#The server expects a 'data' field containing the name of a file that exists on the server in the /static/jsons/ folder
#If the file is not found it returns a 404
#If the 'data' field is missing it returns a 400
if request.method == "POST":
if('data' in request.form):
if(path.exists("static/jsons/" + request.form['data'] + ".json")):
with open("static/jsons/" + request.form['data'] + ".json", "r") as file:
jsonStr = file.read()
jsonStr = json.loads(jsonStr)
return jsonify(jsonStr)
else:
return "<h1>404 NOT FOUND"
else:
return "<h1>400 BAD REQUEST"
else:
#GET method:
#expects a 'graph' field containing one of the names listed below.
#For mf and emig it also expects a second field specifying
#the university or the province.
#In addition, iscrittiAtn and mf MAY (but do not have to) include
#an extra field filtering the data for a specific year or a specific sex
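#For illustration only - hypothetical parameter values; the endpoint, field and graph
#names are taken from the surrounding code:
#  POST /getGraph  with form field data=<jsonFileName>   -> returns that JSON file
#  GET  /getGraph?graph=pagaOra
#  GET  /getGraph?graph=iscrittiAtn&sex=1
#  GET  /getGraph?graph=mf&atn=Padova&year=2018
#  GET  /getGraph?graph=emig&prov=Venezia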
if 'graph' in request.args:
# HBar graph for the provincial hourly wage by education level
if(request.args['graph'] == "pagaOra"):
return make_response(render_template("graphs/pagaOra.html"))
# Line graph for university enrolment in Veneto by year
elif(request.args['graph'] == "iscrittiAtn"):
if('sex' in request.args):
return make_response(render_template("graphs/iscrittiAtn.html", sex=int(request.args['sex'])))
else:
return make_response(render_template("graphs/iscrittiAtn.html", sex=0))
elif(request.args['graph'] == "disoccupati"):
return make_response(render_template("graphs/disoccupatiGraph.html"))
elif(request.args['graph'] == "iscrittiProv"):
return make_response(render_template("graphs/iscrittiProv.html"))
# Donut graph for the male/female distribution across Veneto universities
elif(request.args['graph'] == "mf" and 'atn' in request.args):
dir = "graphs/mf/mf" + request.args['atn'] + ".html"
print(dir)
if(path.exists("templates/" + dir)):
if('year' in request.args):
return make_response(render_template(dir, year=int(request.args['year'])))
else:
return make_response(render_template(dir, year=0))
# Polar area graph for students who moved to other regions
elif(request.args['graph'] == "emig" and "prov" in request.args):
dir = "graphs/emig/iscrittiEmig" + \
request.args['prov'] + ".html"
if(path.exists("templates/" + dir)):
return make_response(render_template(dir))
return "<h1>400 BAD REQUEST"
#To update the datasets:
#Because of an error made by MIUR when generating the per-university enrolment file, that file
#cannot be downloaded dynamically and has to be replaced manually.
#Likewise, the data obtained from ISTAT cannot be downloaded dynamically through its API because
#its performance is limited (and it does not allow the filters needed to obtain the files).
#The provinces dataset is updated automatically every week. The others must be replaced manually.
#The static datasets go in the /static/notUpdating/ folder
#The per-university enrolment dataset must be downloaded from this link http://dati.ustat.miur.it/dataset/3dd9ca7f-9cc9-4a1a-915c-e569b181dbd5/resource/32d26e28-a0b5-45f3-9152-6072164f3e63/download/iscrittixateneo.csv
#and renamed iscrittiAteneo.csv
#The dataset about students who moved out of the region was built manually from other data and cannot be updated
#The datasets for the unemployment rate and the average hourly wage are available from this portal http://dati.istat.it/
#Unfortunately the site's search function is very slow and limited; in any case the two datasets are "Tasso di Disoccupazione - Dati Provinciali"
#and "Retribuzione oraria media per titolo di studio". In both cases the results must be filtered to the Veneto provinces only.
#The files must be renamed retribuzioneMedia.csv and taxDisocc.csv
#Fortunately, they are only updated once a year
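# --- Illustrative sketch, not part of the original app: one possible way to fetch the
# --- MIUR CSV described above and drop it into /static/notUpdating/. It assumes the
# --- `requests` package is available; the original project replaces this file manually.
def _download_iscritti_ateneo_sketch():
    import requests  # assumption: requests is installed in the environment
    url = ("http://dati.ustat.miur.it/dataset/3dd9ca7f-9cc9-4a1a-915c-e569b181dbd5/"
           "resource/32d26e28-a0b5-45f3-9152-6072164f3e63/download/iscrittixateneo.csv")
    resp = requests.get(url)
    resp.raise_for_status()  # fail loudly if the download did not succeed
    with open('static/notUpdating/iscrittiAteneo.csv', 'wb') as out:
        out.write(resp.content)  # saved under the name updateData() expects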
@application.route("/doUpdate")
def updateData():
#Per-university enrolment file
#The data is put into a dictionary as arrays; the format is described below
with open('static/notUpdating/iscrittiAteneo.csv', newline='') as f: #The file name can be changed here if needed, as long as it is a valid CSV
reader = csv.reader(f)
data = list(reader)[1:]
iscrittiAteneo = {
'Venezia CF': [],
'Verona': [],
'Venezia IUAV': [],
'Padova': []}
for row in data:
row = row[0].split(';')
if row[1] == 'Padova' or 'Venezia C' in row[1] or row[1] == 'Venezia Iuav' or row[1] == 'Verona':
tmp = row[1]
if 'Venezia C' in row[1]:
tmp = 'Venezia CF'
if tmp == 'Venezia Iuav':
tmp = 'Venezia IUAV'
iscrittiAteneo[tmp].append(
row[0] + ';' + row[3] + ';' + row[4])
iscrittiAteneoJson = json.dumps(iscrittiAteneo)
# Formato: {"nomeAteneo" : ["annoScolastico;numeroIscrittiMaschi;numeroIscrittiFemmine",...,...],...,...}
open('static/jsons/iscrittiAteneo.json',
"wb").write(iscrittiAteneoJson.encode())
# File of students who moved to other regions
with open('static/notUpdating/iscrittiEmig.json', newline='') as f: #The file name can be changed here if needed, as long as it is valid JSON
reader = json.load(f)
iscrittiEmig = {
'vicenza': [],
'verona': [],
'venezia': [],
'padova': [],
'treviso': [],
'belluno': [],
'rovigo': []}
for row in reader['records']:
if row[4].lower() == 'padova' or row[4].lower() == 'vicenza' or row[4].lower() == 'venezia' or row[4].lower() == 'verona' or row[4].lower() == 'treviso' or row[4].lower() == 'belluno' or row[4].lower() == 'rovigo':
iscrittiEmig[row[4].lower()].append(
row[1] + ';' + row[4] + ';' + row[2] + ';' + str(row[6]))
lista = {
'vicenza': [],
'verona': [],
'venezia': [],
'padova': [],
'treviso': [],
'belluno': [],
'rovigo': []
}
count = 0
for key in iscrittiEmig.keys():
while len(iscrittiEmig[key]) > 2:
tmp = iscrittiEmig[key].pop(0).split(';')
if count == 0:
count = int(tmp[3])
tmp2 = iscrittiEmig[key][0].split(';')[2]
if tmp[2] == tmp2:
count += int(tmp[3])
else:
lista[tmp[1].lower()].append(
tmp[0] + ';' + tmp | index | identifier_name |
|
application.py | # HBar graph for the provincial hourly wage by education level
if(request.args['graph'] == "pagaOra"):
return make_response(render_template("graphs/pagaOra.html"))
# Line graph for university enrolment in Veneto by year
elif(request.args['graph'] == "iscrittiAtn"):
if('sex' in request.args):
return make_response(render_template("graphs/iscrittiAtn.html", sex=int(request.args['sex'])))
else:
return make_response(render_template("graphs/iscrittiAtn.html", sex=0))
elif(request.args['graph'] == "disoccupati"):
return make_response(render_template("graphs/disoccupatiGraph.html"))
elif(request.args['graph'] == "iscrittiProv"):
return make_response(render_template("graphs/iscrittiProv.html"))
# Donut graph for the male/female distribution across Veneto universities
elif(request.args['graph'] == "mf" and 'atn' in request.args):
dir = "graphs/mf/mf" + request.args['atn'] + ".html"
print(dir)
if(path.exists("templates/" + dir)):
if('year' in request.args):
return make_response(render_template(dir, year=int(request.args['year'])))
else:
return make_response(render_template(dir, year=0))
# Polar area graph for students who moved to other regions
elif(request.args['graph'] == "emig" and "prov" in request.args):
dir = "graphs/emig/iscrittiEmig" + \
request.args['prov'] + ".html"
if(path.exists("templates/" + dir)):
return make_response(render_template(dir))
return "<h1>400 BAD REQUEST"
#To update the datasets:
#Because of an error made by MIUR when generating the per-university enrolment file, that file
#cannot be downloaded dynamically and has to be replaced manually.
#Likewise, the data obtained from ISTAT cannot be downloaded dynamically through its API because
#its performance is limited (and it does not allow the filters needed to obtain the files).
#The provinces dataset is updated automatically every week. The others must be replaced manually.
#The static datasets go in the /static/notUpdating/ folder
#The per-university enrolment dataset must be downloaded from this link http://dati.ustat.miur.it/dataset/3dd9ca7f-9cc9-4a1a-915c-e569b181dbd5/resource/32d26e28-a0b5-45f3-9152-6072164f3e63/download/iscrittixateneo.csv
#and renamed iscrittiAteneo.csv
#The dataset about students who moved out of the region was built manually from other data and cannot be updated
#The datasets for the unemployment rate and the average hourly wage are available from this portal http://dati.istat.it/
#Unfortunately the site's search function is very slow and limited; in any case the two datasets are "Tasso di Disoccupazione - Dati Provinciali"
#and "Retribuzione oraria media per titolo di studio". In both cases the results must be filtered to the Veneto provinces only.
#The files must be renamed retribuzioneMedia.csv and taxDisocc.csv
#Fortunately, they are only updated once a year
@application.route("/doUpdate")
def updateData():
#Per-university enrolment file
#The data is put into a dictionary as arrays; the format is described below
with open('static/notUpdating/iscrittiAteneo.csv', newline='') as f: #The file name can be changed here if needed, as long as it is a valid CSV
reader = csv.reader(f)
data = list(reader)[1:]
iscrittiAteneo = {
'Venezia CF': [],
'Verona': [],
'Venezia IUAV': [],
'Padova': []}
for row in data:
row = row[0].split(';')
if row[1] == 'Padova' or 'Venezia C' in row[1] or row[1] == 'Venezia Iuav' or row[1] == 'Verona':
tmp = row[1]
if 'Venezia C' in row[1]:
tmp = 'Venezia CF'
if tmp == 'Venezia Iuav':
tmp = 'Venezia IUAV'
iscrittiAteneo[tmp].append(
row[0] + ';' + row[3] + ';' + row[4])
iscrittiAteneoJson = json.dumps(iscrittiAteneo)
# Formato: {"nomeAteneo" : ["annoScolastico;numeroIscrittiMaschi;numeroIscrittiFemmine",...,...],...,...}
open('static/jsons/iscrittiAteneo.json',
"wb").write(iscrittiAteneoJson.encode())
# File of students who moved to other regions
with open('static/notUpdating/iscrittiEmig.json', newline='') as f: #The file name can be changed here if needed, as long as it is valid JSON
reader = json.load(f)
iscrittiEmig = {
'vicenza': [],
'verona': [],
'venezia': [],
'padova': [],
'treviso': [],
'belluno': [],
'rovigo': []}
for row in reader['records']:
if row[4].lower() == 'padova' or row[4].lower() == 'vicenza' or row[4].lower() == 'venezia' or row[4].lower() == 'verona' or row[4].lower() == 'treviso' or row[4].lower() == 'belluno' or row[4].lower() == 'rovigo':
iscrittiEmig[row[4].lower()].append(
row[1] + ';' + row[4] + ';' + row[2] + ';' + str(row[6]))
lista = {
'vicenza': [],
'verona': [],
'venezia': [],
'padova': [],
'treviso': [],
'belluno': [],
'rovigo': []
}
count = 0
for key in iscrittiEmig.keys():
while len(iscrittiEmig[key]) > 2:
tmp = iscrittiEmig[key].pop(0).split(';')
if count == 0:
count = int(tmp[3])
tmp2 = iscrittiEmig[key][0].split(';')[2]
if tmp[2] == tmp2:
count += int(tmp[3])
else:
lista[tmp[1].lower()].append(
tmp[0] + ';' + tmp[2] + ';' + str(count))
count = 0
iscrittiEmigJson = json.dumps(lista)
# Formato: {"cittàInMinuscolo" : ["annoScolastico;CittàDiProvenienzaInMaiuscolo;RegioneDiEsodo;NumeroStudenti",...,...],...,...}
open('static/jsons/iscrittiEmig.json',
"wb").write(iscrittiEmigJson.encode())
# Average hourly wage file | if request.method == "POST":
if('data' in request.form):
if(path.exists("static/jsons/" + request.form['data'] + ".json")):
with open("static/jsons/" + request.form['data'] + ".json", "r") as file:
jsonStr = file.read()
jsonStr = json.loads(jsonStr)
return jsonify(jsonStr)
else:
return "<h1>404 NOT FOUND"
else:
return "<h1>400 BAD REQUEST"
else:
#GET method:
#expects a 'graph' field containing one of the names listed below.
#For mf and emig it also expects a second field specifying
#the university or the province.
#In addition, iscrittiAtn and mf MAY (but do not have to) include
#an extra field filtering the data for a specific year or a specific sex
if 'graph' in request.args:
| identifier_body |
|
application.py | ("templates/" + dir)):
if('year' in request.args):
return make_response(render_template(dir, year=int(request.args['year'])))
else:
return make_response(render_template(dir, year=0))
# Polar area graph for students who moved to other regions
elif(request.args['graph'] == "emig" and "prov" in request.args):
dir = "graphs/emig/iscrittiEmig" + \
request.args['prov'] + ".html"
if(path.exists("templates/" + dir)):
return make_response(render_template(dir))
return "<h1>400 BAD REQUEST"
#To update the datasets:
#Because of an error made by MIUR when generating the per-university enrolment file, that file
#cannot be downloaded dynamically and has to be replaced manually.
#Likewise, the data obtained from ISTAT cannot be downloaded dynamically through its API because
#its performance is limited (and it does not allow the filters needed to obtain the files).
#The provinces dataset is updated automatically every week. The others must be replaced manually.
#The static datasets go in the /static/notUpdating/ folder
#The per-university enrolment dataset must be downloaded from this link http://dati.ustat.miur.it/dataset/3dd9ca7f-9cc9-4a1a-915c-e569b181dbd5/resource/32d26e28-a0b5-45f3-9152-6072164f3e63/download/iscrittixateneo.csv
#and renamed iscrittiAteneo.csv
#The dataset about students who moved out of the region was built manually from other data and cannot be updated
#The datasets for the unemployment rate and the average hourly wage are available from this portal http://dati.istat.it/
#Unfortunately the site's search function is very slow and limited; in any case the two datasets are "Tasso di Disoccupazione - Dati Provinciali"
#and "Retribuzione oraria media per titolo di studio". In both cases the results must be filtered to the Veneto provinces only.
#The files must be renamed retribuzioneMedia.csv and taxDisocc.csv
#Fortunately, they are only updated once a year
@application.route("/doUpdate")
def updateData():
#Per-university enrolment file
#The data is put into a dictionary as arrays; the format is described below
with open('static/notUpdating/iscrittiAteneo.csv', newline='') as f: #The file name can be changed here if needed, as long as it is a valid CSV
reader = csv.reader(f)
data = list(reader)[1:]
iscrittiAteneo = {
'Venezia CF': [],
'Verona': [],
'Venezia IUAV': [],
'Padova': []}
for row in data:
row = row[0].split(';')
if row[1] == 'Padova' or 'Venezia C' in row[1] or row[1] == 'Venezia Iuav' or row[1] == 'Verona':
tmp = row[1]
if 'Venezia C' in row[1]:
tmp = 'Venezia CF'
if tmp == 'Venezia Iuav':
tmp = 'Venezia IUAV'
iscrittiAteneo[tmp].append(
row[0] + ';' + row[3] + ';' + row[4])
iscrittiAteneoJson = json.dumps(iscrittiAteneo)
# Formato: {"nomeAteneo" : ["annoScolastico;numeroIscrittiMaschi;numeroIscrittiFemmine",...,...],...,...}
open('static/jsons/iscrittiAteneo.json',
"wb").write(iscrittiAteneoJson.encode())
# File of students who moved to other regions
with open('static/notUpdating/iscrittiEmig.json', newline='') as f: #The file name can be changed here if needed, as long as it is valid JSON
reader = json.load(f)
iscrittiEmig = {
'vicenza': [],
'verona': [],
'venezia': [],
'padova': [],
'treviso': [],
'belluno': [],
'rovigo': []}
for row in reader['records']:
if row[4].lower() == 'padova' or row[4].lower() == 'vicenza' or row[4].lower() == 'venezia' or row[4].lower() == 'verona' or row[4].lower() == 'treviso' or row[4].lower() == 'belluno' or row[4].lower() == 'rovigo':
iscrittiEmig[row[4].lower()].append(
row[1] + ';' + row[4] + ';' + row[2] + ';' + str(row[6]))
lista = {
'vicenza': [],
'verona': [],
'venezia': [],
'padova': [],
'treviso': [],
'belluno': [],
'rovigo': []
}
count = 0
for key in iscrittiEmig.keys():
while len(iscrittiEmig[key]) > 2:
tmp = iscrittiEmig[key].pop(0).split(';')
if count == 0:
count = int(tmp[3])
tmp2 = iscrittiEmig[key][0].split(';')[2]
if tmp[2] == tmp2:
count += int(tmp[3])
else:
lista[tmp[1].lower()].append(
tmp[0] + ';' + tmp[2] + ';' + str(count))
count = 0
iscrittiEmigJson = json.dumps(lista)
# Formato: {"cittàInMinuscolo" : ["annoScolastico;CittàDiProvenienzaInMaiuscolo;RegioneDiEsodo;NumeroStudenti",...,...],...,...}
open('static/jsons/iscrittiEmig.json',
"wb").write(iscrittiEmigJson.encode())
# Average hourly wage by education level file
with open('static/notUpdating/retribuzioneMedia.csv', newline='') as f:
reader = csv.reader(f)
data = list(reader)[1:]
retribuzione = {
'Vicenza': [],
'Verona': [],
'Venezia': [],
'Padova': [],
'Treviso': [],
'Belluno': [],
'Rovigo': []}
for row in data:
if (row[1] == 'Padova' or row[1] == 'Vicenza' or row[1] == 'Venezia' or row[1] == 'Verona' or row[1] == 'Treviso' or row[1] == 'Belluno' or row[1] == 'Rovigo') and (row[5] != 'totale') and 'media)' in row[3]:
# The list is split into education level and average hourly income
tmp = row[5]
if 'nessun' in tmp:
tmp = 'nessuno'
retribuzione[row[1]].append(tmp + ';' + str(row[8]))
retribuzioneMediaJson = json.dumps(retribuzione)
# Formato: {"nomeCittà" : ["laurea;media", "diploma;media", "nulla;media"],...,...}
open('static/jsons/retribuzioneMedia.json',
"wb").write(retribuzioneMediaJson.encode())
# Unemployment-rate file
with open('static/notUpdating/taxDisocc.csv', newline='') as f: #The file name can be changed here if needed, as long as it is a valid CSV
reader = csv.reader(f)
data = list(reader)[1:]
lavoro = {
'Vicenza': [],
'Verona': [],
'Venezia': [],
'Padova': [],
'Treviso': [],
'Belluno': [],
'Rovigo': []}
for row in data:
if (row[7] == '15-24 anni') and row[5] != 'totale':
if row[5] == 'femmine':
lavoro[row[1]].a | ppend(str(row[10]))
| conditional_block |
|
task4-main.py | .485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
# Preprocess the image
image_tensor = transformation(image).float()
# Add an extra batch dimension since pytorch treats all images as batches
image_tensor = image_tensor.unsqueeze_(0)
if torch.cuda.is_available():
image_tensor.cuda()
# Turn the input into a Variable
input = Variable(image_tensor)
input=input.to(device)
# Predict the class of the image
output = model1(input)
index = output.cpu().data.numpy().argmax()
return index
'''
Function name : Apredict_image(image_path,model)
input : image path and model
output : predicted class name index of Animal image
call example : a=Apredict_image(image_path,model1)
'''
#this function will predict image
def Apredict_image(image_path,model1):
#print("Prediction in progress")
#image = Image.open(image_path)
image=image_path
model_ft=model1
# Define transformations for the image, should (note that imagenet models are trained with image size 224)
'''transformation = transforms.Compose([
transforms.Resize(input_size),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
])'''
transformation=transforms.Compose([
transforms.Resize(224),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
# Preprocess the image
image_tensor = transformation(image).float()
# Add an extra batch dimension since pytorch treats all images as batches
image_tensor = image_tensor.unsqueeze_(0)
if torch.cuda.is_available():
image_tensor.cuda()
# Turn the input into a Variable
input = Variable(image_tensor)
input=input.to(device)
# Predict the class of the image
output = model_ft(input)
index = output.cpu().data.numpy().argmax()
return index
#x is a variable which will count number of contour image
#This will draw contour and predict all the habitat image
x=1
for i in range(0,5):
for j in range(0,5):
image2=image[1629-i*310:1930-i*310,390+j*310:690+j*310,:] #habitat location of arena image
#cv2.imshow('image2',image2)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
imggray=cv2.cvtColor(image2,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(imggray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) #find conture of habitat image
# print(len(contures))
if len(contures) != 1:
pred=findhabit(image[1639-i*310:1922-i*310,396+j*310:680+j*310,:])#predict class name of habitat image
# print(x,pred)
position.append(x)
hposition.append(x)
name.append(pred)
hname.append(pred)
dicto=dict(zip(position,name))
habitatlist=dict(zip(hposition,hname))
image[1629-i*310:1930-i*310,390+j*310:690+j*310,:]=cv2.drawContours(image2,contures,0,(0,255,0),4)
val=x
cv2.putText(image2,str(val),(80,150),cv2.FONT_HERSHEY_SIMPLEX,1.8,(0,0,255),2)
#cv2.imshow('con',image)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
x=x+1
#top corner
u=0
v=0
for i in range(0,2):
image3=image[120:265,120+u:264+v,:] #location of image
image11=image[90:265,120+u:264+v,:]
img10gray=cv2.cvtColor(image3,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img10gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find conture of image location
# print(len(contures))
if len(contures) !=3:
pred=findanimal(image[120:265,120+u:264+v,:])#prediction of animal image
image[120:265,120+u:264+v,:]=cv2.drawContours(image3,contures,1,(0,255,0),2)
if i==0:
value='A6' | else:
value='F6'
cv2.putText(image11,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('track',image)
#cv2.imshow('im',image[120:265,120+u:264+v,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
u=u+1936
v=v+1937
#bottom two corner contour find ,drawing and prediction
u=0
v=0
for i in range(0,2):
image7=image[2055:2200,120+u:265+v,:]#image location copy to image7
image8=image[2025:2200,120+u:265+v,:]
img7gray=cv2.cvtColor(image7,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img7gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find conture
#print(len(contures))
if len(contures) != 3:
pred=findanimal(image[2074:2181,138+u:249+v,:])#predict animal name
image[2055:2200,120+u:265+v,:]=cv2.drawContours(image7,contures,1,(0,255,0),2)
if i==0:
value='A1'
else:
value='F1'
cv2.putText(image8,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('images',image)
#cv2.imshow('track',image[2055:2200,120+u:265+v,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
# print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
u=u+1936
v=v+1937
#top to bottom contour find drawing and detection
a=0
b=0
k=0
x=0
for j in range(0,4):
c=0
d=0
for i in range(0,2):
image3=image[2055-c:2200-d,622+a:766+b,:] #location of arena image
image13=image[2025-c:2200-d,622+a:766+b,:]
img7gray=cv2.cvtColor(image3,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img7gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find all conture
#print(len(contures))
pred=findanimal(image[2075-c:2182-d,636+a:753+b,:]) #predict animal name
if len(contures) !=3:
image[2055-c:2200-d,622+a | random_line_split |
|
task4-main.py |
'''
Function name : Hpredict_image(image_path,model)
input : image path and model
output : predicted class name index of Habitat image
call example : a=Hpredict_image(image_path,model1)
'''
def Hpredict_image(image_path,model1):
#print("Prediction in progress")
image=image_path
#image = Image.open(image_path,'rb')
# Define transformations for the image, should (note that imagenet models are trained with image size 224)
transformation = transforms.Compose([
transforms.Resize(224),
#transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
# Preprocess the image
image_tensor = transformation(image).float()
# Add an extra batch dimension since pytorch treats all images as batches
image_tensor = image_tensor.unsqueeze_(0)
if torch.cuda.is_available():
image_tensor.cuda()
# Turn the input into a Variable
input = Variable(image_tensor)
input=input.to(device)
# Predict the class of the image
output = model1(input)
index = output.cpu().data.numpy().argmax()
return index
'''
Function name : Apredict_image(image_path,model)
input : image path and model
output : predicted class name index of Animal image
call example : a=Apredict_image(image_path,model1)
'''
#this function will predict image
def Apredict_image(image_path,model1):
#print("Prediction in progress")
#image = Image.open(image_path)
image=image_path
model_ft=model1
# Define transformations for the image, should (note that imagenet models are trained with image size 224)
'''transformation = transforms.Compose([
transforms.Resize(input_size),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
])'''
transformation=transforms.Compose([
transforms.Resize(224),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
# Preprocess the image
image_tensor = transformation(image).float()
# Add an extra batch dimension since pytorch treats all images as batches
image_tensor = image_tensor.unsqueeze_(0)
if torch.cuda.is_available():
image_tensor.cuda()
# Turn the input into a Variable
input = Variable(image_tensor)
input=input.to(device)
# Predict the class of the image
output = model_ft(input)
index = output.cpu().data.numpy().argmax()
return index
#x is a variable which will count number of contour image
#This will draw contour and predict all the habitat image
x=1
for i in range(0,5):
for j in range(0,5):
image2=image[1629-i*310:1930-i*310,390+j*310:690+j*310,:] #habitat location of arena image
#cv2.imshow('image2',image2)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
imggray=cv2.cvtColor(image2,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(imggray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) #find conture of habitat image
# print(len(contures))
if len(contures) != 1:
pred=findhabit(image[1639-i*310:1922-i*310,396+j*310:680+j*310,:])#predict class name of habitat image
# print(x,pred)
position.append(x)
hposition.append(x)
name.append(pred)
hname.append(pred)
dicto=dict(zip(position,name))
habitatlist=dict(zip(hposition,hname))
image[1629-i*310:1930-i*310,390+j*310:690+j*310,:]=cv2.drawContours(image2,contures,0,(0,255,0),4)
val=x
cv2.putText(image2,str(val),(80,150),cv2.FONT_HERSHEY_SIMPLEX,1.8,(0,0,255),2)
#cv2.imshow('con',image)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
x=x+1
#top corner
u=0
v=0
for i in range(0,2):
image3=image[120:265,120+u:264+v,:] #location of image
image11=image[90:265,120+u:264+v,:]
img10gray=cv2.cvtColor(image3,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img10gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find conture of image location
# print(len(contures))
if len(contures) !=3:
pred=findanimal(image[120:265,120+u:264+v,:])#prediction of animal image
image[120:265,120+u:264+v,:]=cv2.drawContours(image3,contures,1,(0,255,0),2)
if i==0:
value='A6'
else:
value='F6'
cv2.putText(image11,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('track',image)
#cv2.imshow('im',image[120:265,120+u:264+v,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
u=u+1936
v=v+1937
#bottom two corner contour find ,drawing and prediction
u=0
v=0
for i in range(0,2):
image7=image[2055:2200,120+u:265+v,:]#image location copy to image7
image8=image[2025:2200,120+u:265+v,:]
img7gray=cv2.cvtColor(image7,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img7gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find conture
#print(len(contures))
if len(contures) != 3:
pred=findanimal(image[2074:2181,138+u:249+v,:])#predict animal name
image[2055:2200,120+u:265+v,:]=cv2.drawContours(image7,contures,1,(0,255,0),2)
if i==0:
value='A1'
else:
value='F1'
cv2.putText(image8,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('images',image)
#cv2.imshow('track',image[2055:2200,120+u:265+v,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
# print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
u=u+1936
v=v+1937
#top to bottom contour find drawing and detection
a=0
b=0
k=0
x=0
for j in range(0,4):
c=0
d=0
for i in range(0,2):
image3=image[2055-c:2200-d,622+a:766 | image=Image.fromarray(image,'RGB')
index=Apredict_image(image,Amodel1)
prediction=Aclass_name[index]
return prediction | identifier_body |
|
task4-main.py | 2.putText(image8,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('images',image)
#cv2.imshow('track',image[2055:2200,120+u:265+v,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
# print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
u=u+1936
v=v+1937
#top to bottom contour find drawing and detection
a=0
b=0
k=0
x=0
for j in range(0,4):
c=0
d=0
for i in range(0,2):
image3=image[2055-c:2200-d,622+a:766+b,:] #location of arena image
image13=image[2025-c:2200-d,622+a:766+b,:]
img7gray=cv2.cvtColor(image3,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img7gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find all conture
#print(len(contures))
pred=findanimal(image[2075-c:2182-d,636+a:753+b,:]) #predict animal name
if len(contures) !=3:
image[2055-c:2200-d,622+a:766+b,:]=cv2.drawContours(image3,contures,1,(0,255,0),2)
if i==0:
value=chr(ord('B')+x)+'1'
else:
value=chr(ord('B')+x)+'6'
cv2.putText(image13,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('track',image)
#cv2.imshow('image4',image[2055-c:2200-d,622+a:766+b,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
c=c+1935
d=d+1935
x=x+1
a=a+311
b=b+309
#Two Side Left-Right contour detection drawing and prediction
a=0
b=0
k=0
for j in range(0,2):
x=2
for i in range(0,4):
image1=image[1552-i*310:1697-i*310,120+a:265+b,:]#location of arena image
image14=image[1522-i*310:1697-i*310,120+a:265+b,:]
img1gray=cv2.cvtColor(image1,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img1gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find conture of image location
#print(len(contures))
if len(contures) !=3:
pred=findanimal(image[1569-i*309:1676-i*311,140+a:244+b,:]) #predict animal name
image[1552-i*310:1697-i*310,120+a:265+b,:]=cv2.drawContours(image1,contures,1,(0,255,0),2)
if j==0:
val='A'+str(x)
else:
val='F'+str(x)
cv2.putText(image14,val,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('track',image[1552-i*310:1697-i*310,120+a:265+b,:])
#cv2.imshow('ori',image)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#print(val,pred)
position.append(val)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
x=x+1
else:
x=x+1
a=a+1933
b=b+1936
print('\n Animal And Habitat : ')
print("__________________________")
print(dicto) #this will print animal and habitat name with location
'''for i in dicto.keys():
print(dicto[i])'''
'''print('\nHabitat(Cell Numbers)')
print(habitatlist)'''
print("For Animal Dataset")
print("..................")
print('\nAnimal(Location)')
print('__________________\n')
print(animalliston)
a,b=df.shape #assign excel sheet column and row size in a and b variable
hab=[]
for i in range(0,a):
hab.append(df.iloc[i][0])#copy all habitat name of excell file in hab list
data={}
for i in range(0,a):
for j in range(0,b):
data.update({hab[i]:df.iloc[i][0:]})
#all the habitat and animal which are maching to excel file copy to habitatandanimal list
habitatandanimallist=[]
for x in hab:
for y in dicto.keys():
if(x==dicto[y]):
listOfhabitat = [key for (key, value) in dicto.items() if value == x]
# print(x,listOfhabitat)
habitatandanimallist.append(listOfhabitat)
for z in range(1,b):
for t in dicto.keys():
if(data[x][z]==dicto[t]):
#habitatandanimallist.append('\n')
listofanimal= [key for (key, value) in dicto.items() if value == data[x][z]]
# print(data[x][z],listofanimal)
#habitatandanimallist.append('\n')
habitatandanimallist.append(listofanimal)
#habitatandanimallist.append('\n')
break
#habitatandanimallist.append('\n')
break
handa=[]
flag=0
i=0
while(i<len(habitatandanimallist)):
j=i+1
while(j<len(habitatandanimallist)):
if(habitatandanimallist[i]==habitatandanimallist[j]):
print(habitatandanimallist[i],i)
flag=1
i=i+1
else:
flag=0
j=j+1
if(flag==0):
handa.append(habitatandanimallist[i])
i=i+1
habitatandanimallist=handa
#separate habitat and animal
i=0
habit=[]
animal=[]
while(i <len(habitatandanimallist)):
if(type(habitatandanimallist[i][0])==str):
habit.append(habitatandanimallist[i-1])
animal.append(habitatandanimallist[i])
#while j in range(i+1,len(habitatandanimallist)):
j=i+1
while(j<len(habitatandanimallist)):
if(type(habitatandanimallist[j][0])==str):
animal.append(habitatandanimallist[j])
habit.append(habitatandanimallist[i-1])
i=i+1
j=j+1
else:
break
i=i+1
#according to mapping rearrange habitat and animal
i=0
habitatloc=[]
animalloc=[]
while(i<len(animal)):
if(len(animal[i])==len(habit[i])):
l=0
while(l<len(habit[i])):
habitatloc.append(habit[i][l])
l=l+1
#print('animal=habit')
i=i+1
elif(len(animal[i])>len(habit[i])):
| j=0
# print('animal greater')
while(j<len(habit[i])):
habitatloc.append(habit[i][j])
j=j+1
k=0
while(k<(len(animal[i])-len(habit[i]))):
habitatloc.append(habit[i][0])
k=k+1
i=i+1 | conditional_block |
|
task4-main.py | (value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
u=u+1936
v=v+1937
#top to bottom contour find drawing and detection
a=0
b=0
k=0
x=0
for j in range(0,4):
c=0
d=0
for i in range(0,2):
image3=image[2055-c:2200-d,622+a:766+b,:] #location of arena image
image13=image[2025-c:2200-d,622+a:766+b,:]
img7gray=cv2.cvtColor(image3,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img7gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find all conture
#print(len(contures))
pred=findanimal(image[2075-c:2182-d,636+a:753+b,:]) #predict animal name
if len(contures) !=3:
image[2055-c:2200-d,622+a:766+b,:]=cv2.drawContours(image3,contures,1,(0,255,0),2)
if i==0:
value=chr(ord('B')+x)+'1'
else:
value=chr(ord('B')+x)+'6'
cv2.putText(image13,value,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('track',image)
#cv2.imshow('image4',image[2055-c:2200-d,622+a:766+b,:])
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#print(value,pred)
position.append(value)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
c=c+1935
d=d+1935
x=x+1
a=a+311
b=b+309
#Two Side Left-Right contour detection drawing and prediction
a=0
b=0
k=0
for j in range(0,2):
x=2
for i in range(0,4):
image1=image[1552-i*310:1697-i*310,120+a:265+b,:]#location of arena image
image14=image[1522-i*310:1697-i*310,120+a:265+b,:]
img1gray=cv2.cvtColor(image1,cv2.COLOR_BGR2GRAY)
_,thres=cv2.threshold(img1gray,220,225,0)
_,contures,_=cv2.findContours(thres,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)#find conture of image location
#print(len(contures))
if len(contures) !=3:
pred=findanimal(image[1569-i*309:1676-i*311,140+a:244+b,:]) #predict animal name
image[1552-i*310:1697-i*310,120+a:265+b,:]=cv2.drawContours(image1,contures,1,(0,255,0),2)
if j==0:
val='A'+str(x)
else:
val='F'+str(x)
cv2.putText(image14,val,(50,30),cv2.FONT_HERSHEY_SIMPLEX,0.8,(0,0,0),2)
#cv2.imshow('track',image[1552-i*310:1697-i*310,120+a:265+b,:])
#cv2.imshow('ori',image)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#print(val,pred)
position.append(val)
aposition.append(value)
name.append(pred)
aname.append(pred)
dicto=dict(zip(position,name))
animalliston=dict(zip(aposition,aname))
x=x+1
else:
x=x+1
a=a+1933
b=b+1936
print('\n Animal And Habitat : ')
print("__________________________")
print(dicto) #this will print animal and habitat name with location
'''for i in dicto.keys():
print(dicto[i])'''
'''print('\nHabitat(Cell Numbers)')
print(habitatlist)'''
print("For Animal Dataset")
print("..................")
print('\nAnimal(Location)')
print('__________________\n')
print(animalliston)
a,b=df.shape #assign excel sheet column and row size in a and b variable
hab=[]
for i in range(0,a):
hab.append(df.iloc[i][0])#copy all habitat name of excell file in hab list
data={}
for i in range(0,a):
for j in range(0,b):
data.update({hab[i]:df.iloc[i][0:]})
#all the habitat and animal which are maching to excel file copy to habitatandanimal list
habitatandanimallist=[]
for x in hab:
for y in dicto.keys():
if(x==dicto[y]):
listOfhabitat = [key for (key, value) in dicto.items() if value == x]
# print(x,listOfhabitat)
habitatandanimallist.append(listOfhabitat)
for z in range(1,b):
for t in dicto.keys():
if(data[x][z]==dicto[t]):
#habitatandanimallist.append('\n')
listofanimal= [key for (key, value) in dicto.items() if value == data[x][z]]
# print(data[x][z],listofanimal)
#habitatandanimallist.append('\n')
habitatandanimallist.append(listofanimal)
#habitatandanimallist.append('\n')
break
#habitatandanimallist.append('\n')
break
handa=[]
flag=0
i=0
while(i<len(habitatandanimallist)):
j=i+1
while(j<len(habitatandanimallist)):
if(habitatandanimallist[i]==habitatandanimallist[j]):
print(habitatandanimallist[i],i)
flag=1
i=i+1
else:
flag=0
j=j+1
if(flag==0):
handa.append(habitatandanimallist[i])
i=i+1
habitatandanimallist=handa
#separate habitat and animal
i=0
habit=[]
animal=[]
while(i <len(habitatandanimallist)):
if(type(habitatandanimallist[i][0])==str):
habit.append(habitatandanimallist[i-1])
animal.append(habitatandanimallist[i])
#while j in range(i+1,len(habitatandanimallist)):
j=i+1
while(j<len(habitatandanimallist)):
if(type(habitatandanimallist[j][0])==str):
animal.append(habitatandanimallist[j])
habit.append(habitatandanimallist[i-1])
i=i+1
j=j+1
else:
break
i=i+1
#according to mapping rearrange habitat and animal
i=0
habitatloc=[]
animalloc=[]
while(i<len(animal)):
if(len(animal[i])==len(habit[i])):
l=0
while(l<len(habit[i])):
habitatloc.append(habit[i][l])
l=l+1
#print('animal=habit')
i=i+1
elif(len(animal[i])>len(habit[i])):
j=0
# print('animal greater')
while(j<len(habit[i])):
habitatloc.append(habit[i][j])
j=j+1
k=0
while(k<(len(animal[i])-len(habit[i]))):
habitatloc.append(habit[i][0])
k=k+1
i=i+1
else:
j=0
while(j<len(animal[i])):
habitatloc.append(habit[i][j])
j=j+1
i=i+1
t=0
while(t<len(animal)):
for j in range(0,len(animal[t])):
animalloc.append(animal[t][j])
t=t+1
dictokey=[]
for key in habitatlist:
dictokey.append(key)
def | Diff | identifier_name |
|
lib.rs | > tell us what your website is. You can just put your name in for now. Once you get a key, its what
//! > uniquely identifies you when accessing our WebAPI calls.
//!
//! In your `main.rs`, or anywhere you intend to use the library, create a non-mutable string
//! holding your token first; there are no calls without the token.
//! ```rust
//! //main.rs
//! use dota2_webapi_bindings::Dota2Api;
//! static DOTA2_KEY: &str = "0123456789"; //example token
//!
//! fn main() {
//! let mut dota = Dota2Api::new(String::from(DOTA2_KEY));
//! // we use `set` to configure the URL first
//! dota.set_heroes().itemized_only(true).language("zh_zh");
//! // you can also write the above as just `dota.set_heroes();` or `dota.set_heroes().itemized_only(true);`
//! // or just `dota.set_heroes().language("zh_zh");` or `dota.set_heroes().language("zh_zh").itemized_only(true);`
//! // our builder like function takes care of optional parameters
//!
//! // and finally `get` to retrieve our struct
//! let data = dota.get_heroes().expect("something went wrong, ez mid");
//! }
//!
//! ```
//!
//! ##### Available calls :
//! * IEconDOTA2_570
//! * GetGameItems
//! * GetHeroes
//! * GetRarities
//! * GetTournamentPrizePool
//! * IDOTA2Match_205790
//! * GetLeagueListing
//! * IDOTA2Match_570
//! * GetLiveLeagueGames
//! * GetTopLiveGame
//!
//! **Note:** Try using `language()` with everything; just put in any string, it seems to give a more readable name
//! and description for some reason. I have not set up a default because sometimes that might not be your intention.
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde_json;
pub mod dota;
use hyper::status::StatusCode;
use hyper::Client;
use std::io::Read;
use crate::dota::{
get_game_items::*, get_heroes::*, get_league_listing::*, get_live_league_games::*,
get_rarities::*, get_top_live_game::*, get_tournament_prize_pool::*,
};
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
///
/// **Note:** Try using `language()` with everything; just put in any string, it seems to give a more readable name
/// and description for some reason
macro_rules! language {
() => {
pub fn language(&mut self, param_value: &str) -> &mut Self {
self.url.push_str(&*format!("language={}&", param_value));
self
}
};
}
/// A `set!` macro to get our `set` functions
macro_rules! set {
($func: ident, $builder: ident, $build: ident) => {
pub fn $func(&mut self) -> &mut $build {
self.$builder = $build::build(&*self.key);
&mut self.$builder
}
};
}
/// A `get!` macro to get our `get` functions
macro_rules! get {
($func: ident, $return_type: ident, $builder: ident, $result: ident) => {
pub fn $func(&mut self) -> Result<$return_type, Error> {
let response = self.get(&*self.$builder.url.clone())?;
let data_result: $result = serde_json::from_str(response.as_str())?;
let data = data_result.result;
Ok(data)
}
};
}
/// builder to reduce boilerplate
macro_rules! builder {
($builder: ident, $url: expr) => {
#[derive(Debug, Default)]
pub struct $builder {
url: String,
}
impl $builder {
fn build(key: &str) -> Self {
Self {
url: format!($url, key),
}
}
}
};
}
/// different type of errors we can receive during either fetching of data or just unpacking JSON
#[derive(Debug)]
pub enum Error {
Http(hyper::Error),
Json(serde_json::Error),
Forbidden(&'static str),
Message(String),
}
impl From<hyper::Error> for Error {
fn from(e: hyper::Error) -> Error |
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Error {
Error::Json(e)
}
}
/// The main `Dota2Api` of your library works by saving states of all the invoked URLs (you only call the ones you need)
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
#[derive(Debug, Default)]
pub struct Dota2Api {
http_client: Client,
pub key: String,
get_heroes_builder: GetHeroesBuilder,
get_game_items_builder: GetGameItemsBuilder,
get_rarities_builder: GetRaritiesBuilder,
get_tournament_prize_pool_builder: GetTournamentPrizePoolBuilder,
get_league_listing_builder: GetLeagueListingBuilder,
get_live_league_games_builder: GetLiveLeagueGamesBuilder,
get_top_live_game_builder: GetTopLiveGameBuilder,
}
impl Dota2Api {
pub fn new(key: String) -> Self {
Dota2Api {
http_client: Client::new(),
key,
..Default::default()
}
}
set!(set_heroes, get_heroes_builder, GetHeroesBuilder);
// use `set` before `get`
get!(get_heroes, GetHeroes, get_heroes_builder, GetHeroesResult);
set!(set_game_items, get_game_items_builder, GetGameItemsBuilder);
// use `set` before `get`
get!(
get_game_items,
GetGameItems,
get_game_items_builder,
GetGameItemsResult
);
set!(set_rarities, get_rarities_builder, GetRaritiesBuilder);
// use `set` before `get`
get!(
get_rarities,
GetRarities,
get_rarities_builder,
GetRaritiesResult
);
set!(
set_tournament_prize_pool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolBuilder
);
// use `set` before `get`
get!(
get_tournament_prize_pool,
GetTournamentPrizePool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolResult
);
set!(
set_league_listing,
get_league_listing_builder,
GetLeagueListingBuilder
);
// use `set` before `get`
get!(
get_league_listing,
GetLeagueListing,
get_league_listing_builder,
GetLeagueListingResult
);
set!(
set_live_league_games,
get_live_league_games_builder,
GetLiveLeagueGamesBuilder
);
// use `set` before `get`
get!(
get_live_league_games,
GetLiveLeagueGames,
get_live_league_games_builder,
GetLiveLeagueGamesResult
);
set!(
set_top_live_game,
get_top_live_game_builder,
GetTopLiveGameBuilder
);
// use `set` before `get`
pub fn get_top_live_game(&mut self) -> Result<GetTopLiveGame, Error> {
let response = self.get(&*self.get_top_live_game_builder.url.clone())?;
let data_result: GetTopLiveGame = serde_json::from_str(response.as_str())?;
let data = data_result;
Ok(data)
}
/// our get function to actually get the data from the api
fn get(&mut self, url: &str) -> Result<String, Error> {
let mut response = self.http_client.get(url).send()?;
let mut temp = String::new();
if response.status == StatusCode::Forbidden {
return Err(Error::Forbidden(
"Access is denied. Retrying will not help. Please check your API key.",
));
}
let _ = response.read_to_string(&mut temp);
Ok(temp)
}
}
//==============================================================================
//IEconDOTA2_570
//==============================================================================
builder!(
GetHeroesBuilder,
"http://api.steampowered.com/IEcon | {
Error::Http(e)
} | identifier_body |
lib.rs | //! > tell us what your website is. You can just put your name in for now. Once you get a key, its what
//! > uniquely identifies you when accessing our WebAPI calls.
//!
//! In your `main.rs`, or anywhere you intend to use the library, create a non-mutable string
//! holding your token first; there are no calls without the token.
//! ```rust
//! //main.rs
//! use dota2_webapi_bindings::Dota2Api;
//! static DOTA2_KEY: &str = "0123456789"; //example token
//!
//! fn main() {
//! let mut dota = Dota2Api::new(String::from(DOTA2_KEY));
//! // we use `set` to configure the URL first
//! dota.set_heroes().itemized_only(true).language("zh_zh");
//! // you can also write the above as just `dota.set_heroes();` or `dota.set_heroes().itemized_only(true);`
//! // or just `dota.set_heroes().language("zh_zh");` or `dota.set_heroes().language("zh_zh").itemized_only(true);`
//! // our builder like function takes care of optional parameters
//!
//! // and finally `get` to retrieve our struct
//! let data = dota.get_heroes().expect("something went wrong, ez mid");
//! }
//!
//! ```
//!
//! ##### Available calls :
//! * IEconDOTA2_570
//! * GetGameItems
//! * GetHeroes
//! * GetRarities
//! * GetTournamentPrizePool
//! * IDOTA2Match_205790
//! * GetLeagueListing
//! * IDOTA2Match_570
//! * GetLiveLeagueGames
//! * GetTopLiveGame
//!
//! **Note:** Try using `language()` with everything; just put in any string, it seems to give a more readable name
//! and description for some reason. I have not set up a default because sometimes that might not be your intention.
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde_json;
pub mod dota;
use hyper::status::StatusCode;
use hyper::Client;
use std::io::Read;
use crate::dota::{
get_game_items::*, get_heroes::*, get_league_listing::*, get_live_league_games::*,
get_rarities::*, get_top_live_game::*, get_tournament_prize_pool::*,
};
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
///
/// **Note:** Try using `language()` with everything; just put in any string, it seems to give a more readable name
/// and description for some reason
macro_rules! language {
() => {
pub fn language(&mut self, param_value: &str) -> &mut Self {
self.url.push_str(&*format!("language={}&", param_value));
self
}
};
}
/// A `set!` macro to get our `set` functions
macro_rules! set {
($func: ident, $builder: ident, $build: ident) => { | self.$builder = $build::build(&*self.key);
&mut self.$builder
}
};
}
/// A `get!` macro to get our `get` functions
macro_rules! get {
($func: ident, $return_type: ident, $builder: ident, $result: ident) => {
pub fn $func(&mut self) -> Result<$return_type, Error> {
let response = self.get(&*self.$builder.url.clone())?;
let data_result: $result = serde_json::from_str(response.as_str())?;
let data = data_result.result;
Ok(data)
}
};
}
/// builder to reduce boilerplate
macro_rules! builder {
($builder: ident, $url: expr) => {
#[derive(Debug, Default)]
pub struct $builder {
url: String,
}
impl $builder {
fn build(key: &str) -> Self {
Self {
url: format!($url, key),
}
}
}
};
}
/// different type of errors we can receive during either fetching of data or just unpacking JSON
#[derive(Debug)]
pub enum Error {
Http(hyper::Error),
Json(serde_json::Error),
Forbidden(&'static str),
Message(String),
}
impl From<hyper::Error> for Error {
fn from(e: hyper::Error) -> Error {
Error::Http(e)
}
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Error {
Error::Json(e)
}
}
/// The main `Dota2Api` of your library works by saving states of all the invoked URLs (you only call the ones you need)
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
#[derive(Debug, Default)]
pub struct Dota2Api {
http_client: Client,
pub key: String,
get_heroes_builder: GetHeroesBuilder,
get_game_items_builder: GetGameItemsBuilder,
get_rarities_builder: GetRaritiesBuilder,
get_tournament_prize_pool_builder: GetTournamentPrizePoolBuilder,
get_league_listing_builder: GetLeagueListingBuilder,
get_live_league_games_builder: GetLiveLeagueGamesBuilder,
get_top_live_game_builder: GetTopLiveGameBuilder,
}
impl Dota2Api {
pub fn new(key: String) -> Self {
Dota2Api {
http_client: Client::new(),
key,
..Default::default()
}
}
set!(set_heroes, get_heroes_builder, GetHeroesBuilder);
// use `set` before `get`
get!(get_heroes, GetHeroes, get_heroes_builder, GetHeroesResult);
set!(set_game_items, get_game_items_builder, GetGameItemsBuilder);
// use `set` before `get`
get!(
get_game_items,
GetGameItems,
get_game_items_builder,
GetGameItemsResult
);
set!(set_rarities, get_rarities_builder, GetRaritiesBuilder);
// use `set` before `get`
get!(
get_rarities,
GetRarities,
get_rarities_builder,
GetRaritiesResult
);
set!(
set_tournament_prize_pool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolBuilder
);
// use `set` before `get`
get!(
get_tournament_prize_pool,
GetTournamentPrizePool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolResult
);
set!(
set_league_listing,
get_league_listing_builder,
GetLeagueListingBuilder
);
// use `set` before `get`
get!(
get_league_listing,
GetLeagueListing,
get_league_listing_builder,
GetLeagueListingResult
);
set!(
set_live_league_games,
get_live_league_games_builder,
GetLiveLeagueGamesBuilder
);
// use `set` before `get`
get!(
get_live_league_games,
GetLiveLeagueGames,
get_live_league_games_builder,
GetLiveLeagueGamesResult
);
set!(
set_top_live_game,
get_top_live_game_builder,
GetTopLiveGameBuilder
);
// use `set` before `get`
pub fn get_top_live_game(&mut self) -> Result<GetTopLiveGame, Error> {
let response = self.get(&*self.get_top_live_game_builder.url.clone())?;
let data_result: GetTopLiveGame = serde_json::from_str(response.as_str())?;
let data = data_result;
Ok(data)
}
/// our get function to actually get the data from the api
fn get(&mut self, url: &str) -> Result<String, Error> {
let mut response = self.http_client.get(url).send()?;
let mut temp = String::new();
if response.status == StatusCode::Forbidden {
return Err(Error::Forbidden(
"Access is denied. Retrying will not help. Please check your API key.",
));
}
let _ = response.read_to_string(&mut temp);
Ok(temp)
}
}
//==============================================================================
//IEconDOTA2_570
//==============================================================================
builder!(
GetHeroesBuilder,
"http://api.steampowered.com/IEconDOT | pub fn $func(&mut self) -> &mut $build { | random_line_split |
lib.rs | _live_league_games::*,
get_rarities::*, get_top_live_game::*, get_tournament_prize_pool::*,
};
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
///
/// **Note:** Try using `language()` with everything; just put in any string, it seems to give a more readable name
/// and description for some reason
macro_rules! language {
() => {
pub fn language(&mut self, param_value: &str) -> &mut Self {
self.url.push_str(&*format!("language={}&", param_value));
self
}
};
}
/// A `set!` macro to get our `set` functions
macro_rules! set {
($func: ident, $builder: ident, $build: ident) => {
pub fn $func(&mut self) -> &mut $build {
self.$builder = $build::build(&*self.key);
&mut self.$builder
}
};
}
/// A `get!` macro to get our `get` functions
macro_rules! get {
($func: ident, $return_type: ident, $builder: ident, $result: ident) => {
pub fn $func(&mut self) -> Result<$return_type, Error> {
let response = self.get(&*self.$builder.url.clone())?;
let data_result: $result = serde_json::from_str(response.as_str())?;
let data = data_result.result;
Ok(data)
}
};
}
/// builder to reduce boilerplate
macro_rules! builder {
($builder: ident, $url: expr) => {
#[derive(Debug, Default)]
pub struct $builder {
url: String,
}
impl $builder {
fn build(key: &str) -> Self {
Self {
url: format!($url, key),
}
}
}
};
}
/// different type of errors we can receive during either fetching of data or just unpacking JSON
#[derive(Debug)]
pub enum Error {
Http(hyper::Error),
Json(serde_json::Error),
Forbidden(&'static str),
Message(String),
}
impl From<hyper::Error> for Error {
fn from(e: hyper::Error) -> Error {
Error::Http(e)
}
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Error {
Error::Json(e)
}
}
/// The main `Dota2Api` of your library works by saving states of all the invoked URLs (you only call the ones you need)
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
#[derive(Debug, Default)]
pub struct Dota2Api {
http_client: Client,
pub key: String,
get_heroes_builder: GetHeroesBuilder,
get_game_items_builder: GetGameItemsBuilder,
get_rarities_builder: GetRaritiesBuilder,
get_tournament_prize_pool_builder: GetTournamentPrizePoolBuilder,
get_league_listing_builder: GetLeagueListingBuilder,
get_live_league_games_builder: GetLiveLeagueGamesBuilder,
get_top_live_game_builder: GetTopLiveGameBuilder,
}
impl Dota2Api {
pub fn new(key: String) -> Self {
Dota2Api {
http_client: Client::new(),
key,
..Default::default()
}
}
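// A minimal usage sketch (assumes "0123456789" stands in for a real API key, as in the crate docs):
//
// let mut dota = Dota2Api::new(String::from("0123456789"));
// dota.set_heroes().language("en_us"); // configure the URL first with `set_*`
// let heroes = dota.get_heroes().expect("request failed"); // then fetch with `get_*`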
set!(set_heroes, get_heroes_builder, GetHeroesBuilder);
// use `set` before `get`
get!(get_heroes, GetHeroes, get_heroes_builder, GetHeroesResult);
set!(set_game_items, get_game_items_builder, GetGameItemsBuilder);
// use `set` before `get`
get!(
get_game_items,
GetGameItems,
get_game_items_builder,
GetGameItemsResult
);
set!(set_rarities, get_rarities_builder, GetRaritiesBuilder);
// use `set` before `get`
get!(
get_rarities,
GetRarities,
get_rarities_builder,
GetRaritiesResult
);
set!(
set_tournament_prize_pool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolBuilder
);
// use `set` before `get`
get!(
get_tournament_prize_pool,
GetTournamentPrizePool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolResult
);
set!(
set_league_listing,
get_league_listing_builder,
GetLeagueListingBuilder
);
// use `set` before `get`
get!(
get_league_listing,
GetLeagueListing,
get_league_listing_builder,
GetLeagueListingResult
);
set!(
set_live_league_games,
get_live_league_games_builder,
GetLiveLeagueGamesBuilder
);
// use `set` before `get`
get!(
get_live_league_games,
GetLiveLeagueGames,
get_live_league_games_builder,
GetLiveLeagueGamesResult
);
set!(
set_top_live_game,
get_top_live_game_builder,
GetTopLiveGameBuilder
);
// use `set` before `get`
pub fn get_top_live_game(&mut self) -> Result<GetTopLiveGame, Error> {
let response = self.get(&*self.get_top_live_game_builder.url.clone())?;
let data_result: GetTopLiveGame = serde_json::from_str(response.as_str())?;
let data = data_result;
Ok(data)
}
/// our get function to actually get the data from the api
fn get(&mut self, url: &str) -> Result<String, Error> {
let mut response = self.http_client.get(url).send()?;
let mut temp = String::new();
if response.status == StatusCode::Forbidden {
return Err(Error::Forbidden(
"Access is denied. Retrying will not help. Please check your API key.",
));
}
let _ = response.read_to_string(&mut temp);
Ok(temp)
}
}
//==============================================================================
//IEconDOTA2_570
//==============================================================================
builder!(
GetHeroesBuilder,
"http://api.steampowered.com/IEconDOTA2_570/GetHeroes/v1/?key={}&"
);
impl GetHeroesBuilder {
/// itemizedonly (Optional) (bool) : Return a list of itemized heroes only.
pub fn itemized_only(&mut self, param_value: bool) -> &mut Self {
self.url
.push_str(&*format!("itemizedonly={}&", param_value));
self
}
language!();
}
builder!(
GetGameItemsBuilder,
"http://api.steampowered.com/IEconDOTA2_570/GetGameItems/v1/?key={}&"
);
impl GetGameItemsBuilder {
language!();
}
builder!(
GetRaritiesBuilder,
"http://api.steampowered.com/IEconDOTA2_570/GetRarities/v1/?key={}&"
);
impl GetRaritiesBuilder {
language!();
}
builder!(
GetTournamentPrizePoolBuilder,
"http://api.steampowered.com/IEconDOTA2_570/GetTournamentPrizePool/v1/?key={}&"
);
impl GetTournamentPrizePoolBuilder {
/// leagueid (Optional) (int) : The ID of the league to get the prize pool of.
pub fn league_id(&mut self, param_value: usize) -> &mut Self {
self.url.push_str(&*format!("leagueid={}&", param_value));
self
}
language!();
}
//==============================================================================
//IDOTA2Match_205790
//==============================================================================
builder!(
GetLeagueListingBuilder,
"http://api.steampowered.com/IDOTA2Match_205790/GetLeagueListing/v1/?key={}&"
);
impl GetLeagueListingBuilder {
language!();
}
//==============================================================================
//IDOTA2Match_570
//==============================================================================
builder!(
GetLiveLeagueGamesBuilder,
"http://api.steampowered.com/IDOTA2Match_570/GetLiveLeagueGames/v1/?key={}&"
);
impl GetLiveLeagueGamesBuilder {
language!();
/// Only show matches of the specified league id
pub fn league_id(&mut self, param_value: usize) -> &mut Self {
self.url.push_str(&*format!("league_id={}&", param_value));
self
}
/// Only show matches of the specified match id
    pub fn match_id(&mut self, param_value: usize) -> &mut Self {
        self.url.push_str(&*format!("match_id={}&", param_value));
        self
    }
}
|
//! > tell us what your website is. You can just put your name in for now. Once you get a key, its what
//! > uniquely identifies you when accessing our WebAPI calls.
//!
//! In your `main.rs` or anywhere you intend to use the library create a non-mutable string of
//! you token pass first to use the library, there is no calls without the token.
//! ```rust
//! //main.rs
//! use dota2_webapi_bindings::Dota2Api;
//! static DOTA2_KEY: &str = "0123456789"; //example token
//!
//! fn main() {
//! let mut dota = Dota2Api::new(String::from(DOTA2_KEY));
//! // we use `set` to configure the URL first
//! dota.set_heroes().itemized_only(true).language("zh_zh");
//! // you can also write the above as just `dota.set_heroes();` or `dota.set_heroes().itemized_only(true);`
//! // or just `dota.set_heroes().language("zh_zh");` or `dota.set_heroes().language("zh_zh").itemized_only(true);`
//! // our builder like function takes care of optional parameters
//!
//! // and finally `get` to retrieve our struct
//! let data = dota.get_heroes().expect("something went wrong, ez mid");
//! }
//!
//! ```
//!
//! ##### Available calls :
//! * IEconDOTA2_570
//! * GetGameItems
//! * GetHeroes
//! * GetRarities
//! * GetTournamentPrizePool
//! * IDOTA2Match_205790
//! * GetLeagueListing
//! * IDOTA2Match_570
//! * GetLiveLeagueGames
//! * GetTopLiveGame
//!
//! **Note:** Try using `language()` with everything; just pass in any string. It seems to give a more readable name
//! and description for some reason. I have not set up a default because sometimes that might not be your intention.
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde_json;
pub mod dota;
use hyper::status::StatusCode;
use hyper::Client;
use std::io::Read;
use crate::dota::{
get_game_items::*, get_heroes::*, get_league_listing::*, get_live_league_games::*,
get_rarities::*, get_top_live_game::*, get_tournament_prize_pool::*,
};
/// language macro for easy implementation in various builder struct
///
/// The language to retrieve results in (default is en_us) (see http://en.wikipedia.org/wiki/ISO_639-1 for
/// the language codes (first two characters) and http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes for
/// the country codes (last two characters))
///
/// language (Optional) (string) : The language to provide output in.
///
/// **Note:** Try using `language()` with everything; just pass in any string. It seems to give a more readable name
/// and description for some reason.
macro_rules! language {
() => {
pub fn language(&mut self, param_value: &str) -> &mut Self {
self.url.push_str(&*format!("language={}&", param_value));
self
}
};
}
/// A `set!` macro to get our `set` functions
macro_rules! set {
($func: ident, $builder: ident, $build: ident) => {
pub fn $func(&mut self) -> &mut $build {
self.$builder = $build::build(&*self.key);
&mut self.$builder
}
};
}
/// A `get!` macro to get our `get` functions
macro_rules! get {
($func: ident, $return_type: ident, $builder: ident, $result: ident) => {
pub fn $func(&mut self) -> Result<$return_type, Error> {
let response = self.get(&*self.$builder.url.clone())?;
let data_result: $result = serde_json::from_str(response.as_str())?;
let data = data_result.result;
Ok(data)
}
};
}
/// builder to reduce boilerplate
macro_rules! builder {
($builder: ident, $url: expr) => {
#[derive(Debug, Default)]
pub struct $builder {
url: String,
}
impl $builder {
fn build(key: &str) -> Self {
Self {
url: format!($url, key),
}
}
}
};
}
/// different type of errors we can receive during either fetching of data or just unpacking JSON
#[derive(Debug)]
pub enum Error {
Http(hyper::Error),
Json(serde_json::Error),
Forbidden(&'static str),
Message(String),
}
impl From<hyper::Error> for Error {
fn from(e: hyper::Error) -> Error {
Error::Http(e)
}
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Error {
Error::Json(e)
}
}
/// The main `Dota2Api` struct of the library. It works by saving the state of every invoked URL
/// builder, so you only call the endpoints you need.
#[derive(Debug, Default)]
pub struct Dota2Api {
http_client: Client,
pub key: String,
get_heroes_builder: GetHeroesBuilder,
get_game_items_builder: GetGameItemsBuilder,
get_rarities_builder: GetRaritiesBuilder,
get_tournament_prize_pool_builder: GetTournamentPrizePoolBuilder,
get_league_listing_builder: GetLeagueListingBuilder,
get_live_league_games_builder: GetLiveLeagueGamesBuilder,
get_top_live_game_builder: GetTopLiveGameBuilder,
}
impl Dota2Api {
pub fn new(key: String) -> Self {
Dota2Api {
http_client: Client::new(),
key,
..Default::default()
}
}
set!(set_heroes, get_heroes_builder, GetHeroesBuilder);
// use `set` before `get`
get!(get_heroes, GetHeroes, get_heroes_builder, GetHeroesResult);
set!(set_game_items, get_game_items_builder, GetGameItemsBuilder);
// use `set` before `get`
get!(
get_game_items,
GetGameItems,
get_game_items_builder,
GetGameItemsResult
);
set!(set_rarities, get_rarities_builder, GetRaritiesBuilder);
// use `set` before `get`
get!(
get_rarities,
GetRarities,
get_rarities_builder,
GetRaritiesResult
);
set!(
set_tournament_prize_pool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolBuilder
);
// use `set` before `get`
get!(
get_tournament_prize_pool,
GetTournamentPrizePool,
get_tournament_prize_pool_builder,
GetTournamentPrizePoolResult
);
set!(
set_league_listing,
get_league_listing_builder,
GetLeagueListingBuilder
);
// use `set` before `get`
get!(
get_league_listing,
GetLeagueListing,
get_league_listing_builder,
GetLeagueListingResult
);
set!(
set_live_league_games,
get_live_league_games_builder,
GetLiveLeagueGamesBuilder
);
// use `set` before `get`
get!(
get_live_league_games,
GetLiveLeagueGames,
get_live_league_games_builder,
GetLiveLeagueGamesResult
);
set!(
set_top_live_game,
get_top_live_game_builder,
GetTopLiveGameBuilder
);
// use `set` before `get`
pub fn get_top_live_game(&mut self) -> Result<GetTopLiveGame, Error> {
let response = self.get(&*self.get_top_live_game_builder.url.clone())?;
let data_result: GetTopLiveGame = serde_json::from_str(response.as_str())?;
let data = data_result;
Ok(data)
}
/// our get function to actually get the data from the api
fn get(&mut self, url: &str) -> Result<String, Error> {
let mut response = self.http_client.get(url).send()?;
let mut temp = String::new();
        if response.status == StatusCode::Forbidden {
            return Err(Error::Forbidden(
                "Access is denied. Retrying will not help. Please check your API key.",
            ));
        }
let _ = response.read_to_string(&mut temp);
Ok(temp)
}
}
//==============================================================================
//IEconDOTA2_570
//==============================================================================
builder!(
GetHeroesBuilder,
"http://api.steampowered.com/IEcon | {
return Err(Error::Forbidden(
"Access is denied. Retrying will not help. Please check your API key.",
));
} | conditional_block |
// file: lib.rs (crate: axum_handle_error_extract)
//!
//! Not running any extractors is also supported:
//!
//! ```rust
//! use axum::{
//! Router,
//! BoxError,
//! response::IntoResponse,
//! http::StatusCode,
//! routing::get,
//! };
//! use tower::{ServiceBuilder, timeout::error::Elapsed};
//! use std::time::Duration;
//! use axum_handle_error_extract::HandleErrorLayer;
//!
//! let app = Router::new()
//! .route("/", get(|| async {}))
//! .layer(
//! ServiceBuilder::new()
//! .layer(HandleErrorLayer::new(handle_error))
//! .timeout(Duration::from_secs(10))
//! );
//!
//! // this function just takes the error
//! async fn handle_error(error: BoxError) -> impl IntoResponse {
//! if error.is::<Elapsed>() {
//! (
//! StatusCode::REQUEST_TIMEOUT,
//! "Request timeout".to_string(),
//! )
//! } else {
//! (
//! StatusCode::INTERNAL_SERVER_ERROR,
//! format!("Unhandled internal error: {}", error),
//! )
//! }
//! }
//! # async {
//! # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
//! # };
//! ```
//!
//! See [`axum::error_handling`] for more details on axum's error handling model and
//! [`axum::extract`] for more details on extractors.
//!
//! # The future
//!
//! In axum 0.4 this will replace the current [`axum::error_handling::HandleErrorLayer`].
#![warn(
clippy::all,
clippy::dbg_macro,
clippy::todo,
clippy::empty_enum,
clippy::enum_glob_use,
clippy::mem_forget,
clippy::unused_self,
clippy::filter_map_next,
clippy::needless_continue,
clippy::needless_borrow,
clippy::match_wildcard_for_single_variants,
clippy::if_let_mutex,
clippy::mismatched_target_os,
clippy::await_holding_lock,
clippy::match_on_vec_items, | clippy::suboptimal_flops,
clippy::lossy_float_literal,
clippy::rest_pat_in_fully_bound_structs,
clippy::fn_params_excessive_bools,
clippy::exit,
clippy::inefficient_to_string,
clippy::linkedlist,
clippy::macro_use_imports,
clippy::option_option,
clippy::verbose_file_reads,
clippy::unnested_or_patterns,
rust_2018_idioms,
future_incompatible,
nonstandard_style,
missing_debug_implementations,
missing_docs
)]
#![deny(unreachable_pub, private_in_public)]
#![allow(elided_lifetimes_in_paths, clippy::type_complexity)]
#![forbid(unsafe_code)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(test, allow(clippy::float_cmp))]
use axum::{
body::{box_body, BoxBody, Bytes, Full, HttpBody},
extract::{FromRequest, RequestParts},
http::{Request, Response, StatusCode},
response::IntoResponse,
BoxError,
};
use pin_project_lite::pin_project;
use std::{
convert::Infallible,
fmt,
future::Future,
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
use tower::ServiceExt;
use tower_layer::Layer;
use tower_service::Service;
/// [`Layer`] that applies [`HandleError`] which is a [`Service`] adapter
/// that handles errors by converting them into responses.
///
/// See [module docs](self) for more details on axum's error handling model.
pub struct HandleErrorLayer<F, T> {
f: F,
_extractor: PhantomData<fn() -> T>,
}
impl<F, T> HandleErrorLayer<F, T> {
/// Create a new `HandleErrorLayer`.
pub fn new(f: F) -> Self {
Self {
f,
_extractor: PhantomData,
}
}
}
impl<F, T> Clone for HandleErrorLayer<F, T>
where
F: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
_extractor: PhantomData,
}
}
}
impl<F, E> fmt::Debug for HandleErrorLayer<F, E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("HandleErrorLayer")
.field("f", &format_args!("{}", std::any::type_name::<F>()))
.finish()
}
}
impl<S, F, T> Layer<S> for HandleErrorLayer<F, T>
where
F: Clone,
{
type Service = HandleError<S, F, T>;
fn layer(&self, inner: S) -> Self::Service {
HandleError::new(inner, self.f.clone())
}
}
/// A [`Service`] adapter that handles errors by converting them into responses.
///
/// See [module docs](self) for more details on axum's error handling model.
pub struct HandleError<S, F, T> {
inner: S,
f: F,
_extractor: PhantomData<fn() -> T>,
}
impl<S, F, T> HandleError<S, F, T> {
/// Create a new `HandleError`.
pub fn new(inner: S, f: F) -> Self {
Self {
inner,
f,
_extractor: PhantomData,
}
}
}
impl<S, F, T> Clone for HandleError<S, F, T>
where
S: Clone,
F: Clone,
{
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
f: self.f.clone(),
_extractor: PhantomData,
}
}
}
impl<S, F, E> fmt::Debug for HandleError<S, F, E>
where
S: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("HandleError")
.field("inner", &self.inner)
.field("f", &format_args!("{}", std::any::type_name::<F>()))
.finish()
}
}
impl<S, F, ReqBody, ResBody, Fut, Res> Service<Request<ReqBody>> for HandleError<S, F, ()>
where
S: Service<Request<ReqBody>, Response = Response<ResBody>> + Clone + Send + 'static,
S::Error: Send,
S::Future: Send,
F: FnOnce(S::Error) -> Fut + Clone + Send + 'static,
Fut: Future<Output = Res> + Send,
Res: IntoResponse,
ReqBody: Send + 'static,
ResBody: HttpBody<Data = Bytes> + Send + 'static,
ResBody::Error: Into<BoxError>,
{
type Response = Response<BoxBody>;
type Error = Infallible;
type Future = ResponseFuture;
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: Request<ReqBody>) -> Self::Future {
let f = self.f.clone();
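        // Standard tower pattern: `poll_ready` was called on `self.inner`, so take that ready
        // service for this call and leave a fresh clone behind for the next request.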
let clone = self.inner.clone();
let inner = std::mem::replace(&mut self.inner, clone);
let future = Box::pin(async move {
match inner.oneshot(req).await {
Ok(res) => Ok(res.map(box_body)),
Err(err) => Ok(f(err).await.into_response().map(box_body)),
}
});
ResponseFuture { future }
}
}
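// The impl above covers handlers that take only the error. Handlers that additionally run
// extractors (for example, a hypothetical `async fn handle(method: Method, err: BoxError) ->
// impl IntoResponse`) are covered by the `impl_service!` macro below, which generates one
// `Service` impl per extractor-tuple arity, with the extractors passed before the error.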
#[allow(unused_macros)]
macro_rules! impl_service {
( $($ty:ident),* $(,)? ) => {
impl<S, F, ReqBody, ResBody, Res, Fut, $($ty,)*> Service<Request<ReqBody>>
for HandleError<S, F, ($($ty,)*)>
where
S: Service<Request<ReqBody>, Response = Response<ResBody>> + Clone + Send + 'static,
S::Error: Send,
S::Future: Send,
F: FnOnce($($ty),*, S::Error) -> Fut + Clone + Send + 'static,
Fut: Future<Output = Res> + Send,
Res: IntoResponse,
$( $ty: FromRequest<ReqBody> + Send,)*
ReqBody: Send + 'static,
ResBody: HttpBody<Data = Bytes> + Send + 'static,
ResBody::Error: Into<BoxError>,
{
type Response = Response<BoxBody>;
type Error = Infallible;
type Future = ResponseFuture;
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
#[allow(non_snake_case)]
fn call(&mut self, req: Request<ReqBody>) -> Self::Future {
let f = self.f.clone();
let clone = self.inner.clone();
let inner = std::mem::replace(&mut self.inner, clone);
let future = Box::pin(async move {
let mut req = RequestParts::new(req);
| clippy::imprecise_flops, | random_line_split |
// file: manifest.rs (crate: deck_core)
struct Package {
    name: Name,
    version: String,
    dependencies: BTreeSet<ManifestId>,
build_dependencies: BTreeSet<ManifestId>,
dev_dependencies: BTreeSet<ManifestId>,
}
/// A reproducible package manifest.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Manifest {
package: Package,
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
env: BTreeMap<String, String>,
#[serde(rename = "output")]
outputs: Outputs,
#[serde(default, rename = "source", skip_serializing_if = "Sources::is_empty")]
sources: Sources,
}
impl Manifest {
/// Creates a `Manifest` with the given name, version, default output [`Hash`], and references.
///
/// [`Hash`]: ../struct.Hash.html
pub fn | <T, U>(name: T, version: T, default_output_hash: T, refs: U) -> ManifestBuilder
where
T: AsRef<str>,
U: IntoIterator<Item = OutputId>,
{
ManifestBuilder::new(name, version, default_output_hash, refs)
}
/// Computes the content-addressable ID of this manifest.
///
/// # Example
///
/// ```
/// # use deck_core::Manifest;
/// #
/// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
/// .finish()
/// .unwrap();
///
/// let id = manifest.compute_id();
/// assert_eq!(id, "[email protected]");
/// ```
#[inline]
pub fn compute_id(&self) -> ManifestId {
let name = self.package.name.clone();
let version = self.package.version.clone();
let hash = Hash::compute().input(&self.to_string()).finish();
ManifestId::new(name, version, hash)
}
/// Returns the name of the package.
///
/// This string is guaranteed not to be empty.
///
/// # Example
///
/// ```
/// # use deck_core::Manifest;
/// #
/// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
/// .finish()
/// .unwrap();
///
/// let name = manifest.name();
/// assert_eq!(name, "foo");
/// ```
#[inline]
pub fn name(&self) -> &str {
self.package.name.as_str()
}
/// Returns the semantic version of the package.
///
/// # Example
///
/// ```
/// # use deck_core::Manifest;
/// #
/// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
/// .finish()
/// .unwrap();
///
/// let version = manifest.version();
/// assert_eq!(version, "1.0.0");
/// ```
#[inline]
pub fn version(&self) -> &str {
&self.package.version
}
/// Iterates over the package's runtime dependencies.
#[inline]
pub fn dependencies(&self) -> impl Iterator<Item = &ManifestId> {
self.package.dependencies.iter()
}
/// Iterates over the package's build-time dependencies.
#[inline]
pub fn build_dependencies(&self) -> impl Iterator<Item = &ManifestId> {
self.package.build_dependencies.iter()
}
/// Iterates over the package's optional testing dependencies.
#[inline]
pub fn dev_dependencies(&self) -> impl Iterator<Item = &ManifestId> {
self.package.dev_dependencies.iter()
}
/// Iterates over the package builder's environment variables as key-value pairs.
#[inline]
pub fn env(&self) -> impl Iterator<Item = (&String, &String)> + '_ {
self.env.iter()
}
/// Iterates over the package's build outputs.
///
/// # Note
///
/// Every package is guaranteed to produce at least one default output and zero or more additional
/// outputs. When a manifest is built from source, all outputs are built together.
#[inline]
pub fn outputs(&self) -> impl Iterator<Item = OutputId> + '_ {
let name = self.package.name.clone();
let ver = self.package.version.clone();
self.outputs.iter_with(name, ver)
}
/// Iterates over the package's sources.
#[inline]
pub fn sources(&self) -> impl Iterator<Item = &Source> {
self.sources.iter()
}
}
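// A small usage sketch (hypothetical `manifest` value; assumes the ID types implement `Display`):
//
// for dep in manifest.dependencies() {
//     println!("runtime dependency: {}", dep);
// }
// for output in manifest.outputs() {
//     println!("build output: {}", output);
// }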
impl Display for Manifest {
fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
toml::to_string(self)
.map_err(|e| {
println!("couldn't display self: {}", e);
FmtError::default()
})
.and_then(|s| write!(fmt, "{}", s))
}
}
impl FromStr for Manifest {
type Err = DeserializeError;
#[inline]
fn from_str(s: &str) -> Result<Self, Self::Err> {
toml::from_str(s)
}
}
/// Builder for creating new `Manifest`s.
#[derive(Clone, Debug)]
pub struct ManifestBuilder {
package: Result<Package, ()>,
env: BTreeMap<String, String>,
sources: Sources,
outputs: Result<Outputs, ()>,
}
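// A minimal builder sketch, mirroring the doc examples above (the hash value is the documented
// placeholder, not a real digest):
//
// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
//     .finish()
//     .unwrap();
// assert_eq!(manifest.compute_id(), "[email protected]");
//
// Runtime/build/dev dependencies, extra outputs, and sources can be chained in before `finish()`.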
impl ManifestBuilder {
/// Creates a `Manifest` with the given name, version, default output [`Hash`], and references.
///
/// [`Hash`]: ../struct.Hash.html
pub fn new<T, U>(name: T, version: T, default_output_hash: T, refs: U) -> Self
where
T: AsRef<str>,
U: IntoIterator<Item = OutputId>,
{
let package = name.as_ref().parse().map(|name| Package {
name,
version: version.as_ref().into(),
dependencies: BTreeSet::new(),
build_dependencies: BTreeSet::new(),
dev_dependencies: BTreeSet::new(),
});
let outputs = default_output_hash
.as_ref()
.parse()
.map(|hash| Outputs::new(hash, refs));
ManifestBuilder {
package,
env: BTreeMap::new(),
sources: Sources::new(),
outputs,
}
}
/// Adds a runtime dependency on `id`.
pub fn dependency(mut self, id: ManifestId) -> Self {
if let Ok(ref mut p) = self.package {
p.dependencies.insert(id);
}
self
}
/// Adds a build dependency on `id`.
///
/// # Laziness
///
/// This kind of dependency is only downloaded when the package is being built from source.
/// Otherwise, the dependency is ignored. Artifacts from build dependencies cannot be linked to
/// at runtime.
pub fn build_dependency(mut self, id: ManifestId) -> Self {
if let Ok(ref mut p) = self.package {
p.build_dependencies.insert(id);
}
self
}
/// Adds a test-only dependency on `id`.
///
/// # Laziness
///
/// This kind of dependency is only downloaded when the package is being built from source and
/// running tests is enabled. Otherwise, the dependency is ignored. Artifacts from dev
/// dependencies cannot be linked to at runtime, and they are never included in the final
/// output.
pub fn dev_dependency(mut self, id: ManifestId) -> Self {
if let Ok(ref mut p) = self.package {
p.dev_dependencies.insert(id);
}
self
}
/// Declares an additional build output directory produced by this manifest.
///
/// Build output directories can accept other build outputs as refs, allowing them to be
/// symlinked into the directory structure for runtime dependencies.
///
/// By default, all manifests produce a single default output. This method allows for secondary
/// "named" outputs to be added with supplementary content, e.g. `doc` for HTML documentation,
/// `man` for man pages, `debug` for debug information, etc.
pub fn output<T>(mut self, name: Name, precomputed_hash: Hash, refs: T) -> Self
where
T: IntoIterator<Item = OutputId>,
{
if let Ok(ref mut out) = self.outputs {
out.append(name, precomputed_hash, refs);
}
self
}
/// Adds an external fetchable source to this manifest.
///
/// # Laziness
///
/// Sources are only downloaded when the package is being built from source. Otherwise, the
/// sources are essentially ignored.
pub fn source(mut self, source: Source) -> Self {
self.sources.insert(source);
self
}
/// Constructs and returns the new [`Manifest`].
///
/// If the package name is empty or contains invalid characters, or if the default output hash
/// is invalid, then this method will return `Err`.
///
/// [`Manifest`]: ./struct.Manifest.html
pub fn finish(self) -> Result<Manifest, ()> {
Ok(Manifest {
| build | identifier_name |
// file: bot.go
cmdCreate := &commands.YAGCommand{
Name: "Create",
CmdCategory: categoryRoleMenu,
Aliases: []string{"c"},
Description: "Set up a role menu.",
LongDescription: reqPerms + "Specify a message with -m to use an existing message instead of having the bot make one\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Group", Type: dcmd.String},
},
ArgSwitches: []*dcmd.ArgDef{
{Name: "m", Help: "Message ID", Type: dcmd.BigInt},
{Name: "nodm", Help: "Disable DM"},
{Name: "rr", Help: "Remove role on reaction removed"},
{Name: "skip", Help: "Number of roles to skip", Default: 0, Type: dcmd.Int},
},
RunFunc: cmdFuncRoleMenuCreate,
}
cmdRemoveRoleMenu := &commands.YAGCommand{
Name: "Remove",
CmdCategory: categoryRoleMenu,
Aliases: []string{"rm"},
Description: "Removes a rolemenu from a message.",
LongDescription: reqPerms + "The message won't be deleted and the bot will not do anything with reactions on that message\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuRemove,
}
cmdUpdate := &commands.YAGCommand{
Name: "Update",
CmdCategory: categoryRoleMenu,
Aliases: []string{"u"},
Description: reqPerms + "Updates a rolemenu, toggling the provided flags and adding missing options, as well as updating the order.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
ArgSwitches: []*dcmd.ArgDef{
{Name: "nodm", Help: "Disable DM"},
{Name: "rr", Help: "Remove role on reaction removed"},
},
RunFunc: cmdFuncRoleMenuUpdate,
}
cmdResetReactions := &commands.YAGCommand{
Name: "ResetReactions",
CmdCategory: categoryRoleMenu,
Aliases: []string{"reset"},
Description: reqPerms + "Removes all reactions on the specified menu message and re-adds them.",
LongDescription: "Can be used to fix the order after updating it.\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuResetReactions,
}
cmdEditOption := &commands.YAGCommand{
Name: "EditOption",
CmdCategory: categoryRoleMenu,
Aliases: []string{"edit"},
Description: reqPerms + "Allows you to reassign the emoji of an option, tip: use ResetReactions afterwards.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuEditOption,
}
cmdFinishSetup := &commands.YAGCommand{
Name: "Complete",
CmdCategory: categoryRoleMenu,
Aliases: []string{"finish"},
Description: reqPerms + "Marks the menu as done.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuComplete,
}
cmdListGroups := &commands.YAGCommand{
Name: "Listgroups",
CmdCategory: categoryRoleMenu,
Aliases: []string{"list", "groups"},
Description: "Lists all role groups",
RequireDiscordPerms: []int64{discordgo.PermissionManageGuild},
RunFunc: cmdFuncRoleMenuListGroups,
}
menuContainer, t := commands.CommandSystem.Root.Sub("RoleMenu", "rmenu")
t.SetEnabledInThreads(true)
menuContainer.Description = "Command for managing role menus"
const notFoundMessage = "Unknown rolemenu command, if you've used this before it was recently revamped.\nTry almost the same command but `rolemenu create ...` and `rolemenu update ...` instead (replace '...' with the rest of the command).\nSee `help rolemenu` for all rolemenu commands."
menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, notFoundMessage)
//menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, "")
menuContainer.AddCommand(cmdCreate, cmdCreate.GetTrigger())
menuContainer.AddCommand(cmdRemoveRoleMenu, cmdRemoveRoleMenu.GetTrigger())
menuContainer.AddCommand(cmdUpdate, cmdUpdate.GetTrigger())
menuContainer.AddCommand(cmdResetReactions, cmdResetReactions.GetTrigger())
menuContainer.AddCommand(cmdEditOption, cmdEditOption.GetTrigger())
menuContainer.AddCommand(cmdFinishSetup, cmdFinishSetup.GetTrigger())
menuContainer.AddCommand(cmdListGroups, cmdListGroups.GetTrigger())
commands.RegisterSlashCommandsContainer(menuContainer, true, func(gs *dstate.GuildSet) ([]int64, error) {
return nil, nil
})
}
type ScheduledMemberRoleRemoveData struct {
GuildID int64 `json:"guild_id"`
GroupID int64 `json:"group_id"`
UserID int64 `json:"user_id"`
RoleID int64 `json:"role_id"`
}
type ScheduledEventUpdateMenuMessageData struct {
GuildID int64 `json:"guild_id"`
MessageID int64 `json:"message_id"`
}
func (p *Plugin) BotInit() {
eventsystem.AddHandlerAsyncLastLegacy(p, handleReactionAddRemove, eventsystem.EventMessageReactionAdd, eventsystem.EventMessageReactionRemove)
eventsystem.AddHandlerAsyncLastLegacy(p, handleMessageRemove, eventsystem.EventMessageDelete, eventsystem.EventMessageDeleteBulk)
scheduledevents2.RegisterHandler("remove_member_role", ScheduledMemberRoleRemoveData{}, handleRemoveMemberRole)
scheduledevents2.RegisterHandler("rolemenu_update_message", ScheduledEventUpdateMenuMessageData{}, handleUpdateRolemenuMessage)
pubsub.AddHandler("role_commands_evict_menus", func(evt *pubsub.Event) {
ClearRolemenuCache(evt.TargetGuildInt)
recentMenusTracker.GuildReset(evt.TargetGuildInt)
}, nil)
}
func CmdFuncRole(parsed *dcmd.Data) (interface{}, error) {
if parsed.Args[0].Value == nil {
return CmdFuncListCommands(parsed)
}
given, err := FindToggleRole(parsed.Context(), parsed.GuildData.MS, parsed.Args[0].Str())
if err != nil {
if err == sql.ErrNoRows {
resp, err := CmdFuncListCommands(parsed)
if v, ok := resp.(string); ok {
return "Role not found, " + v, err
}
return resp, err
}
return HumanizeAssignError(parsed.GuildData.GS, err)
}
go analytics.RecordActiveUnit(parsed.GuildData.GS.ID, &Plugin{}, "cmd_used")
if given {
return "Gave you the role!", nil
}
return "Took away your role!", nil
}
func HumanizeAssignError(guild *dstate.GuildSet, err error) (string, error) {
if IsRoleCommandError(err) {
if roleError, ok := err.(*RoleError); ok {
| return err.Error(), nil
}
if code, msg := common.DiscordError(err); code != 0 {
if code == discordgo.ErrCodeMissingPermissions {
return "The bot is below the role, contact the server admin", err
} else if code == discordgo.ErrCodeMissingAccess {
return "Bot does not have enough permissions to assign you this role, contact the server admin", err
}
return "An error occurred while assigning the role: " + msg, err
}
return "An error occurred while assigning the role", err
}
func CmdFuncListCommands(parsed *dcmd.Data) (interface{}, error) {
_, grouped, ungrouped, err := GetAllRoleCommandsSorted(parsed.Context(), parsed | return roleError.PrettyError(guild.Roles), nil
}
| conditional_block |
bot.go | .YAGCommand{
Name: "Create",
CmdCategory: categoryRoleMenu,
Aliases: []string{"c"},
Description: "Set up a role menu.",
LongDescription: reqPerms + "Specify a message with -m to use an existing message instead of having the bot make one\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Group", Type: dcmd.String},
},
ArgSwitches: []*dcmd.ArgDef{
{Name: "m", Help: "Message ID", Type: dcmd.BigInt},
{Name: "nodm", Help: "Disable DM"},
{Name: "rr", Help: "Remove role on reaction removed"},
{Name: "skip", Help: "Number of roles to skip", Default: 0, Type: dcmd.Int},
},
RunFunc: cmdFuncRoleMenuCreate,
}
cmdRemoveRoleMenu := &commands.YAGCommand{
Name: "Remove",
CmdCategory: categoryRoleMenu,
Aliases: []string{"rm"},
Description: "Removes a rolemenu from a message.",
LongDescription: reqPerms + "The message won't be deleted and the bot will not do anything with reactions on that message\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuRemove,
}
cmdUpdate := &commands.YAGCommand{
Name: "Update",
CmdCategory: categoryRoleMenu,
Aliases: []string{"u"},
Description: reqPerms + "Updates a rolemenu, toggling the provided flags and adding missing options, aswell as updating the order.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
ArgSwitches: []*dcmd.ArgDef{
{Name: "nodm", Help: "Disable DM"},
{Name: "rr", Help: "Remove role on reaction removed"},
},
RunFunc: cmdFuncRoleMenuUpdate,
}
cmdResetReactions := &commands.YAGCommand{
Name: "ResetReactions",
CmdCategory: categoryRoleMenu,
Aliases: []string{"reset"},
Description: reqPerms + "Removes all reactions on the specified menu message and re-adds them.",
LongDescription: "Can be used to fix the order after updating it.\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuResetReactions,
}
cmdEditOption := &commands.YAGCommand{
Name: "EditOption",
CmdCategory: categoryRoleMenu,
Aliases: []string{"edit"},
Description: reqPerms + "Allows you to reassign the emoji of an option, tip: use ResetReactions afterwards.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuEditOption,
}
cmdFinishSetup := &commands.YAGCommand{
Name: "Complete",
CmdCategory: categoryRoleMenu,
Aliases: []string{"finish"},
Description: reqPerms + "Marks the menu as done.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuComplete,
}
cmdListGroups := &commands.YAGCommand{
Name: "Listgroups",
CmdCategory: categoryRoleMenu,
Aliases: []string{"list", "groups"},
Description: "Lists all role groups",
RequireDiscordPerms: []int64{discordgo.PermissionManageGuild},
RunFunc: cmdFuncRoleMenuListGroups,
}
menuContainer, t := commands.CommandSystem.Root.Sub("RoleMenu", "rmenu")
t.SetEnabledInThreads(true)
menuContainer.Description = "Command for managing role menus"
const notFoundMessage = "Unknown rolemenu command, if you've used this before it was recently revamped.\nTry almost the same command but `rolemenu create ...` and `rolemenu update ...` instead (replace '...' with the rest of the command).\nSee `help rolemenu` for all rolemenu commands."
menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, notFoundMessage)
//menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, "")
menuContainer.AddCommand(cmdCreate, cmdCreate.GetTrigger())
menuContainer.AddCommand(cmdRemoveRoleMenu, cmdRemoveRoleMenu.GetTrigger())
menuContainer.AddCommand(cmdUpdate, cmdUpdate.GetTrigger())
menuContainer.AddCommand(cmdResetReactions, cmdResetReactions.GetTrigger())
menuContainer.AddCommand(cmdEditOption, cmdEditOption.GetTrigger())
menuContainer.AddCommand(cmdFinishSetup, cmdFinishSetup.GetTrigger())
menuContainer.AddCommand(cmdListGroups, cmdListGroups.GetTrigger())
commands.RegisterSlashCommandsContainer(menuContainer, true, func(gs *dstate.GuildSet) ([]int64, error) {
return nil, nil
})
}
type ScheduledMemberRoleRemoveData struct {
GuildID int64 `json:"guild_id"`
GroupID int64 `json:"group_id"`
UserID int64 `json:"user_id"`
RoleID int64 `json:"role_id"`
}
type ScheduledEventUpdateMenuMessageData struct {
GuildID int64 `json:"guild_id"`
MessageID int64 `json:"message_id"`
}
func (p *Plugin) BotInit() {
eventsystem.AddHandlerAsyncLastLegacy(p, handleReactionAddRemove, eventsystem.EventMessageReactionAdd, eventsystem.EventMessageReactionRemove)
eventsystem.AddHandlerAsyncLastLegacy(p, handleMessageRemove, eventsystem.EventMessageDelete, eventsystem.EventMessageDeleteBulk)
scheduledevents2.RegisterHandler("remove_member_role", ScheduledMemberRoleRemoveData{}, handleRemoveMemberRole)
scheduledevents2.RegisterHandler("rolemenu_update_message", ScheduledEventUpdateMenuMessageData{}, handleUpdateRolemenuMessage)
pubsub.AddHandler("role_commands_evict_menus", func(evt *pubsub.Event) {
ClearRolemenuCache(evt.TargetGuildInt)
recentMenusTracker.GuildReset(evt.TargetGuildInt)
}, nil)
}
func Cmd | rsed *dcmd.Data) (interface{}, error) {
if parsed.Args[0].Value == nil {
return CmdFuncListCommands(parsed)
}
given, err := FindToggleRole(parsed.Context(), parsed.GuildData.MS, parsed.Args[0].Str())
if err != nil {
if err == sql.ErrNoRows {
resp, err := CmdFuncListCommands(parsed)
if v, ok := resp.(string); ok {
return "Role not found, " + v, err
}
return resp, err
}
return HumanizeAssignError(parsed.GuildData.GS, err)
}
go analytics.RecordActiveUnit(parsed.GuildData.GS.ID, &Plugin{}, "cmd_used")
if given {
return "Gave you the role!", nil
}
return "Took away your role!", nil
}
func HumanizeAssignError(guild *dstate.GuildSet, err error) (string, error) {
if IsRoleCommandError(err) {
if roleError, ok := err.(*RoleError); ok {
return roleError.PrettyError(guild.Roles), nil
}
return err.Error(), nil
}
if code, msg := common.DiscordError(err); code != 0 {
if code == discordgo.ErrCodeMissingPermissions {
return "The bot is below the role, contact the server admin", err
} else if code == discordgo.ErrCodeMissingAccess {
return "Bot does not have enough permissions to assign you this role, contact the server admin", err
}
return "An error occurred while assigning the role: " + msg, err
}
return "An error occurred while assigning the role", err
}
func CmdFuncListCommands(parsed *dcmd.Data) (interface{}, error) {
_, grouped, ungrouped, err := GetAllRoleCommandsSorted(parsed.Context(), | FuncRole(pa | identifier_name |
bot.go | .YAGCommand{
Name: "Create",
CmdCategory: categoryRoleMenu,
Aliases: []string{"c"},
Description: "Set up a role menu.",
LongDescription: reqPerms + "Specify a message with -m to use an existing message instead of having the bot make one\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Group", Type: dcmd.String},
}, | {Name: "skip", Help: "Number of roles to skip", Default: 0, Type: dcmd.Int},
},
RunFunc: cmdFuncRoleMenuCreate,
}
cmdRemoveRoleMenu := &commands.YAGCommand{
Name: "Remove",
CmdCategory: categoryRoleMenu,
Aliases: []string{"rm"},
Description: "Removes a rolemenu from a message.",
LongDescription: reqPerms + "The message won't be deleted and the bot will not do anything with reactions on that message\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuRemove,
}
cmdUpdate := &commands.YAGCommand{
Name: "Update",
CmdCategory: categoryRoleMenu,
Aliases: []string{"u"},
Description: reqPerms + "Updates a rolemenu, toggling the provided flags and adding missing options, aswell as updating the order.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
ArgSwitches: []*dcmd.ArgDef{
{Name: "nodm", Help: "Disable DM"},
{Name: "rr", Help: "Remove role on reaction removed"},
},
RunFunc: cmdFuncRoleMenuUpdate,
}
cmdResetReactions := &commands.YAGCommand{
Name: "ResetReactions",
CmdCategory: categoryRoleMenu,
Aliases: []string{"reset"},
Description: reqPerms + "Removes all reactions on the specified menu message and re-adds them.",
LongDescription: "Can be used to fix the order after updating it.\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuResetReactions,
}
cmdEditOption := &commands.YAGCommand{
Name: "EditOption",
CmdCategory: categoryRoleMenu,
Aliases: []string{"edit"},
Description: reqPerms + "Allows you to reassign the emoji of an option, tip: use ResetReactions afterwards.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuEditOption,
}
cmdFinishSetup := &commands.YAGCommand{
Name: "Complete",
CmdCategory: categoryRoleMenu,
Aliases: []string{"finish"},
Description: reqPerms + "Marks the menu as done.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuComplete,
}
cmdListGroups := &commands.YAGCommand{
Name: "Listgroups",
CmdCategory: categoryRoleMenu,
Aliases: []string{"list", "groups"},
Description: "Lists all role groups",
RequireDiscordPerms: []int64{discordgo.PermissionManageGuild},
RunFunc: cmdFuncRoleMenuListGroups,
}
menuContainer, t := commands.CommandSystem.Root.Sub("RoleMenu", "rmenu")
t.SetEnabledInThreads(true)
menuContainer.Description = "Command for managing role menus"
const notFoundMessage = "Unknown rolemenu command, if you've used this before it was recently revamped.\nTry almost the same command but `rolemenu create ...` and `rolemenu update ...` instead (replace '...' with the rest of the command).\nSee `help rolemenu` for all rolemenu commands."
menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, notFoundMessage)
//menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, "")
menuContainer.AddCommand(cmdCreate, cmdCreate.GetTrigger())
menuContainer.AddCommand(cmdRemoveRoleMenu, cmdRemoveRoleMenu.GetTrigger())
menuContainer.AddCommand(cmdUpdate, cmdUpdate.GetTrigger())
menuContainer.AddCommand(cmdResetReactions, cmdResetReactions.GetTrigger())
menuContainer.AddCommand(cmdEditOption, cmdEditOption.GetTrigger())
menuContainer.AddCommand(cmdFinishSetup, cmdFinishSetup.GetTrigger())
menuContainer.AddCommand(cmdListGroups, cmdListGroups.GetTrigger())
commands.RegisterSlashCommandsContainer(menuContainer, true, func(gs *dstate.GuildSet) ([]int64, error) {
return nil, nil
})
}
type ScheduledMemberRoleRemoveData struct {
GuildID int64 `json:"guild_id"`
GroupID int64 `json:"group_id"`
UserID int64 `json:"user_id"`
RoleID int64 `json:"role_id"`
}
type ScheduledEventUpdateMenuMessageData struct {
GuildID int64 `json:"guild_id"`
MessageID int64 `json:"message_id"`
}
func (p *Plugin) BotInit() {
eventsystem.AddHandlerAsyncLastLegacy(p, handleReactionAddRemove, eventsystem.EventMessageReactionAdd, eventsystem.EventMessageReactionRemove)
eventsystem.AddHandlerAsyncLastLegacy(p, handleMessageRemove, eventsystem.EventMessageDelete, eventsystem.EventMessageDeleteBulk)
scheduledevents2.RegisterHandler("remove_member_role", ScheduledMemberRoleRemoveData{}, handleRemoveMemberRole)
scheduledevents2.RegisterHandler("rolemenu_update_message", ScheduledEventUpdateMenuMessageData{}, handleUpdateRolemenuMessage)
pubsub.AddHandler("role_commands_evict_menus", func(evt *pubsub.Event) {
ClearRolemenuCache(evt.TargetGuildInt)
recentMenusTracker.GuildReset(evt.TargetGuildInt)
}, nil)
}
func CmdFuncRole(parsed *dcmd.Data) (interface{}, error) {
if parsed.Args[0].Value == nil {
return CmdFuncListCommands(parsed)
}
given, err := FindToggleRole(parsed.Context(), parsed.GuildData.MS, parsed.Args[0].Str())
if err != nil {
if err == sql.ErrNoRows {
resp, err := CmdFuncListCommands(parsed)
if v, ok := resp.(string); ok {
return "Role not found, " + v, err
}
return resp, err
}
return HumanizeAssignError(parsed.GuildData.GS, err)
}
go analytics.RecordActiveUnit(parsed.GuildData.GS.ID, &Plugin{}, "cmd_used")
if given {
return "Gave you the role!", nil
}
return "Took away your role!", nil
}
func HumanizeAssignError(guild *dstate.GuildSet, err error) (string, error) {
if IsRoleCommandError(err) {
if roleError, ok := err.(*RoleError); ok {
return roleError.PrettyError(guild.Roles), nil
}
return err.Error(), nil
}
if code, msg := common.DiscordError(err); code != 0 {
if code == discordgo.ErrCodeMissingPermissions {
return "The bot is below the role, contact the server admin", err
} else if code == discordgo.ErrCodeMissingAccess {
return "Bot does not have enough permissions to assign you this role, contact the server admin", err
}
return "An error occurred while assigning the role: " + msg, err
}
return "An error occurred while assigning the role", err
}
func CmdFuncListCommands(parsed *dcmd.Data) (interface{}, error) {
_, grouped, ungrouped, err := GetAllRoleCommandsSorted(parsed.Context(), parsed.G | ArgSwitches: []*dcmd.ArgDef{
{Name: "m", Help: "Message ID", Type: dcmd.BigInt},
{Name: "nodm", Help: "Disable DM"},
{Name: "rr", Help: "Remove role on reaction removed"}, | random_line_split |
bot.go | after updating it.\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuResetReactions,
}
cmdEditOption := &commands.YAGCommand{
Name: "EditOption",
CmdCategory: categoryRoleMenu,
Aliases: []string{"edit"},
Description: reqPerms + "Allows you to reassign the emoji of an option, tip: use ResetReactions afterwards.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuEditOption,
}
cmdFinishSetup := &commands.YAGCommand{
Name: "Complete",
CmdCategory: categoryRoleMenu,
Aliases: []string{"finish"},
Description: reqPerms + "Marks the menu as done.",
LongDescription: "\n\n" + msgIDDocs,
RequireDiscordPerms: []int64{discordgo.PermissionManageServer},
RequiredArgs: 1,
Arguments: []*dcmd.ArgDef{
{Name: "Message-ID", Type: dcmd.BigInt},
},
RunFunc: cmdFuncRoleMenuComplete,
}
cmdListGroups := &commands.YAGCommand{
Name: "Listgroups",
CmdCategory: categoryRoleMenu,
Aliases: []string{"list", "groups"},
Description: "Lists all role groups",
RequireDiscordPerms: []int64{discordgo.PermissionManageGuild},
RunFunc: cmdFuncRoleMenuListGroups,
}
menuContainer, t := commands.CommandSystem.Root.Sub("RoleMenu", "rmenu")
t.SetEnabledInThreads(true)
menuContainer.Description = "Command for managing role menus"
const notFoundMessage = "Unknown rolemenu command, if you've used this before it was recently revamped.\nTry almost the same command but `rolemenu create ...` and `rolemenu update ...` instead (replace '...' with the rest of the command).\nSee `help rolemenu` for all rolemenu commands."
menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, notFoundMessage)
//menuContainer.NotFound = commands.CommonContainerNotFoundHandler(menuContainer, "")
menuContainer.AddCommand(cmdCreate, cmdCreate.GetTrigger())
menuContainer.AddCommand(cmdRemoveRoleMenu, cmdRemoveRoleMenu.GetTrigger())
menuContainer.AddCommand(cmdUpdate, cmdUpdate.GetTrigger())
menuContainer.AddCommand(cmdResetReactions, cmdResetReactions.GetTrigger())
menuContainer.AddCommand(cmdEditOption, cmdEditOption.GetTrigger())
menuContainer.AddCommand(cmdFinishSetup, cmdFinishSetup.GetTrigger())
menuContainer.AddCommand(cmdListGroups, cmdListGroups.GetTrigger())
commands.RegisterSlashCommandsContainer(menuContainer, true, func(gs *dstate.GuildSet) ([]int64, error) {
return nil, nil
})
}
type ScheduledMemberRoleRemoveData struct {
GuildID int64 `json:"guild_id"`
GroupID int64 `json:"group_id"`
UserID int64 `json:"user_id"`
RoleID int64 `json:"role_id"`
}
type ScheduledEventUpdateMenuMessageData struct {
GuildID int64 `json:"guild_id"`
MessageID int64 `json:"message_id"`
}
func (p *Plugin) BotInit() {
eventsystem.AddHandlerAsyncLastLegacy(p, handleReactionAddRemove, eventsystem.EventMessageReactionAdd, eventsystem.EventMessageReactionRemove)
eventsystem.AddHandlerAsyncLastLegacy(p, handleMessageRemove, eventsystem.EventMessageDelete, eventsystem.EventMessageDeleteBulk)
scheduledevents2.RegisterHandler("remove_member_role", ScheduledMemberRoleRemoveData{}, handleRemoveMemberRole)
scheduledevents2.RegisterHandler("rolemenu_update_message", ScheduledEventUpdateMenuMessageData{}, handleUpdateRolemenuMessage)
pubsub.AddHandler("role_commands_evict_menus", func(evt *pubsub.Event) {
ClearRolemenuCache(evt.TargetGuildInt)
recentMenusTracker.GuildReset(evt.TargetGuildInt)
}, nil)
}
func CmdFuncRole(parsed *dcmd.Data) (interface{}, error) {
if parsed.Args[0].Value == nil {
return CmdFuncListCommands(parsed)
}
given, err := FindToggleRole(parsed.Context(), parsed.GuildData.MS, parsed.Args[0].Str())
if err != nil {
if err == sql.ErrNoRows {
resp, err := CmdFuncListCommands(parsed)
if v, ok := resp.(string); ok {
return "Role not found, " + v, err
}
return resp, err
}
return HumanizeAssignError(parsed.GuildData.GS, err)
}
go analytics.RecordActiveUnit(parsed.GuildData.GS.ID, &Plugin{}, "cmd_used")
if given {
return "Gave you the role!", nil
}
return "Took away your role!", nil
}
func HumanizeAssignError(guild *dstate.GuildSet, err error) (string, error) {
if IsRoleCommandError(err) {
if roleError, ok := err.(*RoleError); ok {
return roleError.PrettyError(guild.Roles), nil
}
return err.Error(), nil
}
if code, msg := common.DiscordError(err); code != 0 {
if code == discordgo.ErrCodeMissingPermissions {
return "The bot is below the role, contact the server admin", err
} else if code == discordgo.ErrCodeMissingAccess {
return "Bot does not have enough permissions to assign you this role, contact the server admin", err
}
return "An error occurred while assigning the role: " + msg, err
}
return "An error occurred while assigning the role", err
}
func CmdFuncListCommands(parsed *dcmd.Data) (interface{}, error) {
_, grouped, ungrouped, err := GetAllRoleCommandsSorted(parsed.Context(), parsed.GuildData.GS.ID)
if err != nil {
return "Failed retrieving role commands", err
}
output := "Here is a list of available roles:\n"
didListCommands := false
for group, cmds := range grouped {
if len(cmds) < 1 {
continue
}
didListCommands = true
output += "**" + group.Name + "**\n"
output += StringCommands(cmds)
output += "\n"
}
if len(ungrouped) > 0 {
didListCommands = true
output += "**Ungrouped roles**\n"
output += StringCommands(ungrouped)
}
if !didListCommands {
output += "No role commands (self assignable roles) set up. You can set them up in the control panel."
}
return output, nil
}
// StringCommands pretty formats a bunch of commands into a string
func StringCommands(cmds []*models.RoleCommand) string {
stringedCommands := make([]int64, 0, len(cmds))
output := "```\n"
for _, cmd := range cmds {
if common.ContainsInt64Slice(stringedCommands, cmd.Role) {
continue
}
output += cmd.Name
// Check for duplicate roles
for _, cmd2 := range cmds {
if cmd.Role == cmd2.Role && cmd.Name != cmd2.Name {
output += "/ " + cmd2.Name
}
}
output += "\n"
stringedCommands = append(stringedCommands, cmd.Role)
}
return output + "```\n"
}
func handleUpdateRolemenuMessage(evt *schEvtsModels.ScheduledEvent, data interface{}) (retry bool, err error) {
dataCast := data.(*ScheduledEventUpdateMenuMessageData)
fullMenu, err := FindRolemenuFull(context.Background(), dataCast.MessageID, dataCast.GuildID)
if err != nil {
return false, err
}
err = UpdateRoleMenuMessage(context.Background(), fullMenu)
if err != nil {
return false, err
}
return false, nil
}
func handleRemoveMemberRole(evt *schEvtsModels.ScheduledEvent, data interface{}) (retry bool, err error) {
| dataCast := data.(*ScheduledMemberRoleRemoveData)
err = common.BotSession.GuildMemberRoleRemove(dataCast.GuildID, dataCast.UserID, dataCast.RoleID)
if err != nil {
return scheduledevents2.CheckDiscordErrRetry(err), err
}
// remove the reaction
menus, err := models.RoleMenus(
qm.Where("role_group_id = ? AND guild_id =?", dataCast.GroupID, dataCast.GuildID),
qm.OrderBy("message_id desc"),
qm.Limit(10),
qm.Load("RoleMenuOptions.RoleCommand")).AllG(context.Background())
if err != nil {
return false, err
}
OUTER:
for _, v := range menus {
for _, opt := range v.R.RoleMenuOptions {
if opt.R.RoleCommand.Role == dataCast.RoleID { | identifier_body |
|
lib.rs | ) -> Self::IntoIter {
BorrowedVectorIterator {
vector: &self,
index: -1isize as usize,
index_back: self.size,
}
}
}
impl<'a, T> IntoIterator for &'a mut Vector<T> {
type Item = &'a mut T;
type IntoIter = BorrowedVectorIteratorMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {
let size = self.size;
BorrowedVectorIteratorMut {
vector: self,
index: -1isize as usize,
index_back: size,
}
}
}
impl<T> FromIterator<T> for Vector<T> {
fn from_iter<A: IntoIterator<Item = T>>(iter: A) -> Self {
let iter = iter.into_iter();
let (min, _) = iter.size_hint();
let mut vec = Vector::with_capacity(min);
for item in iter {
vec.push(item);
}
vec
}
}
impl<T> Drop for Vector<T> {
fn drop(&mut self) {
//Outside the loop to handle zero size types
self.clear();
if let Some(ptr) = self.data {
let ptr = ptr.as_ptr();
let layout = Layout::array::<T>(self.capacity)
.expect("Cannot recreate layout. Has capacity been changed?");
//Safety: Capacity is only changed on reallocation, pointer is trusted
// and iterators return to vectors for deallocation.
unsafe { alloc::dealloc(ptr as *mut u8, layout) }
}
}
}
impl<T> Vector<T> {
///Creates a new vector. Does not allocate till it's needed.
pub fn new() -> Self {
let capacity = if mem::size_of::<T>() == 0 {
usize::MAX
} else {
0
};
Vector {
data: None,
size: 0,
capacity,
}
}
///Creates a new vector with a preallocated buffer with space for `cap` elements.
pub fn with_capacity(cap: usize) -> Self {
let mut vec = Vector::new();
if mem::size_of::<T>() != 0 {
vec.reserve(cap);
}
vec
}
///Checks if the vector has no elements in it. Does not check if there is an allocated buffer or not.
pub fn is_empty(&self) -> bool {
self.size == 0
}
///Returns the amount of elements stored in the vector.
pub fn len(&self) -> usize {
self.size
}
///Allocates a new buffer for the vector of specified size.
///
/// Panics if `new_cap` is smaller than current size or overflows a `usize`. Has O(n) complexity.
fn reserve(&mut self, new_cap: usize) {
assert_ne!(
mem::size_of::<T>(),
0,
"Vector currently doesn't support storing 0 sized types"
);
let layout = Layout::array::<T>(new_cap).expect("Overflow");
//Safety: Layout is type and capacity checked.
let new_ptr = unsafe { alloc::alloc(layout) as *mut T };
assert!(
new_cap >= self.size,
"New capacity can't contain current vector"
);
assert!(!new_ptr.is_null());
let new_data = NonNull::new(new_ptr);
if let Some(old_ptr) = self.data {
unsafe {
//Safety: The new allocation is a seperate allocation, so the copy is guaranteed to not overlap.
ptr::copy_nonoverlapping(old_ptr.as_ptr(), new_ptr, self.size);
//Safety: The pointer is only changed here in allocation.
alloc::dealloc(
old_ptr.as_ptr() as *mut u8,
Layout::array::<T>(self.capacity)
.expect("Cannot recreate layout? Has capacity been edited?"),
);
}
}
self.data = new_data;
self.capacity = new_cap;
}
///Allocates a new buffer for the vector that is larger by `additional` elements.
///
/// Panics if `additional` causes it to overflow a `usize`. Has O(n) complexity.
pub fn reserve_additional(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 {
return;
}
let new_cap = self
.capacity
.checked_add(additional)
.expect("New size overflowed usize");
new_cap
.checked_mul(mem::size_of::<T>())
.expect("New size overflowed usize");
self.reserve(new_cap);
}
///Inserts an element at the back of the vector.
///
/// Panics if the length of the vector is equal to usize::MAX. Has complexity O(1).
pub fn push(&mut self, elem: T) {
if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
} else if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || (mem::size_of::<T>() == 0));
//Safety: Length is checked. If the allocation was already full it is reallocated above.
unsafe {
self.as_ptr_mut()
.expect("Above assertion failed?")
.add(self.size)
.write(elem)
};
self.size += 1;
}
///Gets a reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get(&self, idx: usize) -> Option<&T> |
///Gets a mutable reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get_mut(&mut self, idx: usize) -> Option<&mut T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr_mut()?.add(idx).as_mut() }
}
///Inserts element in vector at index, moving everything after it to the right.
/// Will reallocate if length equals capacity.
///
/// Panics if the vector's length will overflow `usize::MAX`. Has O(n) complexity.
pub fn insert(&mut self, idx: usize, elem: T) {
if idx == self.size {
return self.push(elem);
}
if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
} else if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || mem::size_of::<T>() == 0);
let data_ptr = self
.as_ptr_mut()
.expect("Vector's data pointer is null despite being just checked?");
for i in (idx..self.size).rev() {
//Safety: Copies element by element within the size of the vector's allocation.
// `self.size` keeps this within `self.size`.
unsafe { data_ptr.add(i + 1).write(data_ptr.add(i).read()) };
}
//Safety: The element that was here has been moved, this is guaranteed in bounds.
unsafe { data_ptr.add(idx).write(elem) };
self.size += 1;
}
///Removes the last element in the vector
///
/// Returns `None` if the vector is empty. Has O(1) complexity.
pub fn pop(&mut self) -> Option<T> {
if self.size == 0 {
return None;
}
self.size -= 1;
let data_ptr = self.as_ptr_mut()?;
//Safety: Existing pointer is trusted.
Some(unsafe { data_ptr.add(self.size).read() })
}
///Removes the item at index, moving everything after that by one step to the left.
/// If you're removing several elements, consider using the `retain` function for O(n)
/// complexity instead of O(n²)
///
/// Panics if index >= to the vector's length. Has O(n) complexity.
pub fn remove(&mut self, idx: usize) -> T {
if idx >= self.size {
panic!("Index was out of bounds!");
}
if idx == self.size {
return self.pop().expect("Vector is empty");
}
if self.size == 0 || (self.data.is_none() && mem::size_of::<T | {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr()?.add(idx).as_ref() }
} | identifier_body |
lib.rs | (self) -> Self::IntoIter {
BorrowedVectorIterator {
vector: &self,
index: -1isize as usize,
index_back: self.size,
}
}
}
impl<'a, T> IntoIterator for &'a mut Vector<T> {
type Item = &'a mut T;
type IntoIter = BorrowedVectorIteratorMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {
let size = self.size;
BorrowedVectorIteratorMut {
vector: self,
index: -1isize as usize,
index_back: size,
}
}
}
impl<T> FromIterator<T> for Vector<T> {
fn from_iter<A: IntoIterator<Item = T>>(iter: A) -> Self {
let iter = iter.into_iter();
let (min, _) = iter.size_hint();
let mut vec = Vector::with_capacity(min);
for item in iter {
vec.push(item);
}
vec
}
}
impl<T> Drop for Vector<T> {
fn drop(&mut self) {
//Outside the loop to handle zero size types
self.clear();
if let Some(ptr) = self.data {
let ptr = ptr.as_ptr();
let layout = Layout::array::<T>(self.capacity)
.expect("Cannot recreate layout. Has capacity been changed?");
//Safety: Capacity is only changed on reallocation, pointer is trusted
// and iterators return to vectors for deallocation.
unsafe { alloc::dealloc(ptr as *mut u8, layout) }
}
}
}
impl<T> Vector<T> {
///Creates a new vector. Does not allocate till it's needed.
pub fn new() -> Self {
let capacity = if mem::size_of::<T>() == 0 {
usize::MAX
} else {
0
};
Vector {
data: None,
size: 0,
capacity,
}
}
///Creates a new vector with a preallocated buffer with space for `cap` elements.
pub fn with_capacity(cap: usize) -> Self {
let mut vec = Vector::new();
if mem::size_of::<T>() != 0 {
vec.reserve(cap);
}
vec
}
///Checks if the vector has no elements in it. Does not check if there is an allocated buffer or not.
pub fn is_empty(&self) -> bool {
self.size == 0
}
///Returns the amount of elements stored in the vector.
pub fn len(&self) -> usize {
self.size
}
///Allocates a new buffer for the vector of specified size.
///
/// Panics if `new_cap` is smaller than current size or overflows a `usize`. Has O(n) complexity.
fn reserve(&mut self, new_cap: usize) {
assert_ne!(
mem::size_of::<T>(),
0,
"Vector currently doesn't support storing 0 sized types"
);
let layout = Layout::array::<T>(new_cap).expect("Overflow");
//Safety: Layout is type and capacity checked.
let new_ptr = unsafe { alloc::alloc(layout) as *mut T };
assert!(
new_cap >= self.size,
"New capacity can't contain current vector"
);
assert!(!new_ptr.is_null());
let new_data = NonNull::new(new_ptr);
if let Some(old_ptr) = self.data {
unsafe {
//Safety: The new allocation is a seperate allocation, so the copy is guaranteed to not overlap.
ptr::copy_nonoverlapping(old_ptr.as_ptr(), new_ptr, self.size);
//Safety: The pointer is only changed here in allocation.
alloc::dealloc(
old_ptr.as_ptr() as *mut u8,
Layout::array::<T>(self.capacity)
.expect("Cannot recreate layout? Has capacity been edited?"),
);
}
}
self.data = new_data;
self.capacity = new_cap;
}
///Allocates a new buffer for the vector that is larger by `additional` elements.
///
/// Panics if `additional` causes it to overflow a `usize`. Has O(n) complexity.
pub fn reserve_additional(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 {
return;
}
let new_cap = self
.capacity
.checked_add(additional)
.expect("New size overflowed usize");
new_cap
.checked_mul(mem::size_of::<T>())
.expect("New size overflowed usize");
self.reserve(new_cap);
}
///Inserts an element at the back of the vector.
///
/// Panics if the length of the vector is equal to usize::MAX. Has complexity O(1).
pub fn push(&mut self, elem: T) {
if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
} else if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || (mem::size_of::<T>() == 0));
//Safety: Length is checked. If the allocation was already full it is reallocated above.
unsafe {
self.as_ptr_mut()
.expect("Above assertion failed?")
.add(self.size)
.write(elem)
};
self.size += 1;
}
///Gets a reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get(&self, idx: usize) -> Option<&T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr()?.add(idx).as_ref() }
}
///Gets a mutable reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get_mut(&mut self, idx: usize) -> Option<&mut T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr_mut()?.add(idx).as_mut() }
}
///Inserts element in vector at index, moving everything after it to the right.
/// Will reallocate if length equals capacity.
///
/// Panics if the vector's length will overflow `usize::MAX`. Has O(n) complexity.
pub fn | (&mut self, idx: usize, elem: T) {
if idx == self.size {
return self.push(elem);
}
if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
} else if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || mem::size_of::<T>() == 0);
let data_ptr = self
.as_ptr_mut()
.expect("Vector's data pointer is null despite being just checked?");
for i in (idx..self.size).rev() {
//Safety: Copies element by element within the size of the vector's allocation.
// `self.size` keeps this within `self.size`.
unsafe { data_ptr.add(i + 1).write(data_ptr.add(i).read()) };
}
//Safety: The element that was here has been moved, this is guaranteed in bounds.
unsafe { data_ptr.add(idx).write(elem) };
self.size += 1;
}
///Removes the last element in the vector
///
/// Returns `None` if the vector is empty. Has O(1) complexity.
pub fn pop(&mut self) -> Option<T> {
if self.size == 0 {
return None;
}
self.size -= 1;
let data_ptr = self.as_ptr_mut()?;
//Safety: Existing pointer is trusted.
Some(unsafe { data_ptr.add(self.size).read() })
}
///Removes the item at index, moving everything after that by one step to the left.
/// If you're removing several elements, consider using the `retain` function for O(n)
/// complexity instead of O(n²)
///
/// Panics if index >= to the vector's length. Has O(n) complexity.
pub fn remove(&mut self, idx: usize) -> T {
if idx >= self.size {
panic!("Index was out of bounds!");
}
if idx == self.size {
return self.pop().expect("Vector is empty");
}
if self.size == 0 || (self.data.is_none() && mem::size_of::<T | insert | identifier_name |
lib.rs | pub fn reserve_additional(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 {
return;
}
let new_cap = self
.capacity
.checked_add(additional)
.expect("New size overflowed usize");
new_cap
.checked_mul(mem::size_of::<T>())
.expect("New size overflowed usize");
self.reserve(new_cap);
}
///Inserts an element at the back of the vector.
///
/// Panics if the length of the vector is equal to usize::MAX. Has complexity O(1).
pub fn push(&mut self, elem: T) {
if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
} else if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || (mem::size_of::<T>() == 0));
//Safety: Length is checked. If the allocation was already full it is reallocated above.
unsafe {
self.as_ptr_mut()
.expect("Above assertion failed?")
.add(self.size)
.write(elem)
};
self.size += 1;
}
///Gets a reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get(&self, idx: usize) -> Option<&T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr()?.add(idx).as_ref() }
}
///Gets a mutable reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get_mut(&mut self, idx: usize) -> Option<&mut T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr_mut()?.add(idx).as_mut() }
}
///Inserts element in vector at index, moving everything after it to the right.
/// Will reallocate if length equals capacity.
///
/// Panics if the vector's length will overflow `usize::MAX`. Has O(n) complexity.
pub fn insert(&mut self, idx: usize, elem: T) {
if idx == self.size {
return self.push(elem);
}
if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
} else if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || mem::size_of::<T>() == 0);
let data_ptr = self
.as_ptr_mut()
.expect("Vector's data pointer is null despite being just checked?");
for i in (idx..self.size).rev() {
//Safety: Copies element by element within the size of the vector's allocation.
// `self.size` keeps this within `self.size`.
unsafe { data_ptr.add(i + 1).write(data_ptr.add(i).read()) };
}
//Safety: The element that was here has been moved, this is guaranteed in bounds.
unsafe { data_ptr.add(idx).write(elem) };
self.size += 1;
}
///Removes the last element in the vector
///
/// Returns `None` if the vector is empty. Has O(1) complexity.
pub fn pop(&mut self) -> Option<T> {
if self.size == 0 {
return None;
}
self.size -= 1;
let data_ptr = self.as_ptr_mut()?;
//Safety: Existing pointer is trusted.
Some(unsafe { data_ptr.add(self.size).read() })
}
///Removes the item at index, moving everything after that by one step to the left.
/// If you're removing several elements, consider using the `retain` function for O(n)
/// complexity instead of O(n²)
///
/// Panics if index >= to the vector's length. Has O(n) complexity.
pub fn remove(&mut self, idx: usize) -> T {
if idx >= self.size {
panic!("Index was out of bounds!");
}
if idx == self.size {
return self.pop().expect("Vector is empty");
}
if self.size == 0 || (self.data.is_none() && mem::size_of::<T>() != 0) {
panic!("Vector is empty");
}
let data_ptr = self.as_ptr_mut().expect("Check above was incorrect?");
//Safety: Index is checked and pointer is trusted.
let ret = unsafe { data_ptr.add(idx).read() };
for i in idx..(self.size - 1) {
//Safety: Copies element by element within the size of the vector's allocation.
// `self.size - 1 + 1` keeps this within `self.size`.
unsafe { data_ptr.add(i).write(data_ptr.add(i + 1).read()) };
}
self.size -= 1;
ret
}
///Removes every element in the vector.
///
/// Has O(n) complexity.
pub fn clear(&mut self) {
while !self.is_empty() {
self.pop();
}
}
///Borrows the vector's allocation as an immutable slice.
///
/// Has complexity O(1).
pub fn as_slice(&self) -> &[T] {
if self.data.is_some() || mem::size_of::<T>() == 0 {
//Safety: Or existing pointer and size are trusted as they can't (safely)
// be set from outside.
unsafe {
ptr::slice_from_raw_parts(
self.as_ptr().expect("Cannot get pointer to create slice"),
self.size,
)
.as_ref()
.expect("Vector's internal NonNull pointer was null?")
}
} else {
assert!(self.size == 0);
&[]
}
}
///Borrows the vector's allocation as a mutable slice.
///
/// Has complexity O(1).
pub fn as_slice_mut(&mut self) -> &mut [T] {
if self.data.is_some() || mem::size_of::<T>() == 0 {
//Safety: Or existing pointer and size are trusted as they can't (safely)
// be set from outside.
unsafe {
ptr::slice_from_raw_parts_mut(
self.as_ptr_mut()
.expect("Cannot get pointer to create slice"),
self.size,
)
.as_mut()
.expect("Vector's internal NonNull pointer was null?")
}
} else {
assert!(self.size == 0);
&mut []
}
}
///Sets the length of the vector, within the existing capacity.
///
/// Has complexity O(1).
/// # Safety
/// Panics if len is greater than the vector's capacity.
/// Exposes potentially uninitialised memory if len is greater than the vector's length.
pub unsafe fn set_len(&mut self, len: usize) {
if len > self.capacity {
panic!();
}
self.size = len;
}
///Returns an iterator over borrowed elements of the vector.
///
/// Has complexity O(1).
pub fn iter(&self) -> BorrowedVectorIterator<'_, T> {
(&self).into_iter()
}
///Returns an iterator over mutably borrowed elements of the vector.
///
/// Has complexity O(1).
pub fn iter_mut(&mut self) -> BorrowedVectorIteratorMut<'_, T> {
(self).into_iter()
}
///Returns the pointer to the allocation of the Vector or
/// `None` if nothing has been allocated yet.
///
/// Has complexity O(1).
pub fn as_ptr(&self) -> Option<*const T> {
if mem::size_of::<T>() == 0 {
Some(self as *const Vector<T> as *const T)
} else {
self.data.map(|p| p.as_ptr() as *const _)
}
}
///Returns the pointer to the allocation of the Vector or
/// `None` if nothing has been allocated yet.
///
/// Has complexity O(1).
pub fn as_ptr_mut(&mut self) -> Option<*mut T> {
if mem::size_of::<T>() == 0 {
Some(self as *mut Vector<T> as *mut T)
} else { |
self.data.map(|p| p.as_ptr())
}
| conditional_block |
|
lib.rs | (self) -> Self::IntoIter {
BorrowedVectorIterator {
vector: &self,
index: -1isize as usize,
index_back: self.size,
}
}
}
impl<'a, T> IntoIterator for &'a mut Vector<T> {
type Item = &'a mut T;
type IntoIter = BorrowedVectorIteratorMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {
let size = self.size;
BorrowedVectorIteratorMut {
vector: self,
index: -1isize as usize,
index_back: size,
}
}
}
impl<T> FromIterator<T> for Vector<T> {
fn from_iter<A: IntoIterator<Item = T>>(iter: A) -> Self {
let iter = iter.into_iter();
let (min, _) = iter.size_hint();
let mut vec = Vector::with_capacity(min);
for item in iter {
vec.push(item);
}
vec
}
}
impl<T> Drop for Vector<T> {
fn drop(&mut self) {
//Outside the loop to handle zero size types
self.clear();
if let Some(ptr) = self.data {
let ptr = ptr.as_ptr();
let layout = Layout::array::<T>(self.capacity)
.expect("Cannot recreate layout. Has capacity been changed?");
//Safety: Capacity is only changed on reallocation, pointer is trusted
// and iterators return to vectors for deallocation.
unsafe { alloc::dealloc(ptr as *mut u8, layout) }
}
}
}
impl<T> Vector<T> {
///Creates a new vector. Does not allocate till it's needed.
pub fn new() -> Self {
let capacity = if mem::size_of::<T>() == 0 {
usize::MAX
} else {
0
};
Vector {
data: None,
size: 0,
capacity,
}
}
///Creates a new vector with a preallocated buffer with space for `cap` elements.
pub fn with_capacity(cap: usize) -> Self {
let mut vec = Vector::new();
if mem::size_of::<T>() != 0 {
vec.reserve(cap);
}
vec
}
///Checks if the vector has no elements in it. Does not check if there is an allocated buffer or not.
pub fn is_empty(&self) -> bool {
self.size == 0
}
///Returns the amount of elements stored in the vector.
pub fn len(&self) -> usize {
self.size
}
///Allocates a new buffer for the vector of specified size.
///
/// Panics if `new_cap` is smaller than current size or overflows a `usize`. Has O(n) complexity.
fn reserve(&mut self, new_cap: usize) {
assert_ne!(
mem::size_of::<T>(),
0,
"Vector currently doesn't support storing 0 sized types"
);
let layout = Layout::array::<T>(new_cap).expect("Overflow");
//Safety: Layout is type and capacity checked.
let new_ptr = unsafe { alloc::alloc(layout) as *mut T };
assert!(
new_cap >= self.size,
"New capacity can't contain current vector"
);
assert!(!new_ptr.is_null());
let new_data = NonNull::new(new_ptr);
if let Some(old_ptr) = self.data {
unsafe {
//Safety: The new allocation is a seperate allocation, so the copy is guaranteed to not overlap.
ptr::copy_nonoverlapping(old_ptr.as_ptr(), new_ptr, self.size);
//Safety: The pointer is only changed here in allocation.
alloc::dealloc(
old_ptr.as_ptr() as *mut u8,
Layout::array::<T>(self.capacity)
.expect("Cannot recreate layout? Has capacity been edited?"),
);
}
}
self.data = new_data;
self.capacity = new_cap;
}
///Allocates a new buffer for the vector that is larger by `additional` elements.
///
/// Panics if `additional` causes it to overflow a `usize`. Has O(n) complexity.
pub fn reserve_additional(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 {
return;
}
let new_cap = self
.capacity
.checked_add(additional)
.expect("New size overflowed usize");
new_cap
.checked_mul(mem::size_of::<T>())
.expect("New size overflowed usize");
self.reserve(new_cap);
}
///Inserts an element at the back of the vector.
///
/// Panics if the length of the vector is equal to usize::MAX. Has complexity O(1).
pub fn push(&mut self, elem: T) {
if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
} else if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || (mem::size_of::<T>() == 0));
//Safety: Length is checked. If the allocation was already full it is reallocated above.
unsafe { | self.as_ptr_mut()
.expect("Above assertion failed?")
.add(self.size)
.write(elem)
};
self.size += 1;
}
///Gets a reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get(&self, idx: usize) -> Option<&T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr()?.add(idx).as_ref() }
}
///Gets a mutable reference to the element at index's position.
///
/// Returns `None` if index is greater than the length of the vector. Has complexity O(1).
pub fn get_mut(&mut self, idx: usize) -> Option<&mut T> {
if idx >= self.size {
return None;
}
//Safety: Index is already checked.
unsafe { self.as_ptr_mut()?.add(idx).as_mut() }
}
///Inserts element in vector at index, moving everything after it to the right.
/// Will reallocate if length equals capacity.
///
/// Panics if the vector's length will overflow `usize::MAX`. Has O(n) complexity.
pub fn insert(&mut self, idx: usize, elem: T) {
if idx == self.size {
return self.push(elem);
}
if self.size == self.capacity {
if self.capacity == usize::MAX {
panic!("Overflow");
}
self.reserve(
(self.capacity as f64 * GROWTH_RATE)
.ceil()
.min(usize::MAX as f64) as usize,
);
} else if self.data.is_none() && mem::size_of::<T>() != 0 {
self.reserve(2);
}
assert!(self.size < self.capacity);
assert!(self.data.is_some() || mem::size_of::<T>() == 0);
let data_ptr = self
.as_ptr_mut()
.expect("Vector's data pointer is null despite being just checked?");
for i in (idx..self.size).rev() {
//Safety: Copies element by element within the size of the vector's allocation.
// `self.size` keeps this within `self.size`.
unsafe { data_ptr.add(i + 1).write(data_ptr.add(i).read()) };
}
//Safety: The element that was here has been moved, this is guaranteed in bounds.
unsafe { data_ptr.add(idx).write(elem) };
self.size += 1;
}
///Removes the last element in the vector
///
/// Returns `None` if the vector is empty. Has O(1) complexity.
pub fn pop(&mut self) -> Option<T> {
if self.size == 0 {
return None;
}
self.size -= 1;
let data_ptr = self.as_ptr_mut()?;
//Safety: Existing pointer is trusted.
Some(unsafe { data_ptr.add(self.size).read() })
}
///Removes the item at index, moving everything after that by one step to the left.
/// If you're removing several elements, consider using the `retain` function for O(n)
/// complexity instead of O(n²)
///
/// Panics if index >= to the vector's length. Has O(n) complexity.
pub fn remove(&mut self, idx: usize) -> T {
if idx >= self.size {
panic!("Index was out of bounds!");
}
if idx == self.size {
return self.pop().expect("Vector is empty");
}
if self.size == 0 || (self.data.is_none() && mem::size_of::<T>() | random_line_split |
|
data.ts | "job_role": "Operator"
// },
// {
// "id": 38,
// "first_name": "Brenda",
// "last_name": "Perry",
// "email": "[email protected]",
// "gender": "Female",
// "address": "9407 6th Hill",
// "job_role": "Environmental Tech"
// },
// {
// "id": 39,
// "first_name": "Rebecca",
// "last_name": "Fox",
// "email": "[email protected]",
// "gender": "Female",
// "address": "024 Buhler Place",
// "job_role": "Software Consultant"
// },
// {
// "id": 40,
// "first_name": "Richard",
// "last_name": "Lawson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "56 Haas Street",
// "job_role": "Chief Design Engineer"
// },
// {
// "id": 41,
// "first_name": "Heather",
// "last_name": "Harris",
// "email": "[email protected]",
// "gender": "Female",
// "address": "3 Longview Point",
// "job_role": "Systems Administrator II"
// },
// {
// "id": 42,
// "first_name": "Alice",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Female",
// "address": "4 Melby Way",
// "job_role": "Social Worker"
// },
// {
// "id": 43,
// "first_name": "Russell",
// "last_name": "Collins",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4 Hermina Street",
// "job_role": "Web Developer I"
// },
// {
// "id": 44,
// "first_name": "Mark",
// "last_name": "Patterson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4949 North Place",
// "job_role": "Engineer I"
// },
// {
// "id": 45,
// "first_name": "Margaret",
// "last_name": "Walker",
// "email": "[email protected]",
// "gender": "Female",
// "address": "60 Rusk Drive",
// "job_role": "VP Sales"
// },
// {
// "id": 46,
// "first_name": "Paul",
// "last_name": "Hunter",
// "email": "[email protected]",
// "gender": "Male",
// "address": "709 Spenser Lane",
// "job_role": "VP Product Management"
// },
// {
// "id": 47,
// "first_name": "Jesse",
// "last_name": "Grant",
// "email": "[email protected]",
// "gender": "Male",
// "address": "57 Fuller Plaza",
// "job_role": "Structural Engineer"
// },
// {
// "id": 48,
// "first_name": "Kelly",
// "last_name": "Fowler",
// "email": "[email protected]",
// "gender": "Female",
// "address": "77 Eagle Crest Place",
// "job_role": "Electrical Engineer"
// },
// {
// "id": 49,
// "first_name": "Christopher",
// "last_name": "Burns",
// "email": "[email protected]",
// "gender": "Male",
// "address": "46 Michigan Place",
// "job_role": "Professor"
// },
// {
// "id": 50,
// "first_name": "Martin",
// "last_name": "Warren",
// "email": "[email protected]",
// "gender": "Male",
// "address": "23697 Ryan Road",
// "job_role": "Recruiter"
// }
// ];
constructor(http: Http) {
this.http = http;
}
login(username, password) {
let json = { username: username, password: password };
return new Promise(resolve => {
// hardcoded login
// if (username == "[email protected]" && password == "Trivento"){
// this.storage.set("username", username);
// this.storage.set("password", password);
// resolve({ success: true, errorMessage: null });
// } else {
// resolve({ success: false, errorMessage: "Inloggen mislukt. Gebruikersnaam of wachtwoord is niet correct." });
// }
this.http.post("https://lutsoft.nl/trivento/api/login/", JSON.stringify(json)).subscribe(response => {
let data = response.json();
if (data) {
if (data.hasOwnProperty("success") && data.success == true) {
this.storage.set("username", username);
this.storage.set("password", password);
resolve(data);
} else {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + data["errorMessage"] });
}
} else {
resolve({ success: false, errorMessage: "Inloggen mislukt. Geen gegevens."});
}
}, error => {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + error });
});
});
}
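// Illustrative usage sketch (not part of the original service): a login page
// could call login() roughly as below and navigate on success. The page class,
// its dataProvider/navCtrl members, showAlert helper and HomePage are assumed
// names, not defined in this file.
//
//   this.dataProvider.login(this.username, this.password).then((result: any) => {
//     if (result.success) {
//       this.navCtrl.setRoot(HomePage);
//     } else {
//       this.showAlert("Fout", result.errorMessage);
//     }
//   });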
getEmployees() {
// get Employees from local storage. Load from server if there are none
return new Promise(resolve => {
this.storage.get("employees").then(data => {
if (data) {
this.employees = JSON.parse(data);
resolve(this.employees);
} else {
// hardcoded data
// //clone to make it a unique object
// this.employees = this.cloneObject(this.defaultEmployees);
// resolve(this.employees);
this.http.get("https://lutsoft.nl/trivento/api/data/").subscribe(response => {
let data = response.json();
if (data) {
this.employees = data;
resolve(data);
} else {
resolve([]);
}
});
}
});
});
}
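// Illustrative usage sketch: a list page could load the cached (or freshly
// fetched) employees like this; employeeList and dataProvider are assumed names.
//
//   this.dataProvider.getEmployees().then((employees: any) => {
//     this.employeeList = employees;
//   });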
setEmployee(alteredEmployee) {
// search for employee and overwrite
for (let i = 0; i < this.employees.length; i++) {
  if (this.employees[i]["id"] == alteredEmployee.id) {
    this.employees[i] = alteredEmployee;
    break;
  }
}
// save data
return new Promise(resolve => {
  this.storage.set("employees", JSON.stringify(this.employees)).then(result => {
    if (!result) {
      throw new Error("Fout bij opslaan");
    }
    resolve(result);
  });
});
}
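// Illustrative usage sketch: a detail page could persist an edited employee and
// then navigate back; employee, dataProvider and navCtrl are assumed names.
//
//   this.dataProvider.setEmployee(this.employee).then(() => {
//     this.navCtrl.pop();
//   });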
filterEmployees(fullName, jobRole) | {
return this.employees.filter(employee => {
// search fullName and filter jobRole
let retVal = true;
let employeeFullName = employee.first_name + " " + employee.last_name;
if(fullName){
if(employeeFullName.toLowerCase().indexOf(fullName.toLowerCase()) == -1){
retVal = false;
}
}
if (jobRole) {
  if (employee.job_role.toLowerCase().indexOf(jobRole.toLowerCase()) == -1) {
    retVal = false;
  }
}
return retVal; | identifier_body |
|
data.ts | "email": "[email protected]",
// "gender": "Female",
// "address": "15125 Utah Circle",
// "job_role": "Structural Engineer"
// },
// {
// "id": 37,
// "first_name": "Wayne",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Male",
// "address": "6056 Clyde Gallagher Circle",
// "job_role": "Operator"
// },
// {
// "id": 38,
// "first_name": "Brenda",
// "last_name": "Perry",
// "email": "[email protected]",
// "gender": "Female",
// "address": "9407 6th Hill",
// "job_role": "Environmental Tech"
// },
// {
// "id": 39,
// "first_name": "Rebecca",
// "last_name": "Fox",
// "email": "[email protected]",
// "gender": "Female",
// "address": "024 Buhler Place",
// "job_role": "Software Consultant"
// },
// {
// "id": 40,
// "first_name": "Richard",
// "last_name": "Lawson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "56 Haas Street",
// "job_role": "Chief Design Engineer"
// },
// {
// "id": 41,
// "first_name": "Heather",
// "last_name": "Harris",
// "email": "[email protected]",
// "gender": "Female",
// "address": "3 Longview Point",
// "job_role": "Systems Administrator II"
// },
// {
// "id": 42,
// "first_name": "Alice",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Female",
// "address": "4 Melby Way",
// "job_role": "Social Worker"
// },
// {
// "id": 43,
// "first_name": "Russell",
// "last_name": "Collins",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4 Hermina Street",
// "job_role": "Web Developer I"
// },
// {
// "id": 44,
// "first_name": "Mark",
// "last_name": "Patterson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4949 North Place",
// "job_role": "Engineer I"
// },
// {
// "id": 45,
// "first_name": "Margaret",
// "last_name": "Walker",
// "email": "[email protected]",
// "gender": "Female",
// "address": "60 Rusk Drive",
// "job_role": "VP Sales"
// },
// {
// "id": 46,
// "first_name": "Paul",
// "last_name": "Hunter",
// "email": "[email protected]",
// "gender": "Male",
// "address": "709 Spenser Lane",
// "job_role": "VP Product Management"
// },
// {
// "id": 47,
// "first_name": "Jesse",
// "last_name": "Grant",
// "email": "[email protected]",
// "gender": "Male",
// "address": "57 Fuller Plaza",
// "job_role": "Structural Engineer"
// },
// {
// "id": 48,
// "first_name": "Kelly",
// "last_name": "Fowler",
// "email": "[email protected]",
// "gender": "Female",
// "address": "77 Eagle Crest Place",
// "job_role": "Electrical Engineer"
// },
// {
// "id": 49,
// "first_name": "Christopher",
// "last_name": "Burns",
// "email": "[email protected]",
// "gender": "Male",
// "address": "46 Michigan Place",
// "job_role": "Professor"
// },
// {
// "id": 50,
// "first_name": "Martin",
// "last_name": "Warren",
// "email": "[email protected]",
// "gender": "Male",
// "address": "23697 Ryan Road",
// "job_role": "Recruiter"
// }
// ];
constructor(http: Http) {
this.http = http;
}
login(username, password) {
let json = { username: username, password: password };
return new Promise(resolve => {
// hardcoded login
// if (username == "[email protected]" && password == "Trivento"){
// this.storage.set("username", username);
// this.storage.set("password", password);
// resolve({ success: true, errorMessage: null });
// } else {
// resolve({ success: false, errorMessage: "Inloggen mislukt. Gebruikersnaam of wachtwoord is niet correct." });
// }
this.http.post("https://lutsoft.nl/trivento/api/login/", JSON.stringify(json)).subscribe(response => {
let data = response.json();
if (data) {
if (data.hasOwnProperty("success") && data.success == true) {
this.storage.set("username", username);
this.storage.set("password", password);
resolve(data);
} else {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + data["errorMessage"] });
}
} else {
resolve({ success: false, errorMessage: "Inloggen mislukt. Geen gegevens."});
}
}, error => {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + error });
});
});
}
getEmployees() {
// get Employees from local storage. Load from server if there are none
return new Promise(resolve => {
this.storage.get("employees").then(data => {
if (data) {
this.employees = JSON.parse(data);
resolve(this.employees);
} else {
// hardcoded data
// //clone to make it a unique object
// this.employees = this.cloneObject(this.defaultEmployees);
// resolve(this.employees);
this.http.get("https://lutsoft.nl/trivento/api/data/").subscribe(response => {
let data = response.json();
if (data) {
this.employees = data;
resolve(data);
} else {
resolve([]);
}
});
}
});
});
}
setEmployee(alteredEmployee) {
// search for employee and overwrite
for (var i in this.employees) {
if (this.employees[i]["id"] == alteredEmployee.id) {
this.employees[i] = alteredEmployee;
break;
}
}
// save data
return new Promise(resolve => {
this.storage.set("employees", JSON.stringify(this.employees)).then(result => {
if (!result) {
throw new Error("Fout bij opslaan");
}
});
});
}
| filterEmployees | identifier_name |
|
data.ts | 6 Briar Crest Place",
// "job_role": "Food Chemist"
// },
// {
// "id": 33,
// "first_name": "Christopher",
// "last_name": "Reed",
// "email": "[email protected]",
// "gender": "Male",
// "address": "19798 Lakewood Gardens Avenue",
// "job_role": "Media Manager III"
// },
// {
// "id": 34,
// "first_name": "Matthew",
// "last_name": "Ford",
// "email": "[email protected]",
// "gender": "Male",
// "address": "5022 Valley Edge Center",
// "job_role": "Paralegal"
// },
// {
// "id": 35,
// "first_name": "Nancy",
// "last_name": "Alexander",
// "email": "[email protected]",
// "gender": "Female",
// "address": "81924 Raven Terrace",
// "job_role": "Community Outreach Specialist"
// },
// {
// "id": 36,
// "first_name": "Emily",
// "last_name": "Gray",
// "email": "[email protected]",
// "gender": "Female",
// "address": "15125 Utah Circle",
// "job_role": "Structural Engineer"
// },
// {
// "id": 37,
// "first_name": "Wayne",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Male",
// "address": "6056 Clyde Gallagher Circle",
// "job_role": "Operator"
// },
// {
// "id": 38,
// "first_name": "Brenda",
// "last_name": "Perry",
// "email": "[email protected]",
// "gender": "Female",
// "address": "9407 6th Hill",
// "job_role": "Environmental Tech"
// },
// {
// "id": 39,
// "first_name": "Rebecca",
// "last_name": "Fox",
// "email": "[email protected]",
// "gender": "Female",
// "address": "024 Buhler Place",
// "job_role": "Software Consultant"
// },
// {
// "id": 40,
// "first_name": "Richard",
// "last_name": "Lawson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "56 Haas Street",
// "job_role": "Chief Design Engineer"
// },
// {
// "id": 41,
// "first_name": "Heather",
// "last_name": "Harris",
// "email": "[email protected]",
// "gender": "Female",
// "address": "3 Longview Point",
// "job_role": "Systems Administrator II"
// },
// {
// "id": 42,
// "first_name": "Alice",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Female",
// "address": "4 Melby Way",
// "job_role": "Social Worker"
// },
// {
// "id": 43,
// "first_name": "Russell",
// "last_name": "Collins",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4 Hermina Street",
// "job_role": "Web Developer I"
// },
// {
// "id": 44,
// "first_name": "Mark",
// "last_name": "Patterson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4949 North Place",
// "job_role": "Engineer I"
// },
// {
// "id": 45,
// "first_name": "Margaret",
// "last_name": "Walker",
// "email": "[email protected]",
// "gender": "Female",
// "address": "60 Rusk Drive",
// "job_role": "VP Sales"
// },
// {
// "id": 46,
// "first_name": "Paul",
// "last_name": "Hunter",
// "email": "[email protected]",
// "gender": "Male",
// "address": "709 Spenser Lane",
// "job_role": "VP Product Management"
// },
// {
// "id": 47,
// "first_name": "Jesse",
// "last_name": "Grant",
// "email": "[email protected]",
// "gender": "Male",
// "address": "57 Fuller Plaza",
// "job_role": "Structural Engineer"
// },
// {
// "id": 48,
// "first_name": "Kelly",
// "last_name": "Fowler",
// "email": "[email protected]",
// "gender": "Female",
// "address": "77 Eagle Crest Place",
// "job_role": "Electrical Engineer"
// },
// {
// "id": 49,
// "first_name": "Christopher",
// "last_name": "Burns",
// "email": "[email protected]",
// "gender": "Male",
// "address": "46 Michigan Place",
// "job_role": "Professor"
// },
// {
// "id": 50,
// "first_name": "Martin",
// "last_name": "Warren",
// "email": "[email protected]",
// "gender": "Male",
// "address": "23697 Ryan Road",
// "job_role": "Recruiter"
// }
// ];
constructor(http: Http) {
this.http = http;
}
login(username, password) {
let json = { username: username, password: password };
return new Promise(resolve => {
// hardcoded login
// if (username == "[email protected]" && password == "Trivento"){
// this.storage.set("username", username);
// this.storage.set("password", password);
// resolve({ success: true, errorMessage: null });
// } else {
// resolve({ success: false, errorMessage: "Inloggen mislukt. Gebruikersnaam of wachtwoord is niet correct." });
// }
this.http.post("https://lutsoft.nl/trivento/api/login/", JSON.stringify(json)).subscribe(response => {
let data = response.json();
if (data) {
if (data.hasOwnProperty("success") && data.success == true) {
this.storage.set("username", username);
this.storage.set("password", password);
resolve(data);
} else | {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + data["errorMessage"] });
} | conditional_block |
|
data.ts | "job_role": "Structural Engineer"
// },
// {
// "id": 37,
// "first_name": "Wayne",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Male",
// "address": "6056 Clyde Gallagher Circle",
// "job_role": "Operator"
// },
// {
// "id": 38,
// "first_name": "Brenda",
// "last_name": "Perry",
// "email": "[email protected]",
// "gender": "Female",
// "address": "9407 6th Hill",
// "job_role": "Environmental Tech"
// },
// {
// "id": 39,
// "first_name": "Rebecca",
// "last_name": "Fox",
// "email": "[email protected]",
// "gender": "Female",
// "address": "024 Buhler Place",
// "job_role": "Software Consultant"
// },
// {
// "id": 40,
// "first_name": "Richard",
// "last_name": "Lawson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "56 Haas Street",
// "job_role": "Chief Design Engineer"
// },
// {
// "id": 41,
// "first_name": "Heather",
// "last_name": "Harris",
// "email": "[email protected]",
// "gender": "Female",
// "address": "3 Longview Point",
// "job_role": "Systems Administrator II"
// },
// {
// "id": 42,
// "first_name": "Alice",
// "last_name": "Martinez",
// "email": "[email protected]",
// "gender": "Female",
// "address": "4 Melby Way",
// "job_role": "Social Worker"
// },
// {
// "id": 43,
// "first_name": "Russell",
// "last_name": "Collins",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4 Hermina Street",
// "job_role": "Web Developer I"
// },
// {
// "id": 44,
// "first_name": "Mark",
// "last_name": "Patterson",
// "email": "[email protected]",
// "gender": "Male",
// "address": "4949 North Place",
// "job_role": "Engineer I"
// },
// {
// "id": 45,
// "first_name": "Margaret",
// "last_name": "Walker",
// "email": "[email protected]",
// "gender": "Female",
// "address": "60 Rusk Drive",
// "job_role": "VP Sales"
// },
// {
// "id": 46,
// "first_name": "Paul",
// "last_name": "Hunter",
// "email": "[email protected]",
// "gender": "Male",
// "address": "709 Spenser Lane",
// "job_role": "VP Product Management"
// },
// {
// "id": 47,
// "first_name": "Jesse",
// "last_name": "Grant",
// "email": "[email protected]",
// "gender": "Male",
// "address": "57 Fuller Plaza",
// "job_role": "Structural Engineer"
// },
// {
// "id": 48,
// "first_name": "Kelly",
// "last_name": "Fowler",
// "email": "[email protected]",
// "gender": "Female",
// "address": "77 Eagle Crest Place",
// "job_role": "Electrical Engineer"
// },
// {
// "id": 49,
// "first_name": "Christopher",
// "last_name": "Burns",
// "email": "[email protected]",
// "gender": "Male",
// "address": "46 Michigan Place",
// "job_role": "Professor"
// },
// {
// "id": 50,
// "first_name": "Martin",
// "last_name": "Warren",
// "email": "[email protected]",
// "gender": "Male",
// "address": "23697 Ryan Road",
// "job_role": "Recruiter"
// }
// ];
constructor(http: Http) {
this.http = http;
}
login(username, password) {
let json = { username: username, password: password };
return new Promise(resolve => {
// hardcoded login
// if (username == "[email protected]" && password == "Trivento"){
// this.storage.set("username", username);
// this.storage.set("password", password);
// resolve({ success: true, errorMessage: null });
// } else {
// resolve({ success: false, errorMessage: "Inloggen mislukt. Gebruikersnaam of wachtwoord is niet correct." });
// }
this.http.post("https://lutsoft.nl/trivento/api/login/", JSON.stringify(json)).subscribe(response => {
let data = response.json();
if (data) {
if (data.hasOwnProperty("success") && data.success == true) {
this.storage.set("username", username);
this.storage.set("password", password);
resolve(data);
} else {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + data["errorMessage"] });
}
} else {
resolve({ success: false, errorMessage: "Inloggen mislukt. Geen gegevens."});
}
}, error => {
resolve({ success: false, errorMessage: "Inloggen mislukt. " + error });
});
});
}
getEmployees() {
// get Employees from local storage. Load from server if there are none
return new Promise(resolve => {
this.storage.get("employees").then(data => {
if (data) {
this.employees = JSON.parse(data);
resolve(this.employees);
} else {
// hardcoded data
// //clone to make it a unique object
// this.employees = this.cloneObject(this.defaultEmployees);
// resolve(this.employees);
this.http.get("https://lutsoft.nl/trivento/api/data/").subscribe(response => {
let data = response.json();
if (data) {
this.employees = data;
resolve(data);
} else {
resolve([]);
}
});
}
});
});
}
setEmployee(alteredEmployee) {
// search for employee and overwrite
for (var i in this.employees) {
if (this.employees[i]["id"] == alteredEmployee.id) {
this.employees[i] = alteredEmployee;
break;
}
}
// save data
return new Promise(resolve => {
this.storage.set("employees", JSON.stringify(this.employees)).then(result => {
if (!result) {
throw new Error("Fout bij opslaan");
}
});
});
}
| filterEmployees(fullName, jobRole){
return this.employees.filter(employee => {
// search fullName and filter jobRole
let retVal = true;
let employeeFullName = employee.first_name + employee.last_name; | random_line_split |
|
leap_tracker.py | , s, ns):
rospy.loginfo(self.build(s, ns))
def d(self, s, ns):
rospy.logdebug(self.build(s, ns))
def e(self, s, ns):
rospy.logerr(self.build(s, ns))
def c(self, s, ns):
rospy.logwarn(self.build(s, ns))
def build(self, s, ns):
return "\x1B[1m[{}]\x1B[0m {}".format(ns, s)
LOG = Logger()
class LeapServer(Leap.Listener):
"""
@brief Main class to get data from the LEAP Motion controller.
It extends the Leap.Listener class and implements all
the event methods defined in it. For more info, check the LEAP Motion API:
https://developer.leapmotion.com/documentation/skeletal/python/index.html
"""
def on_init(self, controller):
LOG.v("Initialized", "on_init")
self.t = 0 # time var for automated testing
# Initialize empty frame
self.frame = None
# Initialize fingers and hands
self.hand = Leap.Hand()
self.fingers = { FINGER_NAMES[i] : Leap.Finger()
for i in range(5)}
# Initialize joint names for JointState messages
self.joint_names = []
# Initialize node
rospy.init_node('hand_tracker', anonymous=True)
# Initialize publishers
self.js_pub = rospy.Publisher(JS_TOPIC, JointState, queue_size=10)
self.ps_pub = rospy.Publisher(PS_TOPIC, PoseStamped, queue_size=10)
self.ts_pub = rospy.Publisher(TS_TOPIC, TwistStamped, queue_size=10)
def on_connect(self, controller):
LOG.v("Connected", "on_connect")
def on_disconnect(self, controller):
# Note: not dispatched when running in a debugger.
LOG.v("Disconnected", "on_disconnect")
def on_exit(self, controller):
LOG.v("END", "on_exit")
def on_frame(self, controller):
# Get the most recent frame and fill data structures
frame = controller.frame()
selected_finger = None
if not frame.hands.is_empty:
# Get the first hand
hand = frame.hands[0]
self.hand = hand
# Check if the hand has any fingers
fingers = hand.fingers
if not fingers.is_empty:
# Iterate fingers from leftmost to rightmost
for i, finger in enumerate(sorted(fingers, key=lambda f: f.type())):
# Identify thumb and pinky
if finger.type() == Leap.Finger.TYPE_THUMB:
selected_finger = FINGER_NAMES[0]
elif finger.type() == Leap.Finger.TYPE_PINKY:
selected_finger = FINGER_NAMES[-1]
else:
selected_finger = FINGER_NAMES[finger.type()]
# Set selected finger's properties
self.fingers[selected_finger] = finger
# Show data through stdout
self.show_data(['hand'])
def show_data(self, what=['hand'] + FINGER_NAMES):
"""
@brief Shows tracking data on the standard output via
the logging system.
"""
if 'hand' in what:
normal = self.hand.palm_normal
direction = self.hand.direction
position = self.hand.palm_position
LOG.v(("hand:\n" +
"\tpitch: {:>6.2f} | x: {:>6.2f}\n" + \
"\t yaw: {:>6.2f} | y: {:>6.2f}\n" + \
"\t roll: {:>6.2f} | z: {:>6.2f}")\
.format(direction.pitch, position.x,
direction.yaw, position.y,
normal.roll, position.z), "show_data")
for name in FINGER_NAMES:
if name in what:
finger = self.fingers[name]
for b, bone_name in enumerate(FINGER_BONES):
bone = finger.bone(b)
direction = bone.direction
LOG.v(("{}.{}:\n" +
"\tpitch: {:>6.2f}")\
.format(name, bone_name, direction.pitch), "show_data")
def start_transmit(self):
"""
@brief Starts transmission of tracking data.
Starts sending the current tracking values via ROS topics
'leap_tracker/joint_state_out', 'leap_tracker/pose_stamped_out' and
'leap_tracker/twist_stamped_out' to whichever LEAP tracking conversion
services listening to these topics.
"""
# Set publishing rate
self.r = rospy.Rate(50) # 50Hz
quitting = False
while not rospy.is_shutdown() and not quitting:
try:
# JointState message to publish joint positions
js_msg = self.build_joint_state_msg()
# PoseStamped messages to publish position and
# orientation of each joint
ps_msg = self.build_pose_stamped_msg()
# TODO: TwistStamped messages to publish linear and
# angular velocities of each joint
ts_msg = TwistStamped()
# Publish the messages
self.js_pub.publish(js_msg)
self.ps_pub.publish(ps_msg)
# TODO: Publish TwistStamped
# self.ts_pub.publish(ts_msg)
self.r.sleep()
self.t += 0.01 # automated tests time var
except KeyboardInterrupt:
LOG.e("KeyboardInterrupt detected", "start_transmit")
quitting = True
LOG.d("Quit command sent to client", "main")
raise QuitMessageException("Quit message received from client")
def build_joint_state_msg(self):
"""
@brief JointState message builder.
Builds a JointState message with the current position of the finger
joints and its names.
"""
js_msg = JointState()
js_msg.header.stamp = rospy.Time.now()
if self.joint_names == []:
self.joint_names = ["{}.{}".format('hand', attr)
for attr in ORI_ATTRIBUTES] + \
["{}.{}.{}".format(finger, bone, ori)
for finger in FINGER_NAMES
for bone in FINGER_BONES
for ori in ORI_ATTRIBUTES]
LOG.v("Publishing JointState for the following joints: {}".format(self.joint_names), "start_transmit")
js_msg.position = [0.0] * len(self.joint_names)
pos = 0
# Build JointState. First the hand...
for i, attr in enumerate(ORI_ATTRIBUTES):
js_msg.name.append('hand.' + str(attr))
# Roll precision hack
if attr == 'roll':
vector = self.hand.palm_normal
else:
vector = self.hand.direction
js_msg.position[pos] = getattr(vector, attr)
pos += 1
# ...then the fingers
for i, finger_name, finger in \
[(i, finger_name, self.fingers[finger_name]) \
for i, finger_name in enumerate(FINGER_NAMES)]:
# LEAP API v2.0: Skeletal model
# Get bones
for j, bone_name, bone in \
[(j, bone_name, finger.bone(j)) \
for j, bone_name in enumerate(FINGER_BONES)]:
# Fill the joint values one by one
for k, attr in enumerate(ORI_ATTRIBUTES):
joint_name = "{}.{}.{}".format(finger_name, bone_name, attr)
joint_value = getattr(bone.direction, attr)
js_msg.name.append(joint_name)
js_msg.position[pos] = joint_value
pos += 1
# return the JointState message
return js_msg
def build_pose_stamped_msg(self):
"""
@brief PoseStamped builder
Builds a PoseStamped message with the current position of the hand
and its pose.
"""
# Hand first
ps_msg = PoseStamped()
ps_msg.header.stamp = rospy.Time.now()
ps_msg.header.frame_id = FRAME_ID
if not DEBUG_TEST:
position = self.hand.palm_position
# Set position values in the message
for j, attr in enumerate(POS_ATTRIBUTES):
val = getattr(position, attr)
setattr(ps_msg.pose.position, attr, val)
# Get pose
direction = self.hand.direction
normal = self.hand.palm_normal
# Get orientation values from hand vectors
roll = normal.roll
pitch = normal.pitch
yaw = direction.yaw
else:
((x, y, z), (pitch, yaw, roll)) = self.test_pose()
ps_msg.pose.position.x = x
ps_msg.pose.position.y = y
ps_msg.pose.position.z = z
# Convert RPY to Quaternion
quaternion = transformations.quaternion_from_euler(roll, pitch, yaw)
# Set orientation quaternion in the message
ps_msg.pose.orientation.x = quaternion[0]
ps_msg.pose.orientation.y = quaternion[1]
ps_msg.pose.orientation.z = quaternion[2]
ps_msg.pose.orientation.w = quaternion[3]
# return the PoseStamped messages
print(ps_msg)
return ps_msg
def test_pose(self):
"""
@brief Generates test values for the pose messages. | """ | random_line_split |
|
leap_tracker.py | :
"""
Fixes libraries path to properly import the LEAP Motion controller and
its Python wrapper
"""
import sys, os, struct
bit_size = struct.calcsize("P") * 8
ARCH = '/x86' if bit_size == 32 else '/x64'
LEAP_PATH = os.path.dirname(__file__) + '/leap'
sys.path.extend([LEAP_PATH, LEAP_PATH + ARCH])
# Fix import path to properly import Leap controller and wrapper
fix_import_path()
import Leap, rospy, math
from exc import QuitMessageException
from std_msgs.msg import String
from sensor_msgs.msg import JointState
from geometry_msgs.msg import TwistStamped, PoseStamped
from tf import transformations
# Initialize consts and vars
NODE_NAME = 'leap_tracker'
FRAME_ID = NODE_NAME
JS_TOPIC = '%s/joint_state_out' % NODE_NAME
PS_TOPIC = '%s/pose_stamped_out' % NODE_NAME
TS_TOPIC = '%s/twist_stamped_out' % NODE_NAME
FINGER_NAMES = ['thumb', 'index', 'middle', 'ring', 'pinky']
FINGER_BONES = ['meta', 'prox', 'mid', 'dist']
POS_ATTRIBUTES = ['x', 'y', 'z']
ORI_ATTRIBUTES = ['roll', 'pitch', 'yaw']
# Debug flags
DEBUG_TEST = False
class Logger:
"""
@brief Wrapper for ROS logging class.
Adds color to the output.
"""
def v(self, s, ns):
rospy.loginfo(self.build(s, ns))
def d(self, s, ns):
rospy.logdebug(self.build(s, ns))
def e(self, s, ns):
rospy.logerr(self.build(s, ns))
def c(self, s, ns):
rospy.logwarn(self.build(s, ns))
def build(self, s, ns):
return "\x1B[1m[{}]\x1B[0m {}".format(ns, s)
LOG = Logger()
class LeapServer(Leap.Listener):
"""
@brief Main class to get data from the LEAP Motion controller.
It extends the Leap.Listener class and implements all
the event methods defined in it. For more info, check the LEAP Motion API:
https://developer.leapmotion.com/documentation/skeletal/python/index.html
"""
def on_init(self, controller):
LOG.v("Initialized", "on_init")
self.t = 0 # time var for automated testing
# Initialize empty frame
self.frame = None
# Initialize fingers and hands
self.hand = Leap.Hand()
self.fingers = { FINGER_NAMES[i] : Leap.Finger()
for i in range(5)}
# Initialize joint names for JointState messages
self.joint_names = []
# Initialize node
rospy.init_node('hand_tracker', anonymous=True)
# Initialize publishers
self.js_pub = rospy.Publisher(JS_TOPIC, JointState, queue_size=10)
self.ps_pub = rospy.Publisher(PS_TOPIC, PoseStamped, queue_size=10)
self.ts_pub = rospy.Publisher(TS_TOPIC, TwistStamped, queue_size=10)
def on_connect(self, controller):
LOG.v("Connected", "on_connect")
def on_disconnect(self, controller):
# Note: not dispatched when running in a debugger.
LOG.v("Disconnected", "on_disconnect")
def on_exit(self, controller):
LOG.v("END", "on_exit")
def on_frame(self, controller):
# Get the most recent frame and fill data structures
frame = controller.frame()
selected_finger = None
if not frame.hands.is_empty:
# Get the first hand
hand = frame.hands[0]
self.hand = hand
# Check if the hand has any fingers
fingers = hand.fingers
if not fingers.is_empty:
# Iterate fingers from leftmost to rightmost
for i, finger in enumerate(sorted(fingers, key=lambda f: f.type())):
# Identify thumb and pinky
if finger.type() == Leap.Finger.TYPE_THUMB:
selected_finger = FINGER_NAMES[0]
elif finger.type() == Leap.Finger.TYPE_PINKY:
selected_finger = FINGER_NAMES[-1]
else:
selected_finger = FINGER_NAMES[finger.type()]
# Set selected finger's properties
self.fingers[selected_finger] = finger
# Show data through stdout
self.show_data(['hand'])
def show_data(self, what=['hand'] + FINGER_NAMES):
"""
@brief Shows tracking data on the standard output via
the logging system.
"""
if 'hand' in what:
normal = self.hand.palm_normal
direction = self.hand.direction
position = self.hand.palm_position
LOG.v(("hand:\n" +
"\tpitch: {:>6.2f} | x: {:>6.2f}\n" + \
"\t yaw: {:>6.2f} | y: {:>6.2f}\n" + \
"\t roll: {:>6.2f} | z: {:>6.2f}")\
.format(direction.pitch, position.x,
direction.yaw, position.y,
normal.roll, position.z), "show_data")
for name in FINGER_NAMES:
if name in what:
finger = self.fingers[name]
for b, bone_name in enumerate(FINGER_BONES):
bone = finger.bone(b)
direction = bone.direction
LOG.v(("{}.{}:\n" +
"\tpitch: {:>6.2f}")\
.format(name, bone_name, direction.pitch), "show_data")
def start_transmit(self):
"""
@brief Starts transmission of tracking data.
Starts sending the current tracking values via ROS topics
'leap_tracker/joint_state_out', 'leap_tracker/pose_stamped_out' and
'leap_tracker/twist_stamped_out' to whichever LEAP tracking conversion
services listening to these topics.
"""
# Set publishing rate
self.r = rospy.Rate(50) # 50Hz
quitting = False
while not rospy.is_shutdown() and not quitting:
try:
# JointState message to publish joint positions
js_msg = self.build_joint_state_msg()
# PoseStamped messages to publish position and
# orientation of each joint
ps_msg = self.build_pose_stamped_msg()
# TODO: TwistStamped messages to publish linear and
# angular velocities of each joint
ts_msg = TwistStamped()
# Publish the messages
self.js_pub.publish(js_msg)
self.ps_pub.publish(ps_msg)
# TODO: Publish TwistStamped
# self.ts_pub.publish(ts_msg)
self.r.sleep()
self.t += 0.01 # automated tests time var
except KeyboardInterrupt:
LOG.e("KeyboardInterrupt detected", "start_transmit")
quitting = True
LOG.d("Quit command sent to client", "main")
raise QuitMessageException("Quit message received from client")
def build_joint_state_msg(self):
"""
@brief JointState message builder.
Builds a JointState message with the current position of the finger
joints and its names.
"""
js_msg = JointState()
js_msg.header.stamp = rospy.Time.now()
if self.joint_names == []:
self.joint_names = ["{}.{}".format('hand', attr)
for attr in ORI_ATTRIBUTES] + \
["{}.{}.{}".format(finger, bone, ori)
for finger in FINGER_NAMES
for bone in FINGER_BONES
for ori in ORI_ATTRIBUTES]
LOG.v("Publishing JointState for the following joints: {}".format(self.joint_names), "start_transmit")
js_msg.position = [0.0] * len(self.joint_names)
pos = 0
# Build JointState. First the hand...
for i, attr in enumerate(ORI_ATTRIBUTES):
js_msg.name.append('hand.' + str(attr))
# Roll precision hack
if attr == 'roll':
vector = self.hand.palm_normal
else:
vector = self.hand.direction
js_msg.position[pos] = getattr(vector, attr)
pos += 1
# ...then the fingers
for i, finger_name, finger in \
[(i, finger_name, self.fingers[finger_name]) \
for i, finger_name in enumerate(FINGER_NAMES)]:
# LEAP API v2.0: Skeletal model
# Get bones
for j, bone_name, bone in \
[(j, bone_name, finger.bone(j)) \
for j, bone_name in enumerate(FINGER_BONES)]:
# Fill the joint values one by one
for k, attr in enumerate(ORI_ATTRIBUTES):
joint_name = "{}.{}.{}".format(finger_name, bone_name, attr)
joint_value = getattr(bone.direction, attr)
js_msg.name.append(joint_name)
js_msg.position[pos] = joint_value
pos += 1
# return the JointState message
return js_msg
def build_pose_stamped_msg(self):
"""
@brief PoseStamped builder | x_import_path() | identifier_name |
|
leap_tracker.py | "on_disconnect")
def on_exit(self, controller):
LOG.v("END", "on_exit")
def on_frame(self, controller):
# Get the most recent frame and fill data structures
frame = controller.frame()
selected_finger = None
if not frame.hands.is_empty:
# Get the first hand
hand = frame.hands[0]
self.hand = hand
# Check if the hand has any fingers
fingers = hand.fingers
if not fingers.is_empty:
# Iterate fingers from leftmost to rightmost
for i, finger in enumerate(sorted(fingers, key=lambda f: f.type())):
# Identify thumb and pinky
if finger.type() == Leap.Finger.TYPE_THUMB:
selected_finger = FINGER_NAMES[0]
elif finger.type() == Leap.Finger.TYPE_PINKY:
selected_finger = FINGER_NAMES[-1]
else:
selected_finger = FINGER_NAMES[finger.type()]
# Set selected finger's properties
self.fingers[selected_finger] = finger
# Show data through stdout
self.show_data(['hand'])
def show_data(self, what=['hand'] + FINGER_NAMES):
"""
@brief Shows tracking data on the standard output via
the logging system.
"""
if 'hand' in what:
normal = self.hand.palm_normal
direction = self.hand.direction
position = self.hand.palm_position
LOG.v(("hand:\n" +
"\tpitch: {:>6.2f} | x: {:>6.2f}\n" + \
"\t yaw: {:>6.2f} | y: {:>6.2f}\n" + \
"\t roll: {:>6.2f} | z: {:>6.2f}")\
.format(direction.pitch, position.x,
direction.yaw, position.y,
normal.roll, position.z), "show_data")
for name in FINGER_NAMES:
if name in what:
finger = self.fingers[name]
for b, bone_name in enumerate(FINGER_BONES):
bone = finger.bone(b)
direction = bone.direction
LOG.v(("{}.{}:\n" +
"\tpitch: {:>6.2f}")\
.format(name, bone_name, direction.pitch), "show_data")
def start_transmit(self):
"""
@brief Starts transmission of tracking data.
Starts sending the current tracking values via ROS topics
'leap_tracker/joint_state_out', 'leap_tracker/pose_stamped_out' and
'leap_tracker/twist_stamped_out' to whichever LEAP tracking conversion
services listening to these topics.
"""
# Set publishing rate
self.r = rospy.Rate(50) # 50Hz
quitting = False
while not rospy.is_shutdown() and not quitting:
try:
# JointState message to publish joint positions
js_msg = self.build_joint_state_msg()
# PoseStamped messages to publish position and
# orientation of each joint
ps_msg = self.build_pose_stamped_msg()
# TODO: TwistStamped messages to publish linear and
# angular velocities of each joint
ts_msg = TwistStamped()
# Publish the messages
self.js_pub.publish(js_msg)
self.ps_pub.publish(ps_msg)
# TODO: Publish TwistStamped
# self.ts_pub.publish(ts_msg)
self.r.sleep()
self.t += 0.01 # automated tests time var
except KeyboardInterrupt:
LOG.e("KeyboardInterrupt detected", "start_transmit")
quitting = True
LOG.d("Quit command sent to client", "main")
raise QuitMessageException("Quit message received from client")
def build_joint_state_msg(self):
"""
@brief JointState message builder.
Builds a JointState message with the current position of the finger
joints and its names.
"""
js_msg = JointState()
js_msg.header.stamp = rospy.Time.now()
if self.joint_names == []:
self.joint_names = ["{}.{}".format('hand', attr)
for attr in ORI_ATTRIBUTES] + \
["{}.{}.{}".format(finger, bone, ori)
for finger in FINGER_NAMES
for bone in FINGER_BONES
for ori in ORI_ATTRIBUTES]
LOG.v("Publishing JointState for the following joints: {}".format(self.joint_names), "start_transmit")
js_msg.position = [0.0] * len(self.joint_names)
pos = 0
# Build JointState. First the hand...
for i, attr in enumerate(ORI_ATTRIBUTES):
js_msg.name.append('hand.' + str(attr))
# Roll precision hack
if attr == 'roll':
vector = self.hand.palm_normal
else:
vector = self.hand.direction
js_msg.position[pos] = getattr(vector, attr)
pos += 1
# ...then the fingers
for i, finger_name, finger in \
[(i, finger_name, self.fingers[finger_name]) \
for i, finger_name in enumerate(FINGER_NAMES)]:
# LEAP API v2.0: Skeletal model
# Get bones
for j, bone_name, bone in \
[(j, bone_name, finger.bone(j)) \
for j, bone_name in enumerate(FINGER_BONES)]:
# Fill the joint values one by one
for k, attr in enumerate(ORI_ATTRIBUTES):
joint_name = "{}.{}.{}".format(finger_name, bone_name, attr)
joint_value = getattr(bone.direction, attr)
js_msg.name.append(joint_name)
js_msg.position[pos] = joint_value
pos += 1
# return the JointState message
return js_msg
def build_pose_stamped_msg(self):
"""
@brief PoseStamped builder
Builds a PoseStamped message with the current position of the hand
and its pose.
"""
# Hand first
ps_msg = PoseStamped()
ps_msg.header.stamp = rospy.Time.now()
ps_msg.header.frame_id = FRAME_ID
if not DEBUG_TEST:
position = self.hand.palm_position
# Set position values in the message
for j, attr in enumerate(POS_ATTRIBUTES):
val = getattr(position, attr)
setattr(ps_msg.pose.position, attr, val)
# Get pose
direction = self.hand.direction
normal = self.hand.palm_normal
# Get orientation values from hand vectors
roll = normal.roll
pitch = normal.pitch
yaw = direction.yaw
else:
((x, y, z), (pitch, yaw, roll)) = self.test_pose()
ps_msg.pose.position.x = x
ps_msg.pose.position.y = y
ps_msg.pose.position.z = z
# Convert RPY to Quaternion
quaternion = transformations.quaternion_from_euler(roll, pitch, yaw)
# Set orientation quaternion in the message
ps_msg.pose.orientation.x = quaternion[0]
ps_msg.pose.orientation.y = quaternion[1]
ps_msg.pose.orientation.z = quaternion[2]
ps_msg.pose.orientation.w = quaternion[3]
# return the PoseStamped messages
print(ps_msg)
return ps_msg
def test_pose(self):
"""
@brief Generates test values for the pose messages.
"""
t = self.t
# Cyclic functions for orientation and position values
delta = math.sin(t) * 1000
alpha = math.cos(t) * math.pi * 2
# Default values
x = 0
y = 0
z = 0
pitch = 0
yaw = 0
roll = 0
# assign values cyclically
if t % (math.pi * 12) < math.pi * 2:
x = delta
elif t % (math.pi * 12) < math.pi * 4:
y = delta
elif t % (math.pi * 12) < math.pi * 6:
z = delta
elif t % (math.pi * 12) < math.pi * 8:
pitch = alpha
elif t % (math.pi * 12) < math.pi * 10:
yaw = alpha
elif t % (math.pi * 12) < math.pi * 12:
roll = alpha
else:
# Reset counter
self.t = 0.0
return ((x, y, z), (pitch, yaw, roll))
def main():
# Init the server and controller
le | ap_server = LeapServer()
controller = Leap.Controller()
# Have the sample listener receive events from the controller
controller.add_listener(leap_server)
# Keep this process running until quit from client or Ctrl^C
LOG.v("Press ^C to quit...", "main")
try:
# Start communication
leap_server.start_transmit()
except QuitMessageException as e:
LOG.e(e, "main")
except KeyboardInterrupt as e:
LOG.e("Interrupted by user", "main")
# Remove the sample listener when done
controller.remove_listener(leap_server)
| identifier_body |
|
leap_tracker.py | for i in range(5)}
# Initialize joint names for JointState messages
self.joint_names = []
# Initialize node
rospy.init_node('hand_tracker', anonymous=True)
# Initialize publishers
self.js_pub = rospy.Publisher(JS_TOPIC, JointState, queue_size=10)
self.ps_pub = rospy.Publisher(PS_TOPIC, PoseStamped, queue_size=10)
self.ts_pub = rospy.Publisher(TS_TOPIC, TwistStamped, queue_size=10)
def on_connect(self, controller):
LOG.v("Connected", "on_connect")
def on_disconnect(self, controller):
# Note: not dispatched when running in a debugger.
LOG.v("Disconnected", "on_disconnect")
def on_exit(self, controller):
LOG.v("END", "on_exit")
def on_frame(self, controller):
# Get the most recent frame and fill data structures
frame = controller.frame()
selected_finger = None
if not frame.hands.is_empty:
# Get the first hand
hand = frame.hands[0]
self.hand = hand
# Check if the hand has any fingers
fingers = hand.fingers
if not fingers.is_empty:
# Iterate fingers from leftmost to rightmost
for i, finger in enumerate(sorted(fingers, key=lambda f: f.type())):
# Identify thumb and pinky
if finger.type() == Leap.Finger.TYPE_THUMB:
selected_finger = FINGER_NAMES[0]
elif finger.type() == Leap.Finger.TYPE_PINKY:
selected_finger = FINGER_NAMES[-1]
else:
selected_finger = FINGER_NAMES[finger.type()]
# Set selected finger's properties
self.fingers[selected_finger] = finger
# Show data through stdout
self.show_data(['hand'])
def show_data(self, what=['hand'] + FINGER_NAMES):
"""
@brief Shows tracking data on the standard output via
the logging system.
"""
if 'hand' in what:
normal = self.hand.palm_normal
direction = self.hand.direction
position = self.hand.palm_position
LOG.v(("hand:\n" +
"\tpitch: {:>6.2f} | x: {:>6.2f}\n" + \
"\t yaw: {:>6.2f} | y: {:>6.2f}\n" + \
"\t roll: {:>6.2f} | z: {:>6.2f}")\
.format(direction.pitch, position.x,
direction.yaw, position.y,
normal.roll, position.z), "show_data")
for name in FINGER_NAMES:
if name in what:
finger = self.fingers[name]
for b, bone_name in enumerate(FINGER_BONES):
bone = finger.bone(b)
direction = bone.direction
LOG.v(("{}.{}:\n" +
"\tpitch: {:>6.2f}")\
.format(name, bone_name, direction.pitch), "show_data")
def start_transmit(self):
"""
@brief Starts transmission of tracking data.
Starts sending the current tracking values via ROS topics
'leap_tracker/joint_state_out', 'leap_tracker/pose_stamped_out' and
'leap_tracker/twist_stamped_out' to whichever LEAP tracking conversion
services listening to these topics.
"""
# Set publishing rate
self.r = rospy.Rate(50) # 50Hz
quitting = False
while not rospy.is_shutdown() and not quitting:
try:
# JointState message to publish joint positions
js_msg = self.build_joint_state_msg()
# PoseStamped messages to publish position and
# orientation of each joint
ps_msg = self.build_pose_stamped_msg()
# TODO: TwistStamped messages to publish linear and
# angular velocities of each joint
ts_msg = TwistStamped()
# Publish the messages
self.js_pub.publish(js_msg)
self.ps_pub.publish(ps_msg)
# TODO: Publish TwistStamped
# self.ts_pub.publish(ts_msg)
self.r.sleep()
self.t += 0.01 # automated tests time var
except KeyboardInterrupt:
LOG.e("KeyboardInterrupt detected", "start_transmit")
quitting = True
LOG.d("Quit command sent to client", "main")
raise QuitMessageException("Quit message received from client")
def build_joint_state_msg(self):
"""
@brief JointState message builder.
Builds a JointState message with the current position of the finger
joints and its names.
"""
js_msg = JointState()
js_msg.header.stamp = rospy.Time.now()
if self.joint_names == []:
self.joint_names = ["{}.{}".format('hand', attr)
for attr in ORI_ATTRIBUTES] + \
["{}.{}.{}".format(finger, bone, ori)
for finger in FINGER_NAMES
for bone in FINGER_BONES
for ori in ORI_ATTRIBUTES]
LOG.v("Publishing JointState for the following joints: {}".format(self.joint_names), "start_transmit")
js_msg.position = [0.0] * len(self.joint_names)
pos = 0
# Build JointState. First the hand...
for i, attr in enumerate(ORI_ATTRIBUTES):
js_msg.name.append('hand.' + str(attr))
# Roll precision hack
if attr == 'roll':
vector = self.hand.palm_normal
else:
vector = self.hand.direction
js_msg.position[pos] = getattr(vector, attr)
pos += 1
# ...then the fingers
for i, finger_name, finger in \
[(i, finger_name, self.fingers[finger_name]) \
for i, finger_name in enumerate(FINGER_NAMES)]:
# LEAP API v2.0: Skeletal model
# Get bones
for j, bone_name, bone in \
[(j, bone_name, finger.bone(j)) \
for j, bone_name in enumerate(FINGER_BONES)]:
# Fill the joint values one by one
for k, attr in enumerate(ORI_ATTRIBUTES):
joint_name = "{}.{}.{}".format(finger_name, bone_name, attr)
joint_value = getattr(bone.direction, attr)
js_msg.name.append(joint_name)
js_msg.position[pos] = joint_value
pos += 1
# return the JointState message
return js_msg
def build_pose_stamped_msg(self):
"""
@brief PoseStamped builder
Builds a PoseStamped message with the current position of the hand
and its pose.
"""
# Hand first
ps_msg = PoseStamped()
ps_msg.header.stamp = rospy.Time.now()
ps_msg.header.frame_id = FRAME_ID
if not DEBUG_TEST:
position = self.hand.palm_position
# Set position values in the message
for j, attr in enumerate(POS_ATTRIBUTES):
val = getattr(position, attr)
setattr(ps_msg.pose.position, attr, val)
# Get pose
direction = self.hand.direction
normal = self.hand.palm_normal
# Get orientation values from hand vectors
roll = normal.roll
pitch = normal.pitch
yaw = direction.yaw
else:
((x, y, z), (pitch, yaw, roll)) = self.test_pose()
ps_msg.pose.position.x = x
ps_msg.pose.position.y = y
ps_msg.pose.position.z = z
# Convert RPY to Quaternion
quaternion = transformations.quaternion_from_euler(roll, pitch, yaw)
# Set orientation quaternion in the message
ps_msg.pose.orientation.x = quaternion[0]
ps_msg.pose.orientation.y = quaternion[1]
ps_msg.pose.orientation.z = quaternion[2]
ps_msg.pose.orientation.w = quaternion[3]
# return the PoseStamped messages
print(ps_msg)
return ps_msg
def test_pose(self):
"""
@brief Generates test values for the pose messages.
"""
t = self.t
# Cyclic functions for orientation and position values
delta = math.sin(t) * 1000
alpha = math.cos(t) * math.pi * 2
# Default values
x = 0
y = 0
z = 0
pitch = 0
yaw = 0
roll = 0
# assign values cyclically
if t % (math.pi * 12) < math.pi * 2:
x = delta
elif t % (math.pi * 12) < math.pi * 4:
y = delta
elif t % (math.pi * 12) < math.pi * 6:
z = delta
elif t % (math.pi * 12) < math.pi * 8:
pitch = alpha
elif t % (math.pi * 12) < math.pi * 10:
yaw = alpha
elif t % (math.pi * 12) < math.pi * 12:
roll = alpha
else:
# Reset counter
se | lf.t = 0.0
| conditional_block |
|
en.ts | voting for less than 36.",
"voting-rules-2": "Each voting transaction costs only a small fee of 0.0004 ELA.",
"voting-rules-3": "There is no lock in period. However, if you send coins out of your wallet your votes will be cancelled and you will need to vote again.",
"voting-rules-4": "As an incentive to vote, some delegates choose to share a portion of their ELA income with their voters. Since this is completely optional, there is no standard share percentage, payment frequency, or distribution method. This is where ELAnodes is designed to help.",
"supernodes-1": "In the supernode section you will find the current delegate rankings.",
"supernodes-2": "The rewards column details the estimated annual rate of return you will earn for voting for a particular supernode delegate. Underneath is the percent of income the delegate shares with their voters.",
"supernodes-3": "To add or remove a supernode from your voting list, simply tap an entry.",
"supernodes-4": "More detailed information about each supernode operator may be found by opening the detail slider.",
"staking-1": "The staking tools menu can be opened using the navigation tab or swiping from the right edge of your device. This menu is accessible anywhere within the app.",
"staking-2": "Here you will find summary statistics for your selected supernodes.",
"staking-3": "Several presets are available that will automatically generate voting lists. After successfully submitting a vote through the app, you will be able to recover your selections during future visits.",
"staking-4": "When you're ready to vote, choose from a supported wallet and follow the prompts.",
"rewards-1": "The rewards section is your personal data dashboard.",
"rewards-2": "You can add and store as many addresses as you wish. Addresses are stored on your device only. Aliases are optional.",
"rewards-3": "The table categorizes all staking rewards received by the selected wallet. The columns are sortable and each entry is expandable.",
"rewards-4": "The advanced section includes summary statistics and visual representations of your earnings history.",
"analytics-1": "The analytics section presents current and historical data for the Elastos mainchain.",
"analytics-2": "You can use the options to toggle the charts between various datasets.",
"analytics-3": "That's it. If you need to reference this tutorial again you can find it under settings. Enjoy!",
"tutorial-complete": "Complete Tutorial",
// Language Page
"language-title": "Language",
// Notification Page
"notification-title": "Notification Options",
"notification-health-title": "Health Check Frequency",
"notification-instant": "Every Session",
"notification-daily": "Daily",
"notification-weekly": "Weekly",
"notification-never": "Never",
"notification-report-title": "End of Week Report",
"notification-change-title": "Payout Change Detection",
"notification-optimal-title": "Optimal Configuration Detection",
"notification-revote-title": "Vote Cancellation Alerts",
"notification-storedAddress-title": "Address Storage Alerts",
// Notifications
"notification-health-header": "Staking Health Status",
"notification-health-sub36-1": "Vote for",
"notification-health-sub36-2": "more nodes to maximize your return.",
"notification-health-pass": "Excellent! All selections online.",
"notification-health-offline-1": "Warning -",
"notification-health-offline-2": "is currently inactive.",
"notification-health-canceled-1": "Alert -",
"notification-health-canceled-2": "was canceled.",
"notification-health-illegal-1": "Alert -",
"notification-health-illegal-2": "is in illegal status (suspended indefinitely).",
"notification-report-header": "Earnings Report",
"notification-report": "Weekly report ready! Tap to view.",
"notification-change-header": "Payment Change Detection",
"notification-change-1": "Significant payout change detected.",
"notification-change-2": "payout",
"notification-change-3": "from",
"notification-change-4": "to",
"notification-change-reduced": "reduced",
"notification-change-increased": "increased",
"notification-optimal-header": "Staking Configuration Alert",
"notification-optimal": "A more optimal voting configuration is available. Tap to increase your annual return rate by up to",
"notification-cancellation-header": "Vote Status Alert",
"notification-cancellation-1": "An outgoing transaction of",
"notification-cancellation-2": "ELA occurred on",
"notification-cancellation-3": "without a voting payload. You may need to recast your votes!",
"notification-noAddress-header": "Address Storage Alert",
"notification-noAddress": "No voting address in local storage. Tap to add one and receive status alerts.",
// Settings Page
"settings-title": "Settings",
"general-text": "GENERAL",
"data-text": "DATA",
"other-text": "OTHER",
"community-text": "COMMUNITY",
"language-route-label": "Language",
"notifications-route-label": "Notification Options",
"wallets-route-label": "Wallets",
"about-route-label": "About",
"tutorial-route-label": "Tutorial",
"faq-route-label": "F.A.Q",
"donate-route-label": "Donate",
// Analytics Page
"staking-chart-title": "STAKING PARTICIPATION",
"staking-chart-coins": "COINS",
"staking-chart-supply": "SUPPLY",
"staking-chart-voters": "VOTERS",
"all-text": "ALL",
"supply-pie-chart": "STAKED SUPPLY",
"hashrate-bar-chart": "HASHRATE",
"mainchain-chart-title": "MAINCHAIN ACTIVITY",
"mainchain-chart-hashrate": "HASHRATE",
"mainchain-chart-addresses": "ADDRESSES",
"mainchain-chart-tph": "TX PER HR",
// Vote Page
"table-header-rank": "Rank",
"table-header-delegate": "Delegate",
"table-header-rewards": "Rewards",
"table-no-data": "No Data",
// Wallets Page
"wallets-title": "Wallets",
"no-wallets-text": "No wallets saved",
"remove-wallet-button": "Remove",
"add-wallet-button": "Add Wallet",
"add-alert-header": "Add wallet",
"add-alert-alias": "Alias (optional)",
"add-alert-address": "Address",
"add-alert-cancel": "Cancel",
"add-alert-add": "Add",
"remove-alert-header": "Remove wallet",
"remove-alert-message": "Wallet address will be cleared from device storage.",
"remove-alert-cancel": "Cancel",
"remove-alert-remove": "Remove",
// Data Service
"duplicate-toast-error": "Duplicate address detected. Please remove the old one before updating the alias.",
"invalid-address-toast-error": "Not a valid address. Please try again.",
"balance-fetch-toast-error": "Balance fetch error",
'no-rewards': "No rewards found. Have any votes been cast from this address?",
"unknown-toast-error": "There was an error retrieving data. Please check your connection. If the problem persists the node may be offline.",
"toast-ok": "Ok",
// Staking Tools Menu
"menu-title": "Staking Tools",
"statistics-text": "STATISTICS",
"nodes-text": "NODES",
"of-votes-text": "OF VOTES",
"annual-return-text": "ANNUAL RETURN",
"earnings-share-text": "AVG. EARNINGS SHARE",
"geography-text": "GEOGRAPHY",
"north-america-text": "NORTH AMERICA",
"south-america-text": "SOUTH AMERICA",
"europe-text": "EUROPE",
"asia-text": "ASIA",
"oceania-text": "OCEANIA",
"africa-text": "AFRICA",
"bundles-text": "PRESET BUNDLES",
"max-return-button": "Max Return",
"top-36-button": "Top 36",
"last-vote-button": "Last Vote",
"clear-button": "Clear",
"vote-text": "VOTE",
"elastOS-button": "elastOS",
"elephant-button": "Elephant",
"no-voting-history": "No voting history found",
"votes-canceled-toast": "Votes were canceled",
"wallet-failed-toast": "No response from wallet",
"vote-success-header": "Votes successfully submitted",
"vote-failed-toast": "There was an error sending votes",
"select-36-toast": "Please select up to 36 nodes in order to vote",
// Node Slider Component
"node-rank-text": "Rank",
"node-votes-text": "Votes",
"node-voters-text": "Voters",
"node-active": "Active",
"node-standby": "Standby",
"node-inactive": "Inactive", | "node-state-text": "State", | random_line_split |
|
practice.js | return false;
}
// const testOne = areThereDuplicates(1, 2, 3);
// console.log(testOne);
// const testTwo = areThereDuplicates(1, 2, 2);
// console.log(testTwo);
const testThree = areThereDuplicates("a", "b", "c", "a");
//console.log(testThree);
//areThereDuplicates One Liner Solution
function areThereDuplicates() {
return new Set(arguments).size !== arguments.length;
}
//areThereDuplicates Solution (Multiple Pointers)
function areThereDuplicates(...args) {
// Two pointers
args.sort((a, b) => (a > b ? 1 : -1)); // comparator must return a number, not a boolean
let start = 0;
let next = 1;
while (next < args.length) {
if (args[start] === args[next]) {
return true;
}
start++;
next++;
}
return false;
}
//areThereDuplicates Solution (Frequency Counter)
function areThereDuplicates() {
let collection = {};
for (let val in arguments) {
collection[arguments[val]] = (collection[arguments[val]] || 0) + 1;
}
for (let key in collection) {
if (collection[key] > 1) return true;
}
return false;
}
//sameFrequency Solution
function sameFrequency(num1, num2) {
let strNum1 = num1.toString();
let strNum2 = num2.toString();
if (strNum1.length !== strNum2.length) return false;
let countNum1 = {};
let countNum2 = {};
for (let i = 0; i < strNum1.length; i++) {
countNum1[strNum1[i]] = (countNum1[strNum1[i]] || 0) + 1;
}
for (let j = 0; j < strNum1.length; j++) {
countNum2[strNum2[j]] = (countNum2[strNum2[j]] || 0) + 1;
}
for (let key in countNum1) {
if (countNum1[key] !== countNum2[key]) return false;
}
return true;
}
function factorial(num) {
if (num === 1) return 1;
// each recursive call waits for factorial(num - 1) to return before multiplying,
// so the multiplications resolve from the base case back up the call stack
return num * factorial(num - 1);
}
const sumAll = factorial(1);
//console.log(sumAll);
function collectOddValues(arr) {
// Though newArr is reset to an empty array on every call,
// its values are preserved by concatenating the recursive results below
let newArr = [];
if (arr.length === 0) return newArr;
if (arr[0] % 2 !== 0) {
newArr.push(arr[0]);
}
newArr = newArr.concat(collectOddValues(arr.slice(1)));
return newArr;
}
//POWER SOLUTION
function power(base, exponent) {
if (exponent === 0) return 1;
return base * power(base, exponent - 1);
}
//FACTORIAL SOLUTION
function factorial(x) {
if (x < 0) return 0;
if (x <= 1) return 1;
return x * factorial(x - 1);
}
//PRODUCT OF ARRAY SOLUTION
function productOfArray(arr) {
if (arr.length === 0) {
return 1;
}
return arr[0] * productOfArray(arr.slice(1));
}
//RECURSIVE RANGE SOLUTION
function recursiveRange(x) |
//FIBONACCI SOLUTION
function fib(n) {
if (n <= 2) return 1;
return fib(n - 1) + fib(n - 2);
}
// REVERSE
function reverse(str) {
// add whatever parameters you deem necessary - good luck!
let lastChar = str.charAt(str.length - 1);
let withoutLastChar = str.substring(0, str.length - 1);
console.log(lastChar, withoutLastChar);
if (str.length === 0) return "";
return lastChar + reverse(withoutLastChar);
}
//console.log(reverse("rithmschool")); // 'emosewa'
// reverse('rithmschool') // 'loohcsmhtir'
// Is Palindrome
//Reverse Solution
function reverse(str) {
if (str.length <= 1) return str;
return reverse(str.slice(1)) + str[0];
}
//isPalindrome Solution
function isPalindrome(str) {
if (str.length === 1) return true;
if (str.length === 2) return str[0] === str[1];
if (str[0] === str.slice(-1)) return isPalindrome(str.slice(1, -1));
return false;
}
// Searching An Array
// Linear search
function linearSearch(arr, num) {
// add whatever parameters you deem necessary - good luck!
let indexOfItem = -1;
for (let i = 0; i < arr.length; i++) {
if (arr[i] === num) indexOfItem = i;
}
return indexOfItem;
}
//console.log(linearSearch([9, 12, 6, 7, 90, 25, 4], 7));
// Binary search - the array has to be sorted for this to work
// Binary search is a divide and conquer approach
// We track the left, the right and the middle
function binarySearch(arr, num) {
let end = arr.length - 1;
let start = 0;
let middle = Math.floor((start + end) / 2);
while (arr[middle] !== num && start <= end) {
if (num < arr[middle]) end = middle - 1;
else start = middle + 1;
middle = Math.floor((start + end) / 2);
}
if (arr[middle] === num) return middle;
return -1;
}
//console.log(binarySearch([2, 5, 6, 9, 13, 15, 28, 30], 2));
// Naive string search
function naiveSearch(long, short) {
let count = 0;
for (let i = 0; i < long.length; i++) {
for (var j = 0; j < short.length; j++) {
if (short[j] !== long[i + j]) break;
if (j === short.length - 1) count++;
}
}
return count;
}
//console.log(naiveSearch("lorie loled", "pop"));
// Bubble Sort
// The largest values bubble to the end of the array on each pass
function bubbleSort(arr) {
let noSwaps;
for (let i = arr.length; i > 0; i--) {
noSwaps = true; // reset the flag at the start of every pass so the early exit works
for (let j = 0; j < i - 1; j++) {
if (arr[j] > arr[j + 1]) {
let temp = arr[j];
arr[j] = arr[j + 1];
arr[j + 1] = temp;
noSwaps = false;
}
}
if (noSwaps) break;
}
return arr;
}
//console.log(bubbleSort([37, 45, 29, 8, -1, 0, 62]));
// Selection Sort
// Sorted data accumulates at the beginning of the array
// Time complexity => O(n^2)
function selectionSort(arr) {
let foundSmaller;
for (let i = 0; i < arr.length; i++) {
let lowest = i;
foundSmaller = false; // reset per pass so a stale flag does not trigger a needless swap
for (let j = i + 1; j < arr.length; j++) {
if (arr[lowest] > arr[j]) {
lowest = j;
foundSmaller = true;
}
}
if (foundSmaller) {
let temp = arr[i];
arr[i] = arr[lowest];
arr[lowest] = temp;
}
}
return arr;
}
//console.log(selectionSort([37, 45, 29, 8, -1, 62]));
// Insertion Sort
// It builds up the sort by gradually placing each element where it should go in the sorted half.
// Start by picking the second element in the array
// compare it with the one before it and swap if necessary
// Continue to the next element and, if it is in the incorrect order, iterate through the sorted portion and
// place the element in the correct spot => repeat until the array is sorted and return it
// Time complexity O(n^2)
function insertionSort(arr) {
for (let i = 1; i < arr.length; i++) {
let currentVal = arr[i];
// the stopping condition is in the for loop header
for (var j = i - 1; j >= 0 && arr[j] > currentVal; j--) {
arr[j + 1] = arr[j];
}
arr[j + 1] = currentVal;
}
return arr;
}
//console.log(insertionSort([2, 1, 9, 76, 4]));
// Merge Sort
// Combination of splitting, merging and sorting
// Exploits the fact that arrays of 0 or 1 element are always sorted
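// A minimal merge sort sketch along the lines of the comments above; the "merge" helper
// name and the sample call are illustrative assumptions rather than part of the original file.
// merge: combine two already-sorted arrays into a single sorted array - O(n + m)
function merge(left, right) {
  let results = [];
  let i = 0;
  let j = 0;
  while (i < left.length && j < right.length) {
    if (left[i] <= right[j]) {
      results.push(left[i]);
      i++;
    } else {
      results.push(right[j]);
      j++;
    }
  }
  // copy over whatever remains in either half
  while (i < left.length) {
    results.push(left[i]);
    i++;
  }
  while (j < right.length) {
    results.push(right[j]);
    j++;
  }
  return results;
}
// mergeSort: keep splitting until subarrays have 0 or 1 elements, then merge back up - O(n log n)
function mergeSort(arr) {
  if (arr.length <= 1) return arr;
  let mid = Math.floor(arr.length / 2);
  let left = mergeSort(arr.slice(0, mid));
  let right = mergeSort(arr.slice(mid));
  return merge(left, right);
}
//console.log(mergeSort([10, 24, 76, 73, 72, 1, 9]));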
if (x === 0) return 0;
return x + recursiveRange(x - 1);
} | identifier_body |
practice.js | return false;
}
// const testOne = areThereDuplicates(1, 2, 3);
// console.log(testOne);
// const testTwo = areThereDuplicates(1, 2, 2);
// console.log(testTwo);
const testThree = areThereDuplicates("a", "b", "c", "a");
//console.log(testThree);
//areThereDuplicates One Liner Solution
function areThereDuplicates() {
return new Set(arguments).size !== arguments.length;
}
//areThereDuplicates Solution (Multiple Pointers)
function areThereDuplicates(...args) {
// Two pointers
args.sort((a, b) => (a > b ? 1 : -1)); // comparator must return a number, not a boolean
let start = 0;
let next = 1;
while (next < args.length) {
if (args[start] === args[next]) {
return true;
}
start++;
next++;
}
return false;
}
//areThereDuplicates Solution (Frequency Counter)
function areThereDuplicates() {
let collection = {};
for (let val in arguments) {
collection[arguments[val]] = (collection[arguments[val]] || 0) + 1;
}
for (let key in collection) {
if (collection[key] > 1) return true;
}
return false;
}
//sameFrequency Solution
function sameFrequency(num1, num2) {
let strNum1 = num1.toString();
let strNum2 = num2.toString();
if (strNum1.length !== strNum2.length) return false;
let countNum1 = {};
let countNum2 = {};
for (let i = 0; i < strNum1.length; i++) {
countNum1[strNum1[i]] = (countNum1[strNum1[i]] || 0) + 1;
}
for (let j = 0; j < strNum1.length; j++) {
countNum2[strNum2[j]] = (countNum2[strNum2[j]] || 0) + 1;
}
for (let key in countNum1) {
if (countNum1[key] !== countNum2[key]) return false;
}
return true;
}
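// Assumed examples: sameFrequency(182, 281) -> true (same digit counts),
// sameFrequency(34, 14) -> false, sameFrequency(22, 222) -> false (lengths differ).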
function factorial(num) {
if (num === 1) return 1;
// recursively the first instance of num * factorial(num -1) waits for the second instance of
// num * factorial(num -1 ) until completion
return num * factorial(num - 1);
}
const sumAll = factorial(1);
//console.log(sumAll);
function collectOddValues(arr) {
// Though newArr is everytime set to lenght of zero,
// it's value is held in concat recursively below
let newArr = [];
if (arr.length === 0) return newArr;
if (arr[0] % 2 !== 0) {
newArr.push(arr[0]);
}
newArr = newArr.concat(collectOddValues(arr.slice(1)));
return newArr;
}
//POWER SOLUTION
function power(base, exponent) {
if (exponent === 0) return 1;
return base * power(base, exponent - 1);
}
//FACTORIAL SOLUTION | ////PRODUCT OF ARRAY SOLUTION
function productOfArray(arr) {
if (arr.length === 0) {
return 1;
}
return arr[0] * productOfArray(arr.slice(1));
}
//RECURSIVE RANGE SOLUTION
function recursiveRange(x) {
if (x === 0) return 0;
return x + recursiveRange(x - 1);
}
//FIBONACCI SOLUTION
function fib(n) {
if (n <= 2) return 1;
return fib(n - 1) + fib(n - 2);
}
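// Assumed spot-checks for the recursive helpers above:
// power(2, 4) -> 16, productOfArray([1, 2, 3, 10]) -> 60,
// recursiveRange(6) -> 21, fib(10) -> 55 (naive fib is O(2^n); memoize for large n).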
// REVERSE
function reverse(str) {
// add whatever parameters you deem necessary - good luck!
let lastChar = str.charAt(str.length - 1);
let withoutLastChar = str.substring(0, str.length - 1);
console.log(lastChar, withoutLastChar);
if (str.length === 0) return "";
return lastChar + reverse(withoutLastChar);
}
//console.log(reverse("rithmschool")); // 'emosewa'
// reverse('rithmschool') // 'loohcsmhtir'
// Is Palindrom
//Reverse Solution
function reverse(str) {
if (str.length <= 1) return str;
return reverse(str.slice(1)) + str[0];
}
//isPalindrome Solution
function isPalindrome(str) {
if (str.length === 1) return true;
if (str.length === 2) return str[0] === str[1];
if (str[0] === str.slice(-1)) return isPalindrome(str.slice(1, -1));
return false;
}
// Searching An Array
// Linear search
function linearSearch(arr, num) {
// add whatever parameters you deem necessary - good luck!
let indexOfItem = -1;
for (let i = 0; i < arr.length; i++) {
if (arr[i] === num) indexOfItem = i;
}
return indexOfItem;
}
//console.log(linearSearch([9, 12, 6, 7, 90, 25, 4], 7));
// Binary search: the array has to be sorted for this to work
// Binary search is divide and conquer
// We have the left, the right and the middle
function binarySearch(arr, num) {
let end = arr.length - 1;
let start = 0;
let middle = Math.floor((start + end) / 2);
while (arr[middle] !== num && start <= end) {
if (num < arr[middle]) end = middle - 1;
else start = middle + 1;
middle = Math.floor((start + end) / 2);
}
if (arr[middle] === num) return middle;
return -1;
}
//console.log(binarySearch([2, 5, 6, 9, 13, 15, 28, 30], 2));
// Naive string search
function naiveSearch(long, short) {
let count = 0;
for (let i = 0; i < long.length; i++) {
for (var j = 0; j < short.length; j++) {
if (short[j] !== long[i + j]) break;
if (j === short.length - 1) count++;
}
}
return count;
}
//console.log(naiveSearch("lorie loled", "pop"));
// Bubble Sort
// The largest values accumulate at the back of the array with each pass
function bubbleSort(arr) {
let noSwaps;
  for (let i = arr.length; i > 0; i--) {
    noSwaps = true; // reset each pass so the early exit below can trigger
    for (let j = 0; j < i - 1; j++) {
if (arr[j] > arr[j + 1]) {
let temp = arr[j];
arr[j] = arr[j + 1];
arr[j + 1] = temp;
noSwaps = false;
}
}
if (noSwaps) break;
}
return arr;
}
//console.log(bubbleSort([37, 45, 29, 8, -1, 0, 62]));
// Selection Sort
// Sorted data is accumulated at the beginning
// Time complexity => O(n^2)
function selectionSort(arr) {
let foundSmaller;
  for (let i = 0; i < arr.length; i++) {
    foundSmaller = false; // reset each pass; only swap when a smaller element was found
    let lowest = i;
for (let j = i + 1; j < arr.length; j++) {
if (arr[lowest] > arr[j]) {
lowest = j;
foundSmaller = true;
}
}
if (foundSmaller) {
let temp = arr[i];
arr[i] = arr[lowest];
arr[lowest] = temp;
}
}
return arr;
}
//console.log(selectionSort([37, 45, 29, 8, -1, 62]));
// Insertion Sort
// It builds up the sort by gradually placing each element where it should go in the sorted half.
// Start by picking the second element in the array,
// compare it with the one before it and swap if necessary.
// Continue to the next element and, if it is in the incorrect order, iterate through the sorted portion
// to place the element in the correct spot => repeat until the array is sorted, then return it.
// Time complexity O(n^2)
function insertionSort(arr) {
for (let i = 1; i < arr.length; i++) {
let currentVal = arr[i];
// condition is in the for loop condition
for (var j = i - 1; j >= 0 && arr[j] > currentVal; j--) {
arr[j + 1] = arr[j];
}
arr[j + 1] = currentVal;
}
return arr;
}
//console.log(insertionSort([2, 1, 9, 76, 4]));
// Merge Sort
// Combination of splitting, merging and sorting
// Exploits the fact that arrays of 0 | function factorial(x) {
if (x < 0) return 0;
if (x <= 1) return 1;
return x * factorial(x - 1);
} | random_line_split |
practice.js
return false;
}
// const testOne = areThereDuplicates(1, 2, 3);
// console.log(testOne);
// const testTwo = areThereDuplicates(1, 2, 2);
// console.log(testTwo);
const testThree = areThereDuplicates("a", "b", "c", "a");
//console.log(testThree);
//areThereDuplicates One Liner Solution
function areThereDuplicates() {
return new Set(arguments).size !== arguments.length;
}
//areThereDuplicates Solution (Multiple Pointers)
function areThereDuplicates(...args) {
// Two pointers
  args.sort(); // default sort is enough here: equal values end up adjacent
let start = 0;
let next = 1;
while (next < args.length) {
if (args[start] === args[next]) {
return true;
}
start++;
next++;
}
return false;
}
//areThereDuplicates Solution (Frequency Counter)
function areThereDuplicates() {
let collection = {};
for (let val in arguments) {
collection[arguments[val]] = (collection[arguments[val]] || 0) + 1;
}
for (let key in collection) {
if (collection[key] > 1) return true;
}
return false;
}
//sameFrequency Solution
function sameFrequency(num1, num2) {
let strNum1 = num1.toString();
let strNum2 = num2.toString();
if (strNum1.length !== strNum2.length) return false;
let countNum1 = {};
let countNum2 = {};
for (let i = 0; i < strNum1.length; i++) {
countNum1[strNum1[i]] = (countNum1[strNum1[i]] || 0) + 1;
}
for (let j = 0; j < strNum1.length; j++) {
countNum2[strNum2[j]] = (countNum2[strNum2[j]] || 0) + 1;
}
for (let key in countNum1) {
if (countNum1[key] !== countNum2[key]) return false;
}
return true;
}
function factorial(num) {
if (num === 1) return 1;
// recursively the first instance of num * factorial(num -1) waits for the second instance of
// num * factorial(num -1 ) until completion
return num * factorial(num - 1);
}
const sumAll = factorial(1);
//console.log(sumAll);
function collectOddValues(arr) {
// Though newArr is everytime set to lenght of zero,
// it's value is held in concat recursively below
let newArr = [];
if (arr.length === 0) return newArr;
if (arr[0] % 2 !== 0) {
newArr.push(arr[0]);
}
newArr = newArr.concat(collectOddValues(arr.slice(1)));
return newArr;
}
//POWER SOLUTION
function power(base, exponent) {
if (exponent === 0) return 1;
return base * power(base, exponent - 1);
}
//FACTORIAL SOLUTION
function factorial(x) {
if (x < 0) return 0;
if (x <= 1) return 1;
return x * factorial(x - 1);
}
////PRODUCT OF ARRAY SOLUTION
function productOfArray(arr) {
if (arr.length === 0) |
return arr[0] * productOfArray(arr.slice(1));
}
//RECURSIVE RANGE SOLUTION
function recursiveRange(x) {
if (x === 0) return 0;
return x + recursiveRange(x - 1);
}
//FIBONACCI SOLUTION
function fib(n) {
if (n <= 2) return 1;
return fib(n - 1) + fib(n - 2);
}
// REVERSE
function reverse(str) {
// add whatever parameters you deem necessary - good luck!
let lastChar = str.charAt(str.length - 1);
let withoutLastChar = str.substring(0, str.length - 1);
console.log(lastChar, withoutLastChar);
if (str.length === 0) return "";
return lastChar + reverse(withoutLastChar);
}
//console.log(reverse("rithmschool")); // 'emosewa'
// reverse('rithmschool') // 'loohcsmhtir'
// Is Palindrom
//Reverse Solution
function reverse(str) {
if (str.length <= 1) return str;
return reverse(str.slice(1)) + str[0];
}
//isPalindrome Solution
function isPalindrome(str) {
if (str.length === 1) return true;
if (str.length === 2) return str[0] === str[1];
if (str[0] === str.slice(-1)) return isPalindrome(str.slice(1, -1));
return false;
}
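// Assumed examples: isPalindrome('tacocat') -> true, isPalindrome('foobar') -> false;
// each call strips a matching first/last pair until 1 or 2 characters remain.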
// Searching An Array
// Linear search
function linearSearch(arr, num) {
// add whatever parameters you deem necessary - good luck!
let indexOfItem = -1;
for (let i = 0; i < arr.length; i++) {
if (arr[i] === num) indexOfItem = i;
}
return indexOfItem;
}
//console.log(linearSearch([9, 12, 6, 7, 90, 25, 4], 7));
// Binary search: the array has to be sorted for this to work
// Binary search is divide and conquer
// We have the left, the right and the middle
function binarySearch(arr, num) {
let end = arr.length - 1;
let start = 0;
let middle = Math.floor((start + end) / 2);
while (arr[middle] !== num && start <= end) {
if (num < arr[middle]) end = middle - 1;
else start = middle + 1;
middle = Math.floor((start + end) / 2);
}
if (arr[middle] === num) return middle;
return -1;
}
//console.log(binarySearch([2, 5, 6, 9, 13, 15, 28, 30], 2));
// Naive string search
function naiveSearch(long, short) {
let count = 0;
for (let i = 0; i < long.length; i++) {
for (var j = 0; j < short.length; j++) {
if (short[j] !== long[i + j]) break;
if (j === short.length - 1) count++;
}
}
return count;
}
//console.log(naiveSearch("lorie loled", "pop"));
// Bubble Sort
// The largest values accumulate at the back of the array with each pass
function bubbleSort(arr) {
let noSwaps;
  for (let i = arr.length; i > 0; i--) {
    noSwaps = true; // reset each pass so the early exit below can trigger
    for (let j = 0; j < i - 1; j++) {
if (arr[j] > arr[j + 1]) {
let temp = arr[j];
arr[j] = arr[j + 1];
arr[j + 1] = temp;
noSwaps = false;
}
}
if (noSwaps) break;
}
return arr;
}
//console.log(bubbleSort([37, 45, 29, 8, -1, 0, 62]));
// Selection Sort
// Sorted data is accumulated at the beginning
// Time complexity => O(n^2)
function selectionSort(arr) {
let foundSmaller;
  for (let i = 0; i < arr.length; i++) {
    foundSmaller = false; // reset each pass; only swap when a smaller element was found
    let lowest = i;
for (let j = i + 1; j < arr.length; j++) {
if (arr[lowest] > arr[j]) {
lowest = j;
foundSmaller = true;
}
}
if (foundSmaller) {
let temp = arr[i];
arr[i] = arr[lowest];
arr[lowest] = temp;
}
}
return arr;
}
//console.log(selectionSort([37, 45, 29, 8, -1, 62]));
// Insertion Sort
// It builds up the sort by gradually placing each element where it should go in the sorted half.
// Start by picking the second element in the array,
// compare it with the one before it and swap if necessary.
// Continue to the next element and, if it is in the incorrect order, iterate through the sorted portion
// to place the element in the correct spot => repeat until the array is sorted, then return it.
// Time complexity O(n^2)
function insertionSort(arr) {
for (let i = 1; i < arr.length; i++) {
let currentVal = arr[i];
// condition is in the for loop condition
for (var j = i - 1; j >= 0 && arr[j] > currentVal; j--) {
arr[j + 1] = arr[j];
}
arr[j + 1] = currentVal;
}
return arr;
}
//console.log(insertionSort([2, 1, 9, 76, 4]));
// Merge Sort
// Combination of splitting, merging and sorting
// Exploits the fact that arrays of | {
return 1;
} | conditional_block |
practice.js
return false;
}
// const testOne = areThereDuplicates(1, 2, 3);
// console.log(testOne);
// const testTwo = areThereDuplicates(1, 2, 2);
// console.log(testTwo);
const testThree = areThereDuplicates("a", "b", "c", "a");
//console.log(testThree);
//areThereDuplicates One Liner Solution
function areThereDuplicates() {
return new Set(arguments).size !== arguments.length;
}
//areThereDuplicates Solution (Multiple Pointers)
function areThereDuplicates(...args) {
// Two pointers
  args.sort(); // default sort is enough here: equal values end up adjacent
let start = 0;
let next = 1;
while (next < args.length) {
if (args[start] === args[next]) {
return true;
}
start++;
next++;
}
return false;
}
//areThereDuplicates Solution (Frequency Counter)
function areThereDuplicates() {
let collection = {};
for (let val in arguments) {
collection[arguments[val]] = (collection[arguments[val]] || 0) + 1;
}
for (let key in collection) {
if (collection[key] > 1) return true;
}
return false;
}
//sameFrequency Solution
function sameFrequency(num1, num2) {
let strNum1 = num1.toString();
let strNum2 = num2.toString();
if (strNum1.length !== strNum2.length) return false;
let countNum1 = {};
let countNum2 = {};
for (let i = 0; i < strNum1.length; i++) {
countNum1[strNum1[i]] = (countNum1[strNum1[i]] || 0) + 1;
}
for (let j = 0; j < strNum1.length; j++) {
countNum2[strNum2[j]] = (countNum2[strNum2[j]] || 0) + 1;
}
for (let key in countNum1) {
if (countNum1[key] !== countNum2[key]) return false;
}
return true;
}
function factorial(num) {
if (num === 1) return 1;
// recursively the first instance of num * factorial(num -1) waits for the second instance of
// num * factorial(num -1 ) until completion
return num * factorial(num - 1);
}
const sumAll = factorial(1);
//console.log(sumAll);
function collectOddValues(arr) {
// Though newArr is everytime set to lenght of zero,
// it's value is held in concat recursively below
let newArr = [];
if (arr.length === 0) return newArr;
if (arr[0] % 2 !== 0) {
newArr.push(arr[0]);
}
newArr = newArr.concat(collectOddValues(arr.slice(1)));
return newArr;
}
//POWER SOLUTION
function power(base, exponent) {
if (exponent === 0) return 1;
return base * power(base, exponent - 1);
}
//FACTORIAL SOLUTION
function factorial(x) {
if (x < 0) return 0;
if (x <= 1) return 1;
return x * factorial(x - 1);
}
////PRODUCT OF ARRAY SOLUTION
function productOfArray(arr) {
if (arr.length === 0) {
return 1;
}
return arr[0] * productOfArray(arr.slice(1));
}
//RECURSIVE RANGE SOLUTION
function recursiveRange(x) {
if (x === 0) return 0;
return x + recursiveRange(x - 1);
}
//FIBONACCI SOLUTION
function fib(n) {
if (n <= 2) return 1;
return fib(n - 1) + fib(n - 2);
}
// REVERSE
function reverse(str) {
// add whatever parameters you deem necessary - good luck!
let lastChar = str.charAt(str.length - 1);
let withoutLastChar = str.substring(0, str.length - 1);
console.log(lastChar, withoutLastChar);
if (str.length === 0) return "";
return lastChar + reverse(withoutLastChar);
}
//console.log(reverse("rithmschool")); // 'emosewa'
// reverse('rithmschool') // 'loohcsmhtir'
// Is Palindrom
//Reverse Solution
function reverse(str) {
if (str.length <= 1) return str;
return reverse(str.slice(1)) + str[0];
}
//isPalindrome Solution
function isPalindrome(str) {
if (str.length === 1) return true;
if (str.length === 2) return str[0] === str[1];
if (str[0] === str.slice(-1)) return isPalindrome(str.slice(1, -1));
return false;
}
// Searching An Array
// Linear search
function linearSearch(arr, num) {
// add whatever parameters you deem necessary - good luck!
let indexOfItem = -1;
for (let i = 0; i < arr.length; i++) {
if (arr[i] === num) indexOfItem = i;
}
return indexOfItem;
}
//console.log(linearSearch([9, 12, 6, 7, 90, 25, 4], 7));
// Binary search: the array has to be sorted for this to work
// Binary search is divide and conquer
// We have the left, the right and the middle
function binarySearch(arr, num) {
let end = arr.length - 1;
let start = 0;
let middle = Math.floor((start + end) / 2);
while (arr[middle] !== num && start <= end) {
if (num < arr[middle]) end = middle - 1;
else start = middle + 1;
middle = Math.floor((start + end) / 2);
}
if (arr[middle] === num) return middle;
return -1;
}
//console.log(binarySearch([2, 5, 6, 9, 13, 15, 28, 30], 2));
// Naive string search
function naiveSearch(long, short) {
let count = 0;
for (let i = 0; i < long.length; i++) {
for (var j = 0; j < short.length; j++) {
if (short[j] !== long[i + j]) break;
if (j === short.length - 1) count++;
}
}
return count;
}
//console.log(naiveSearch("lorie loled", "pop"));
// Bubble Sort
// The largest values accumulate at the back of the array with each pass
function bubbleSort(arr) {
let noSwaps;
  for (let i = arr.length; i > 0; i--) {
    noSwaps = true; // reset each pass so the early exit below can trigger
    for (let j = 0; j < i - 1; j++) {
if (arr[j] > arr[j + 1]) {
let temp = arr[j];
arr[j] = arr[j + 1];
arr[j + 1] = temp;
noSwaps = false;
}
}
if (noSwaps) break;
}
return arr;
}
//console.log(bubbleSort([37, 45, 29, 8, -1, 0, 62]));
// Selection Sort
// Sorted data is accumulated at the beginning
// Time complexity => O(n^2)
function | (arr) {
let foundSmaller;
  for (let i = 0; i < arr.length; i++) {
    foundSmaller = false; // reset each pass; only swap when a smaller element was found
    let lowest = i;
for (let j = i + 1; j < arr.length; j++) {
if (arr[lowest] > arr[j]) {
lowest = j;
foundSmaller = true;
}
}
if (foundSmaller) {
let temp = arr[i];
arr[i] = arr[lowest];
arr[lowest] = temp;
}
}
return arr;
}
//console.log(selectionSort([37, 45, 29, 8, -1, 62]));
// Insertion Sort
// It builds up the sort by gradually placing each element where it should go in the sorted half.
// Start by picking the second element in the array,
// compare it with the one before it and swap if necessary.
// Continue to the next element and, if it is in the incorrect order, iterate through the sorted portion
// to place the element in the correct spot => repeat until the array is sorted, then return it.
// Time complexity O(n^2)
function insertionSort(arr) {
for (let i = 1; i < arr.length; i++) {
let currentVal = arr[i];
// condition is in the for loop condition
for (var j = i - 1; j >= 0 && arr[j] > currentVal; j--) {
arr[j + 1] = arr[j];
}
arr[j + 1] = currentVal;
}
return arr;
}
//console.log(insertionSort([2, 1, 9, 76, 4]));
// Merge Sort
// Combination of splitting, merging and sorting
// Explots the fact that arrays of | selectionSort | identifier_name |
all_phases.rs
js);
Ok(())
})
}
};
}
extern_case!(snudown_js, "js/snudown.js");
case!(
basic,
r#"
function f(x) {
while (true);
x = y.bar;
z.foo = x ? true : 'hi';
return +[1 || x, { x }, f + 1, ++g];
}
f(1), true;
"#,
@r###"
(function f() {
for(;;);
var _val = y.bar;
var _obj = z;
var _val$1;
_val$1 = _val ? true : "hi";
_obj.foo = _val$1;
var _wri = g + 1;
g = _wri;
return +[
1,
{
x: _val
},
f + 1, _wri];
})(1);
"###);
case!(
assign_to_expr,
r#"
e |= 0;
foo().x |= 1;
"#,
@r###"
e = e | 0;
var _obj = foo();
_obj.x = _obj.x | 1;
"###);
case!(
labels,
r#"
outer: for (;;) {
inner: for (;;) {
if (foo) continue inner;
if (bar) break outer;
}
}
"#,
@r###"
outer: for(;;)inner: for(;;){
if (foo) continue inner;
if (bar) break outer;
}
"###);
case!(
nested_no_side_effects,
r#"
let x = 1;
if (foo) {
g = just_read_global_state;
}
log(x);
let y = 1;
if (foo) {
function maybe_change_y() {
if (bar) y = 10;
}
maybe_change_y();
}
log(y);
"#,
@r###"
if (foo) g = just_read_global_state;
log(1);
var y = 1;
if (foo) {
if (bar) y = 10;
}
log(y);
"###);
case!(
snudown_js_like,
r#"
var r;
g = something;
r || (r = {});
var s = {};
var o;
for (o in r) s[o] = r[o];
r.x = 1;
for (o in s) r[o] = s[o];
var stuff = (function(r_inner) {
return {
xy: r_inner.x * 2
};
})(r);
var xy = stuff.xy;
window.foo = function foo(z) {
return z + xy;
};
"#,
@r###"
g = something;
window.foo = function(z) { | };
"###);
case!(
snudown_js_like2,
r#"
var o, c = {}, s = {};
for (o in c) c.hasOwnProperty(o) && (s[o] = c[o]);
var u = console.log.bind(console), b = console.warn.bind(console);
for (o in s) s.hasOwnProperty(o) && (c[o] = s[o]);
s = null;
var k, v, d, h = 0, w = !1;
k = c.buffer ? c.buffer : new ArrayBuffer(16777216), c.HEAP8 = v = new Int8Array(k), c.HEAP32 = s = new Int32Array(k), c.HEAPU8 = d = new Uint8Array(k), s[2340] = 5252272;
var m = [], _ = [], p = [], y = [];
c.preloadedImages = {}, c.preloadedAudios = {}, s = null, s = '\0\0\0\0\0';
var g = c._default_renderer = k._default_renderer, A = c._free = k._free;
c._i64Add = k._i64Add, c._i64Subtract = k._i64Subtract;
var C = c._wiki_renderer = k._wiki_renderer;
c.establishStackSpace = k.establishStackSpace;
var S, x = c.stackAlloc = k.stackAlloc, E = c.stackRestore = k.stackRestore, I = c.stackSave = k.stackSave;
c.dynCall_iii = k.dynCall_iii, c.dynCall_iiii = k.dynCall_iiii, c.asm = k;
s && (function (r) {
var e, i = r.length;
for (e = 0; e < i; ++e) d[8 + e] = r.charCodeAt(e)
})(s);
"#,
@r###"
console.log.bind(console);
console.warn.bind(console);
var _alt = new ArrayBuffer(16777216);
new Int8Array(_alt);
var _val = new Int32Array(_alt);
var _val$1 = new Uint8Array(_alt);
_val[2340] = 5252272;
_alt._default_renderer;
_alt._free;
_alt._i64Add;
_alt._i64Subtract;
_alt._wiki_renderer;
_alt.establishStackSpace;
_alt.stackAlloc;
_alt.stackRestore;
_alt.stackSave;
_alt.dynCall_iii;
_alt.dynCall_iiii;
var e = 0;
for(; e < 5;){
var _prp = 8 + e;
_val$1[_prp] = "\0\0\0\0\0".charCodeAt(e);
e = e + 1;
}
"###);
case!(
fn_scopes_do_not_deter_ssa_inlining,
r#"
let x = foo();
function f() {
something();
}
g = x;
f();
f();
"#,
@r###"
var _fun = function() {
something();
};
g = foo();
_fun();
_fun();
"###);
case!(
inline_into_if_but_not_past_effects,
r#"
let x = g;
if (foo) {
log(x);
}
let y = h;
if (bar()) {
log(y);
}
i = function() { return x = y = 1; }
"#,
@r###"
if (foo) log(g);
var y = h;
if (bar()) log(y);
i = function() {
y = 1;
return 1;
};
"###);
case!(
dont_inline_into_loop,
r#"
let x = g;
do {
log(x);
g = 1;
} while (foo);
"#,
@r###"
var x = g;
for(;;){
log(x);
g = 1;
if (foo) ;
else break;
}
"###);
case!(
completely_redundant_var,
r#"
var x = 0;
x += 1;
var n = x;
if (foo) {
x += 1;
log(x);
} else {
log(n);
}
"#,
@r###"
if (foo) log(2);
else log(1);
"###);
case!(
deconflict_nan,
r#"
g1 = 0 / 0;
{
let NaN = 1;
if (foo) {
NaN = 2;
}
g3 = NaN;
}
"#,
@r###"
g1 = NaN;
var NaN$1 = 1;
if (foo) NaN$1 = 2;
g3 = NaN$1;
"###);
case!(
referencing_outer_scope_moved_later,
r#"
var x; // converted to ssa, moved down to x = 0
g = function() {
x();
};
x = foo;
"#,
@r###"
g = function() {
x();
};
var x = foo;
"###);
case!(
referencing_outer_scope_moved_later2,
r#"
var x; // stays mutable, moved down to x = 0
g = function() {
x();
};
x = foo;
g2 = function() {
x = 1;
};
"#,
@r###"
g = function() {
x();
};
var x = foo;
g2 = function() {
x = 1;
};
"###);
case!(
mutually_recursive_fns,
r#"
function a() { b(); }
function b() { c(); }
function c() { a(); }
g1 = a;
g2 = b;
g3 = c;
"#,
@r###"
var _fun = function() {
_fun$1();
};
var _fun$1 = function() {
_fun$2();
};
var _fun$2 = function() {
_fun();
};
g1 = _fun;
g2 = _fun$1;
g3 = _fun$2;
"###);
case!(
fn_hoisting_toplevel,
r#"
foo();
function foo() { foo_(); }
(function() {
bar();
function bar() { bar_(); }
})();
"#,
@r###"
foo | return z + 2; | random_line_split |
build_assets.py
p' as an argument. Additionally, if you would like to clean all
generated files, you can call this script with the argument 'clean'.
"""
import distutils.spawn
import glob
import os
import platform
import subprocess
import sys
# The project root directory, which is one level up from this script's
# directory.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir))
PREBUILTS_ROOT = os.path.abspath(os.path.join(os.path.join(PROJECT_ROOT),
os.path.pardir, os.path.pardir,
os.path.pardir, os.path.pardir,
'prebuilts'))
# Directories that may contain the FlatBuffers compiler.
FLATBUFFERS_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
]
# Directory that contains the cwebp tool.
CWEBP_BINARY_IN_PATH = distutils.spawn.find_executable('cwebp')
CWEBP_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
os.path.join(PREBUILTS_ROOT, 'libwebp',
'%s-x86' % platform.system().lower(),
'libwebp-0.4.1-%s-x86-32' % platform.system().lower(), 'bin'),
os.path.dirname(CWEBP_BINARY_IN_PATH) if CWEBP_BINARY_IN_PATH else '',
]
# Directory to place processed assets.
ASSETS_PATH = os.path.join(PROJECT_ROOT, 'assets')
# Directory where unprocessed assets can be found.
RAW_ASSETS_PATH = os.path.join(PROJECT_ROOT, 'src', 'rawassets')
# Directory where processed sound flatbuffer data can be found.
SOUND_PATH = os.path.join(ASSETS_PATH, 'sounds')
# Directory where unprocessed sound flatbuffer data can be found.
RAW_SOUND_PATH = os.path.join(RAW_ASSETS_PATH, 'sounds')
# Directory where processed material flatbuffer data can be found.
MATERIAL_PATH = os.path.join(ASSETS_PATH, 'materials')
# Directory where unprocessed material flatbuffer data can be found.
RAW_MATERIAL_PATH = os.path.join(RAW_ASSETS_PATH, 'materials')
# Directory where processed textures can be found.
TEXTURE_PATH = os.path.join(ASSETS_PATH, 'textures')
# Directory where unprocessed textures can be found.
RAW_TEXTURE_PATH = os.path.join(RAW_ASSETS_PATH, 'textures')
# Directory where unprocessed assets can be found.
SCHEMA_PATH = os.path.join(PROJECT_ROOT, 'src', 'flatbufferschemas')
# Windows uses the .exe extension on executables.
EXECUTABLE_EXTENSION = '.exe' if platform.system() == 'Windows' else ''
# Name of the flatbuffer executable.
FLATC_EXECUTABLE_NAME = 'flatc' + EXECUTABLE_EXTENSION
# Name of the cwebp executable.
CWEBP_EXECUTABLE_NAME = 'cwebp' + EXECUTABLE_EXTENSION
# What level of quality we want to apply to the webp files.
# Ranges from 0 to 100.
WEBP_QUALITY = 90
def processed_json_dir(path):
"""Take the path to a raw json asset and convert it to target directory."""
return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))
class FlatbuffersConversionData(object):
"""Holds data needed to convert a set of json files to flatbuffer binaries.
Attributes:
schema: The path to the flatbuffer schema file.
input_files: A list of input files to convert.
output_path: The path to the output directory where the converted files will
be placed.
"""
def __init__(self, schema, input_files, output_path):
"""Initializes this object's schema, input_files and output_path."""
self.schema = schema
self.input_files = input_files
self.output_path = output_path
# A list of json files and their schemas that will be converted to binary files
# by the flatbuffer compiler.
FLATBUFFERS_CONVERSION_DATA = [
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'config.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'config.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'buses.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'buses.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_assets.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'sound_assets.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'character_state_machine_def.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH,
'character_state_machine_def.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_collection_def.fbs'),
input_files=glob.glob(os.path.join(RAW_SOUND_PATH, '*.json')),
output_path=SOUND_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'materials.fbs'),
input_files=glob.glob(os.path.join(RAW_MATERIAL_PATH, '*.json')),
output_path=MATERIAL_PATH)
]
def processed_texture_path(path):
"""Take the path to a raw png asset and convert it to target webp path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')
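# Illustrative example (path assumed): processed_texture_path('<root>/src/rawassets/textures/bg.png')
# returns '<root>/assets/textures/bg.webp'.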
# PNG files to convert to webp.
PNG_TEXTURES = {
'input_files': glob.glob(os.path.join(RAW_TEXTURE_PATH, '*.png')),
'output_files': [processed_texture_path(png_path)
for png_path in glob.glob(os.path.join(RAW_TEXTURE_PATH,
'*.png'))]
}
def find_executable(name, paths):
"""Searches for a file with named `name` in the given paths and returns it."""
for path in paths:
full_path = os.path.join(path, name)
if os.path.isfile(full_path):
|
# If not found, just assume it's in the PATH.
return name
# Location of FlatBuffers compiler.
FLATC = find_executable(FLATC_EXECUTABLE_NAME, FLATBUFFERS_PATHS)
# Location of webp compression tool.
CWEBP = find_executable(CWEBP_EXECUTABLE_NAME, CWEBP_PATHS)
class BuildError(Exception):
"""Error indicating there was a problem building assets."""
def __init__(self, argv, error_code):
Exception.__init__(self)
self.argv = argv
self.error_code = error_code
def run_subprocess(argv):
process = subprocess.Popen(argv)
process.wait()
if process.returncode:
raise BuildError(argv, process.returncode)
def convert_json_to_flatbuffer_binary(json, schema, out_dir):
"""Run the flatbuffer compiler on the given json file and schema.
Args:
json: The path to the json file to convert to a flatbuffer binary.
schema: The path to the schema to use in the conversion process.
out_dir: The directory to write the flatbuffer binary.
Raises:
BuildError: Process return code was nonzero.
"""
command = [FLATC, '-o', out_dir, '-b', schema, json]
run_subprocess(command)
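# Illustrative command line built above (paths assumed):
#   flatc -o assets -b src/flatbufferschemas/config.fbs src/rawassets/config.json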
def convert_png_image_to_webp(png, out, quality=80):
"""Run the webp converter on the given png file.
Args:
png: The path to the png file to convert into a webp file.
out: The path of the webp to write to.
quality: The quality of the processed image, where quality is between 0
(poor) to 100 (very good). Typical value is around 80.
Raises:
BuildError: Process return code was nonzero.
"""
command = [CWEBP, '-q', str(quality), png, '-o', out]
run_subprocess(command)
def needs_rebuild(source, target):
"""Checks if the source file needs to be rebuilt.
Args:
source: The source file to be compared.
target: The target file which we may need to rebuild.
Returns:
True if the source file is newer than the target, or if the target file does
not exist.
"""
return not os.path.isfile(target) or (
os.path.getmtime(source) > os.path.getmtime(target))
def processed_json_path(path):
"""Take the path to a raw json asset and convert it to target bin path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')
def generate_flatbuffer_binaries():
"""Run the flatbuffer compiler on the all of the flatbuffer json files."""
for element in FLATBUFFERS_CONVERSION_DATA:
schema = element.schema
output_path = element.output_path
if not os.path.exists(output_path):
os.makedirs(output_path)
for json in element.input_files:
target = processed_json_path(json)
if needs_rebuild(json, target) or needs_rebuild(schema, target):
convert_json_to_flatbuffer_binary(
json, schema, output_path)
def | return full_path | conditional_block |
build_assets.py
p' as an argument. Additionally, if you would like to clean all
generated files, you can call this script with the argument 'clean'.
"""
import distutils.spawn
import glob
import os
import platform
import subprocess
import sys
# The project root directory, which is one level up from this script's
# directory.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir))
PREBUILTS_ROOT = os.path.abspath(os.path.join(os.path.join(PROJECT_ROOT),
os.path.pardir, os.path.pardir,
os.path.pardir, os.path.pardir,
'prebuilts'))
# Directories that may contain the FlatBuffers compiler.
FLATBUFFERS_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
]
# Directory that contains the cwebp tool.
CWEBP_BINARY_IN_PATH = distutils.spawn.find_executable('cwebp')
CWEBP_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
os.path.join(PREBUILTS_ROOT, 'libwebp',
'%s-x86' % platform.system().lower(),
'libwebp-0.4.1-%s-x86-32' % platform.system().lower(), 'bin'),
os.path.dirname(CWEBP_BINARY_IN_PATH) if CWEBP_BINARY_IN_PATH else '',
]
# Directory to place processed assets.
ASSETS_PATH = os.path.join(PROJECT_ROOT, 'assets')
# Directory where unprocessed assets can be found.
RAW_ASSETS_PATH = os.path.join(PROJECT_ROOT, 'src', 'rawassets')
# Directory where processed sound flatbuffer data can be found.
SOUND_PATH = os.path.join(ASSETS_PATH, 'sounds')
# Directory where unprocessed sound flatbuffer data can be found.
RAW_SOUND_PATH = os.path.join(RAW_ASSETS_PATH, 'sounds')
# Directory where processed material flatbuffer data can be found.
MATERIAL_PATH = os.path.join(ASSETS_PATH, 'materials')
# Directory where unprocessed material flatbuffer data can be found.
RAW_MATERIAL_PATH = os.path.join(RAW_ASSETS_PATH, 'materials')
# Directory where processed textures can be found.
TEXTURE_PATH = os.path.join(ASSETS_PATH, 'textures')
# Directory where unprocessed textures can be found.
RAW_TEXTURE_PATH = os.path.join(RAW_ASSETS_PATH, 'textures')
# Directory where unprocessed assets can be found.
SCHEMA_PATH = os.path.join(PROJECT_ROOT, 'src', 'flatbufferschemas')
# Windows uses the .exe extension on executables.
EXECUTABLE_EXTENSION = '.exe' if platform.system() == 'Windows' else ''
# Name of the flatbuffer executable.
FLATC_EXECUTABLE_NAME = 'flatc' + EXECUTABLE_EXTENSION
# Name of the cwebp executable.
CWEBP_EXECUTABLE_NAME = 'cwebp' + EXECUTABLE_EXTENSION
# What level of quality we want to apply to the webp files.
# Ranges from 0 to 100.
WEBP_QUALITY = 90
def processed_json_dir(path):
"""Take the path to a raw json asset and convert it to target directory."""
return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))
class FlatbuffersConversionData(object):
"""Holds data needed to convert a set of json files to flatbuffer binaries.
Attributes:
schema: The path to the flatbuffer schema file.
input_files: A list of input files to convert.
output_path: The path to the output directory where the converted files will
be placed.
"""
def __init__(self, schema, input_files, output_path):
"""Initializes this object's schema, input_files and output_path."""
self.schema = schema
self.input_files = input_files
self.output_path = output_path | # A list of json files and their schemas that will be converted to binary files
# by the flatbuffer compiler.
FLATBUFFERS_CONVERSION_DATA = [
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'config.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'config.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'buses.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'buses.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_assets.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'sound_assets.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'character_state_machine_def.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH,
'character_state_machine_def.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_collection_def.fbs'),
input_files=glob.glob(os.path.join(RAW_SOUND_PATH, '*.json')),
output_path=SOUND_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'materials.fbs'),
input_files=glob.glob(os.path.join(RAW_MATERIAL_PATH, '*.json')),
output_path=MATERIAL_PATH)
]
def processed_texture_path(path):
"""Take the path to a raw png asset and convert it to target webp path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')
# PNG files to convert to webp.
PNG_TEXTURES = {
'input_files': glob.glob(os.path.join(RAW_TEXTURE_PATH, '*.png')),
'output_files': [processed_texture_path(png_path)
for png_path in glob.glob(os.path.join(RAW_TEXTURE_PATH,
'*.png'))]
}
def find_executable(name, paths):
"""Searches for a file with named `name` in the given paths and returns it."""
for path in paths:
full_path = os.path.join(path, name)
if os.path.isfile(full_path):
return full_path
# If not found, just assume it's in the PATH.
return name
# Location of FlatBuffers compiler.
FLATC = find_executable(FLATC_EXECUTABLE_NAME, FLATBUFFERS_PATHS)
# Location of webp compression tool.
CWEBP = find_executable(CWEBP_EXECUTABLE_NAME, CWEBP_PATHS)
class BuildError(Exception):
"""Error indicating there was a problem building assets."""
def __init__(self, argv, error_code):
Exception.__init__(self)
self.argv = argv
self.error_code = error_code
def run_subprocess(argv):
process = subprocess.Popen(argv)
process.wait()
if process.returncode:
raise BuildError(argv, process.returncode)
def convert_json_to_flatbuffer_binary(json, schema, out_dir):
"""Run the flatbuffer compiler on the given json file and schema.
Args:
json: The path to the json file to convert to a flatbuffer binary.
schema: The path to the schema to use in the conversion process.
out_dir: The directory to write the flatbuffer binary.
Raises:
BuildError: Process return code was nonzero.
"""
command = [FLATC, '-o', out_dir, '-b', schema, json]
run_subprocess(command)
def convert_png_image_to_webp(png, out, quality=80):
"""Run the webp converter on the given png file.
Args:
png: The path to the png file to convert into a webp file.
out: The path of the webp to write to.
quality: The quality of the processed image, where quality is between 0
(poor) to 100 (very good). Typical value is around 80.
Raises:
BuildError: Process return code was nonzero.
"""
command = [CWEBP, '-q', str(quality), png, '-o', out]
run_subprocess(command)
def needs_rebuild(source, target):
"""Checks if the source file needs to be rebuilt.
Args:
source: The source file to be compared.
target: The target file which we may need to rebuild.
Returns:
True if the source file is newer than the target, or if the target file does
not exist.
"""
return not os.path.isfile(target) or (
os.path.getmtime(source) > os.path.getmtime(target))
def processed_json_path(path):
"""Take the path to a raw json asset and convert it to target bin path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')
def generate_flatbuffer_binaries():
"""Run the flatbuffer compiler on the all of the flatbuffer json files."""
for element in FLATBUFFERS_CONVERSION_DATA:
schema = element.schema
output_path = element.output_path
if not os.path.exists(output_path):
os.makedirs(output_path)
for json in element.input_files:
target = processed_json_path(json)
if needs_rebuild(json, target) or needs_rebuild(schema, target):
convert_json_to_flatbuffer_binary(
json, schema, output_path)
def | random_line_split |
|
build_assets.py
p' as an argument. Additionally, if you would like to clean all
generated files, you can call this script with the argument 'clean'.
"""
import distutils.spawn
import glob
import os
import platform
import subprocess
import sys
# The project root directory, which is one level up from this script's
# directory.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir))
PREBUILTS_ROOT = os.path.abspath(os.path.join(os.path.join(PROJECT_ROOT),
os.path.pardir, os.path.pardir,
os.path.pardir, os.path.pardir,
'prebuilts'))
# Directories that may contain the FlatBuffers compiler.
FLATBUFFERS_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
]
# Directory that contains the cwebp tool.
CWEBP_BINARY_IN_PATH = distutils.spawn.find_executable('cwebp')
CWEBP_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
os.path.join(PREBUILTS_ROOT, 'libwebp',
'%s-x86' % platform.system().lower(),
'libwebp-0.4.1-%s-x86-32' % platform.system().lower(), 'bin'),
os.path.dirname(CWEBP_BINARY_IN_PATH) if CWEBP_BINARY_IN_PATH else '',
]
# Directory to place processed assets.
ASSETS_PATH = os.path.join(PROJECT_ROOT, 'assets')
# Directory where unprocessed assets can be found.
RAW_ASSETS_PATH = os.path.join(PROJECT_ROOT, 'src', 'rawassets')
# Directory where processed sound flatbuffer data can be found.
SOUND_PATH = os.path.join(ASSETS_PATH, 'sounds')
# Directory where unprocessed sound flatbuffer data can be found.
RAW_SOUND_PATH = os.path.join(RAW_ASSETS_PATH, 'sounds')
# Directory where processed material flatbuffer data can be found.
MATERIAL_PATH = os.path.join(ASSETS_PATH, 'materials')
# Directory where unprocessed material flatbuffer data can be found.
RAW_MATERIAL_PATH = os.path.join(RAW_ASSETS_PATH, 'materials')
# Directory where processed textures can be found.
TEXTURE_PATH = os.path.join(ASSETS_PATH, 'textures')
# Directory where unprocessed textures can be found.
RAW_TEXTURE_PATH = os.path.join(RAW_ASSETS_PATH, 'textures')
# Directory where unprocessed assets can be found.
SCHEMA_PATH = os.path.join(PROJECT_ROOT, 'src', 'flatbufferschemas')
# Windows uses the .exe extension on executables.
EXECUTABLE_EXTENSION = '.exe' if platform.system() == 'Windows' else ''
# Name of the flatbuffer executable.
FLATC_EXECUTABLE_NAME = 'flatc' + EXECUTABLE_EXTENSION
# Name of the cwebp executable.
CWEBP_EXECUTABLE_NAME = 'cwebp' + EXECUTABLE_EXTENSION
# What level of quality we want to apply to the webp files.
# Ranges from 0 to 100.
WEBP_QUALITY = 90
def processed_json_dir(path):
"""Take the path to a raw json asset and convert it to target directory."""
return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))
class FlatbuffersConversionData(object):
|
# A list of json files and their schemas that will be converted to binary files
# by the flatbuffer compiler.
FLATBUFFERS_CONVERSION_DATA = [
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'config.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'config.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'buses.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'buses.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_assets.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'sound_assets.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'character_state_machine_def.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH,
'character_state_machine_def.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_collection_def.fbs'),
input_files=glob.glob(os.path.join(RAW_SOUND_PATH, '*.json')),
output_path=SOUND_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'materials.fbs'),
input_files=glob.glob(os.path.join(RAW_MATERIAL_PATH, '*.json')),
output_path=MATERIAL_PATH)
]
def processed_texture_path(path):
"""Take the path to a raw png asset and convert it to target webp path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')
# PNG files to convert to webp.
PNG_TEXTURES = {
'input_files': glob.glob(os.path.join(RAW_TEXTURE_PATH, '*.png')),
'output_files': [processed_texture_path(png_path)
for png_path in glob.glob(os.path.join(RAW_TEXTURE_PATH,
'*.png'))]
}
def find_executable(name, paths):
"""Searches for a file with named `name` in the given paths and returns it."""
for path in paths:
full_path = os.path.join(path, name)
if os.path.isfile(full_path):
return full_path
# If not found, just assume it's in the PATH.
return name
# Location of FlatBuffers compiler.
FLATC = find_executable(FLATC_EXECUTABLE_NAME, FLATBUFFERS_PATHS)
# Location of webp compression tool.
CWEBP = find_executable(CWEBP_EXECUTABLE_NAME, CWEBP_PATHS)
class BuildError(Exception):
"""Error indicating there was a problem building assets."""
def __init__(self, argv, error_code):
Exception.__init__(self)
self.argv = argv
self.error_code = error_code
def run_subprocess(argv):
process = subprocess.Popen(argv)
process.wait()
if process.returncode:
raise BuildError(argv, process.returncode)
def convert_json_to_flatbuffer_binary(json, schema, out_dir):
"""Run the flatbuffer compiler on the given json file and schema.
Args:
json: The path to the json file to convert to a flatbuffer binary.
schema: The path to the schema to use in the conversion process.
out_dir: The directory to write the flatbuffer binary.
Raises:
BuildError: Process return code was nonzero.
"""
command = [FLATC, '-o', out_dir, '-b', schema, json]
run_subprocess(command)
def convert_png_image_to_webp(png, out, quality=80):
"""Run the webp converter on the given png file.
Args:
png: The path to the png file to convert into a webp file.
out: The path of the webp to write to.
quality: The quality of the processed image, where quality is between 0
(poor) to 100 (very good). Typical value is around 80.
Raises:
BuildError: Process return code was nonzero.
"""
command = [CWEBP, '-q', str(quality), png, '-o', out]
run_subprocess(command)
def needs_rebuild(source, target):
"""Checks if the source file needs to be rebuilt.
Args:
source: The source file to be compared.
target: The target file which we may need to rebuild.
Returns:
True if the source file is newer than the target, or if the target file does
not exist.
"""
return not os.path.isfile(target) or (
os.path.getmtime(source) > os.path.getmtime(target))
def processed_json_path(path):
"""Take the path to a raw json asset and convert it to target bin path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')
def generate_flatbuffer_binaries():
"""Run the flatbuffer compiler on the all of the flatbuffer json files."""
for element in FLATBUFFERS_CONVERSION_DATA:
schema = element.schema
output_path = element.output_path
if not os.path.exists(output_path):
os.makedirs(output_path)
for json in element.input_files:
target = processed_json_path(json)
if needs_rebuild(json, target) or needs_rebuild(schema, target):
convert_json_to_flatbuffer_binary(
json, schema, output_path)
def | """Holds data needed to convert a set of json files to flatbuffer binaries.
Attributes:
schema: The path to the flatbuffer schema file.
input_files: A list of input files to convert.
output_path: The path to the output directory where the converted files will
be placed.
"""
def __init__(self, schema, input_files, output_path):
"""Initializes this object's schema, input_files and output_path."""
self.schema = schema
self.input_files = input_files
self.output_path = output_path | identifier_body |
build_assets.py
'Debug'),
os.path.join(PREBUILTS_ROOT, 'libwebp',
'%s-x86' % platform.system().lower(),
'libwebp-0.4.1-%s-x86-32' % platform.system().lower(), 'bin'),
os.path.dirname(CWEBP_BINARY_IN_PATH) if CWEBP_BINARY_IN_PATH else '',
]
# Directory to place processed assets.
ASSETS_PATH = os.path.join(PROJECT_ROOT, 'assets')
# Directory where unprocessed assets can be found.
RAW_ASSETS_PATH = os.path.join(PROJECT_ROOT, 'src', 'rawassets')
# Directory where processed sound flatbuffer data can be found.
SOUND_PATH = os.path.join(ASSETS_PATH, 'sounds')
# Directory where unprocessed sound flatbuffer data can be found.
RAW_SOUND_PATH = os.path.join(RAW_ASSETS_PATH, 'sounds')
# Directory where processed material flatbuffer data can be found.
MATERIAL_PATH = os.path.join(ASSETS_PATH, 'materials')
# Directory where unprocessed material flatbuffer data can be found.
RAW_MATERIAL_PATH = os.path.join(RAW_ASSETS_PATH, 'materials')
# Directory where processed textures can be found.
TEXTURE_PATH = os.path.join(ASSETS_PATH, 'textures')
# Directory where unprocessed textures can be found.
RAW_TEXTURE_PATH = os.path.join(RAW_ASSETS_PATH, 'textures')
# Directory where unprocessed assets can be found.
SCHEMA_PATH = os.path.join(PROJECT_ROOT, 'src', 'flatbufferschemas')
# Windows uses the .exe extension on executables.
EXECUTABLE_EXTENSION = '.exe' if platform.system() == 'Windows' else ''
# Name of the flatbuffer executable.
FLATC_EXECUTABLE_NAME = 'flatc' + EXECUTABLE_EXTENSION
# Name of the cwebp executable.
CWEBP_EXECUTABLE_NAME = 'cwebp' + EXECUTABLE_EXTENSION
# What level of quality we want to apply to the webp files.
# Ranges from 0 to 100.
WEBP_QUALITY = 90
def processed_json_dir(path):
"""Take the path to a raw json asset and convert it to target directory."""
return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))
class FlatbuffersConversionData(object):
"""Holds data needed to convert a set of json files to flatbuffer binaries.
Attributes:
schema: The path to the flatbuffer schema file.
input_files: A list of input files to convert.
output_path: The path to the output directory where the converted files will
be placed.
"""
def __init__(self, schema, input_files, output_path):
"""Initializes this object's schema, input_files and output_path."""
self.schema = schema
self.input_files = input_files
self.output_path = output_path
# A list of json files and their schemas that will be converted to binary files
# by the flatbuffer compiler.
FLATBUFFERS_CONVERSION_DATA = [
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'config.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'config.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'buses.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'buses.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_assets.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'sound_assets.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'character_state_machine_def.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH,
'character_state_machine_def.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_collection_def.fbs'),
input_files=glob.glob(os.path.join(RAW_SOUND_PATH, '*.json')),
output_path=SOUND_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'materials.fbs'),
input_files=glob.glob(os.path.join(RAW_MATERIAL_PATH, '*.json')),
output_path=MATERIAL_PATH)
]
def processed_texture_path(path):
"""Take the path to a raw png asset and convert it to target webp path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')
# PNG files to convert to webp.
PNG_TEXTURES = {
'input_files': glob.glob(os.path.join(RAW_TEXTURE_PATH, '*.png')),
'output_files': [processed_texture_path(png_path)
for png_path in glob.glob(os.path.join(RAW_TEXTURE_PATH,
'*.png'))]
}
def find_executable(name, paths):
"""Searches for a file with named `name` in the given paths and returns it."""
for path in paths:
full_path = os.path.join(path, name)
if os.path.isfile(full_path):
return full_path
# If not found, just assume it's in the PATH.
return name
# Location of FlatBuffers compiler.
FLATC = find_executable(FLATC_EXECUTABLE_NAME, FLATBUFFERS_PATHS)
# Location of webp compression tool.
CWEBP = find_executable(CWEBP_EXECUTABLE_NAME, CWEBP_PATHS)
class BuildError(Exception):
"""Error indicating there was a problem building assets."""
def __init__(self, argv, error_code):
Exception.__init__(self)
self.argv = argv
self.error_code = error_code
def run_subprocess(argv):
process = subprocess.Popen(argv)
process.wait()
if process.returncode:
raise BuildError(argv, process.returncode)
def convert_json_to_flatbuffer_binary(json, schema, out_dir):
"""Run the flatbuffer compiler on the given json file and schema.
Args:
json: The path to the json file to convert to a flatbuffer binary.
schema: The path to the schema to use in the conversion process.
out_dir: The directory to write the flatbuffer binary.
Raises:
BuildError: Process return code was nonzero.
"""
command = [FLATC, '-o', out_dir, '-b', schema, json]
run_subprocess(command)
def convert_png_image_to_webp(png, out, quality=80):
"""Run the webp converter on the given png file.
Args:
png: The path to the png file to convert into a webp file.
out: The path of the webp to write to.
quality: The quality of the processed image, where quality is between 0
(poor) to 100 (very good). Typical value is around 80.
Raises:
BuildError: Process return code was nonzero.
"""
command = [CWEBP, '-q', str(quality), png, '-o', out]
run_subprocess(command)
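# Illustrative command line built above (paths assumed):
#   cwebp -q 90 src/rawassets/textures/bg.png -o assets/textures/bg.webp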
def needs_rebuild(source, target):
"""Checks if the source file needs to be rebuilt.
Args:
source: The source file to be compared.
target: The target file which we may need to rebuild.
Returns:
True if the source file is newer than the target, or if the target file does
not exist.
"""
return not os.path.isfile(target) or (
os.path.getmtime(source) > os.path.getmtime(target))
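# Illustrative: needs_rebuild('config.json', 'config.bin') is True when config.bin
# is missing or older than config.json, so unchanged assets are skipped.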
def processed_json_path(path):
"""Take the path to a raw json asset and convert it to target bin path."""
return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')
def generate_flatbuffer_binaries():
"""Run the flatbuffer compiler on the all of the flatbuffer json files."""
for element in FLATBUFFERS_CONVERSION_DATA:
schema = element.schema
output_path = element.output_path
if not os.path.exists(output_path):
os.makedirs(output_path)
for json in element.input_files:
target = processed_json_path(json)
if needs_rebuild(json, target) or needs_rebuild(schema, target):
convert_json_to_flatbuffer_binary(
json, schema, output_path)
def generate_webp_textures():
"""Run the webp converter on off of the png files."""
input_files = PNG_TEXTURES['input_files']
output_files = PNG_TEXTURES['output_files']
if not os.path.exists(TEXTURE_PATH):
os.makedirs(TEXTURE_PATH)
for png, out in zip(input_files, output_files):
if needs_rebuild(png, out):
convert_png_image_to_webp(png, out, WEBP_QUALITY)
def clean_webp_textures():
"""Delete all the processed webp textures."""
for webp in PNG_TEXTURES['output_files']:
if os.path.isfile(webp):
os.remove(webp)
def clean_flatbuffer_binaries():
"""Delete all the processed flatbuffer binaries."""
for element in FLATBUFFERS_CONVERSION_DATA:
for json in element.input_files:
path = processed_json_path(json)
if os.path.isfile(path):
os.remove(path)
def clean():
"""Delete all the processed files."""
clean_flatbuffer_binaries()
clean_webp_textures()
def handle_build_error(error):
"""Prints an error message to stderr for BuildErrors."""
sys.stderr.write('Error running command `%s`. Returned %s.\n' % (
' '.join(error.argv), str(error.error_code)))
def | main | identifier_name |
|
bcfw_diffrac.py
import os
import time

import numpy as np
from numpy.linalg import norm as matrix_norm  # assumed alias: Frobenius norm used in compute_obj
from tqdm import tqdm


def get_feat_block(feats, block_idx, memory_mode, bias_value=-1.0):
"""Get feature for a given block."""
if memory_mode == 'RAM':
feat = feats[block_idx]
elif memory_mode == 'disk':
feat = np.load(feats[block_idx])
else:
raise ValueError(
'Memory mode {} is not supported.'.format(memory_mode))
if bias_value > 0.0:
feat = np.append(
feat, bias_value * np.ones([feat.shape[0], 1]), axis=1)
return feat
def get_p_block(p_matrix, block_idx, memory_mode):
if memory_mode == 'RAM':
return p_matrix[block_idx]
elif memory_mode == 'disk':
return np.load(p_matrix[block_idx])
else:
raise ValueError(
'Memory mode {} is not supported.'.format(memory_mode))
def compute_p_matrix(feats, alpha, memory_mode, bias_value=-1.0):
"""Precompute the P dictionnary matrix."""
_, d = np.shape(
get_feat_block(feats, 0, memory_mode, bias_value=bias_value))
# Compute X^TX
print('Computing xtx...')
x_t_x = np.zeros([d, d])
N = 0
for i in tqdm(range(len(feats))):
x = get_feat_block(feats, i, memory_mode, bias_value=bias_value)
x_t_x += np.dot(np.transpose(x), x)
N += x.shape[0]
# Compute P
p_matrix = []
print('Inverting big matrix...')
inv_mat = np.linalg.inv(x_t_x + N * alpha * np.eye(d))
print('Computing P matrix by block...')
for i in tqdm(range(len(feats))):
x = get_feat_block(feats, i, memory_mode, bias_value=bias_value)
sol = np.dot(inv_mat, np.transpose(x))
if memory_mode == 'RAM':
p_matrix.append(np.array(sol))
else:
path_x = feats[i]
base_path, filename = os.path.split(path_x)
np.save(os.path.join(base_path, 'P_{}'.format(filename)), sol)
            p_matrix.append(os.path.join(base_path, 'P_{}'.format(filename)))  # store the saved P block path, not the raw feature path
return p_matrix, N
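# In closed form each block stores P_b = (X^T X + N * alpha * I)^{-1} X_b^T,
# so the ridge-regression weights are W = sum_b P_b Y_b (see compute_weights below).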
def compute_weights(p_matrix, asgn, memory_mode):
d, _ = np.shape(get_p_block(p_matrix, 0, memory_mode))
_, k = np.shape(asgn[0])
weights = np.zeros([d, k])
print('Computing weights from scratch...')
for i in tqdm(range(len(p_matrix))):
weights += np.dot(get_p_block(p_matrix, i, memory_mode), asgn[i])
return weights
def compute_obj(x, y, weights, n_feats):
return 1.0 / n_feats * matrix_norm(np.dot(x, weights) - y, ord='fro')**2
def compute_grad(x, y, weights, n_feats):
return 1.0 / n_feats * (y - np.dot(x, weights))
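# Shapes (illustrative): x is (n_b, d), weights is (d, k), y is (n_b, k).
# compute_obj is the scaled squared Frobenius norm; compute_grad returns (y - x.dot(weights)) / n_feats,
# the direction handed to the linear oracle in compute_gap below.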
def compute_gap(x,
y,
weights,
n_feats,
cstr,
cstr_solver,
opt_y=None,
grad_y=None):
# Check if we need to call the oracle.
if opt_y is None:
grad_y = compute_grad(x, y, weights, n_feats)
opt_y = cstr_solver.solve(cstr, grad_y)
gap = -np.multiply(opt_y - y, grad_y).sum()
return gap
def sample_block(gaps, block_sampling):
if block_sampling == 'uniform':
return np.random.randint(0, len(gaps), 1)[0]
elif block_sampling == 'gap_sampling':
if not np.all(gaps >= 0):
            print('Warning: negative gap for block {}, value: {}'.format(
gaps.argmin(), gaps.min()))
gaps[gaps < 0] = 0.00000001
gap_prob = gaps / gaps.sum()
return np.random.choice(len(gaps), 1, p=gap_prob)[0]
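# Illustrative: with gaps = [0.1, 0.3, 0.6], 'gap_sampling' draws block 2 with probability 0.6,
# while 'uniform' picks each block with probability 1/3.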
def display_information(iter,
max_iter,
gaps,
eval_metric,
objective_value=None,
verbose='silent',
prev_time=-1,
prev_global_time=-1):
"""Display information about the training."""
if objective_value is None:
objective_value = []
if verbose in ['normal', 'heavy']:
string_display = 'Iteration {0:05d}/{1:05d}, Gap sum: {2:.4E}'.format(
iter, max_iter, gaps.sum())
new_time = time.time()
if prev_time > 0:
diff_time = int(round(new_time - prev_time))
string_display += ' ({:d} s)'.format(diff_time)
if prev_global_time > 0:
diff_time = int(round(new_time - prev_global_time))
string_display += ' (Glob. {:d} s)'.format(diff_time)
if eval_metric >= 0:
string_display += ', Eval metric: {:.2f}'.format(eval_metric)
if objective_value:
string_display += ', Objective: '
string_display += ','.join([
'{}: {:.4E}'.format(key, value)
for key, value in objective_value.items()
])
print(string_display)
def save_asgn_block(path_save_asgn, block_idx, asgn, t):
np.save(
os.path.join(path_save_asgn, '{0}_{1:05d}.npy'.format(block_idx, t)),
asgn[block_idx])
def save_xw_block(path_save_asgn, block_idx, x, weights, t):
np.save(
os.path.join(path_save_asgn, 'xw_{0}_{1:05d}.npy'.format(block_idx,
t)),
np.dot(x, weights))
def save_gt_block(path_save_asgn, block_idx, gts):
np.save(
os.path.join(path_save_asgn, '{}_gt.npy'.format(block_idx)),
gts[block_idx])
def solver(feats,
asgn,
cstrs,
cstrs_solver,
gts=None,
eval_function=None,
rounding_function=None,
alpha=1e-4,
memory_mode='RAM',
bias_value=-1.0,
n_iterations=10000,
block_sampling='uniform',
verbose='silent',
gap_frequency=2000,
eval_frequency=500,
verbose_frequency=250,
objective_frequency=250,
path_save_asgn=None,
validation_info=None):
"""Main solver for DiffracBCFW.
Args:
feats: Input features as a list (one entry per block).
asgn: Assignment variables as a list (one entry per block). This provides
the initialization of the system.
cstrs: Input constraints as a dictionary (one entry per block).
cstrs_solver: Method that takes as input a gradient for a block and a cstrs and then
returns the LP solution.
gts: A ground truth can be specified if you wish to evaluate your solution.
eval_function: an eval function method can be provided.
rounding_function: rounding function.
alpha: Value of the regularization parameter (lambda in the paper).
memory_mode: `disk` (features are stored in disk) or `RAM` (features are in RAM).
bias_value: Value to add for the bias (if negative no bias is added to the features).
n_iterations: Number of iterations of the solver.
block_sampling: Method for sampling block.
verbose: `silent`, `normal`, `heavy`.
gap_frequency: frequency to recompute all the gaps.
eval_frequency: frequency to perform evaluation.
verbose_frequency: frequency to print info.
objective_frequency: frequency to compute objective (only used if positive).
path_save_asgn: If not None save asgn at path_save_asgn. None by default.
        validation_info: If not None, perform evaluation on the validation data.
"""
compute_objective = False
objective_value = None
if objective_frequency > 0:
compute_objective = True
save_asgn = False
save_ids = []
if path_save_asgn is not None:
if not os.path.exists(path_save_asgn):
os.makedirs(path_save_asgn)
# Monitor evolution of asgn during optim on a subset of samples.
save_asgn = True
n_save_asgn = min(20, len(asgn))
save_ids = np.random.choice(len(asgn), n_save_asgn, replace=False)
# Pre-compute the P matrix.
p_matrix, n_feats = compute_p_matrix(
feats, alpha, memory_mode, bias_value=bias_value)
# Compute W.
weights = compute_weights(p_matrix, asgn, memory_mode=memory_mode)
# Init the gaps.
gaps = np.zeros(len(feats))
print('Computing init gaps...')
for block_idx in tqdm(range(len(feats))):
x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
gaps[block_idx] = compute_gap(x, asgn[block_idx], weights, n_feats,
cstrs[block_idx], cstrs_solver)
if save_asgn and block_idx in save_ids:
save_asgn_block(path_save_asgn, block_idx, asgn, 0)
save_xw_block(path_save_asgn, block_idx | get_feat_block | identifier_name |
|
bcfw_diffrac.py | = gaps / gaps.sum()
return np.random.choice(len(gaps), 1, p=gap_prob)[0]
def display_information(iter,
max_iter,
gaps,
eval_metric,
objective_value=None,
verbose='silent',
prev_time=-1,
prev_global_time=-1):
"""Display information about the training."""
if objective_value is None:
        objective_value = {}
if verbose in ['normal', 'heavy']:
string_display = 'Iteration {0:05d}/{1:05d}, Gap sum: {2:.4E}'.format(
iter, max_iter, gaps.sum())
new_time = time.time()
if prev_time > 0:
diff_time = int(round(new_time - prev_time))
string_display += ' ({:d} s)'.format(diff_time)
if prev_global_time > 0:
diff_time = int(round(new_time - prev_global_time))
string_display += ' (Glob. {:d} s)'.format(diff_time)
if eval_metric >= 0:
string_display += ', Eval metric: {:.2f}'.format(eval_metric)
if objective_value:
string_display += ', Objective: '
string_display += ','.join([
'{}: {:.4E}'.format(key, value)
for key, value in objective_value.items()
])
print(string_display)
def save_asgn_block(path_save_asgn, block_idx, asgn, t):
np.save(
os.path.join(path_save_asgn, '{0}_{1:05d}.npy'.format(block_idx, t)),
asgn[block_idx])
def save_xw_block(path_save_asgn, block_idx, x, weights, t):
np.save(
os.path.join(path_save_asgn, 'xw_{0}_{1:05d}.npy'.format(block_idx,
t)),
np.dot(x, weights))
def save_gt_block(path_save_asgn, block_idx, gts):
np.save(
os.path.join(path_save_asgn, '{}_gt.npy'.format(block_idx)),
gts[block_idx])
def solver(feats,
asgn,
cstrs,
cstrs_solver,
gts=None,
eval_function=None,
rounding_function=None,
alpha=1e-4,
memory_mode='RAM',
bias_value=-1.0,
n_iterations=10000,
block_sampling='uniform',
verbose='silent',
gap_frequency=2000,
eval_frequency=500,
verbose_frequency=250,
objective_frequency=250,
path_save_asgn=None,
validation_info=None):
"""Main solver for DiffracBCFW.
Args:
feats: Input features as a list (one entry per block).
asgn: Assignment variables as a list (one entry per block). This provides
the initialization of the system.
cstrs: Input constraints as a dictionary (one entry per block).
cstrs_solver: Method that takes as input a gradient for a block and a cstrs and then
returns the LP solution.
gts: A ground truth can be specified if you wish to evaluate your solution.
eval_function: an eval function method can be provided.
rounding_function: rounding function.
alpha: Value of the regularization parameter (lambda in the paper).
memory_mode: `disk` (features are stored in disk) or `RAM` (features are in RAM).
bias_value: Value to add for the bias (if negative no bias is added to the features).
n_iterations: Number of iterations of the solver.
block_sampling: Method for sampling block.
verbose: `silent`, `normal`, `heavy`.
gap_frequency: frequency to recompute all the gaps.
eval_frequency: frequency to perform evaluation.
verbose_frequency: frequency to print info.
objective_frequency: frequency to compute objective (only used if positive).
path_save_asgn: If not None save asgn at path_save_asgn. None by default.
        validation_info: If not None, perform evaluation on the validation data.
"""
compute_objective = False
objective_value = None
if objective_frequency > 0:
compute_objective = True
save_asgn = False
save_ids = []
if path_save_asgn is not None:
if not os.path.exists(path_save_asgn):
os.makedirs(path_save_asgn)
# Monitor evolution of asgn during optim on a subset of samples.
save_asgn = True
n_save_asgn = min(20, len(asgn))
save_ids = np.random.choice(len(asgn), n_save_asgn, replace=False)
# Pre-compute the P matrix.
p_matrix, n_feats = compute_p_matrix(
feats, alpha, memory_mode, bias_value=bias_value)
# Compute W.
weights = compute_weights(p_matrix, asgn, memory_mode=memory_mode)
# Init the gaps.
gaps = np.zeros(len(feats))
print('Computing init gaps...')
for block_idx in tqdm(range(len(feats))):
x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
gaps[block_idx] = compute_gap(x, asgn[block_idx], weights, n_feats,
cstrs[block_idx], cstrs_solver)
if save_asgn and block_idx in save_ids:
save_asgn_block(path_save_asgn, block_idx, asgn, 0)
save_xw_block(path_save_asgn, block_idx, x, weights, 0)
save_gt_block(path_save_asgn, block_idx, gts)
print('Init gap: {0:4E}, starting the optimization...'.format(gaps.sum()))
eval_metric = -1.0
prev_time = time.time() # init time of iterations
prev_global_time = prev_time
for t in range(n_iterations):
if eval_frequency > 0 and t % eval_frequency == 0:
# Evaluation.
if eval_function is not None and gts is not None:
print('Performing evaluation...')
eval_metric = eval_function.evaluate(asgn, gts, weights, feats,
rounding_function, cstrs)
if validation_info is not None:
gts_val = validation_info['gts']
feats_val = validation_info['feats']
eval_function.evaluate(None, gts_val, weights, feats_val,
rounding_function, None)
else:
eval_metric = -1.0
if compute_objective and t % objective_frequency == 0:
print('Computing objective...')
objective_value = {}
# Compute the diffrac objective.
dfrac_obj = 0.0
# Data dependent term: 1.0 / N * ||X * W - Y||_2^2
for block_idx in range(len(feats)):
x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
dfrac_obj += compute_obj(x, asgn[block_idx], weights, n_feats)
# Regularization term: \alpha * || W ||_2^2
dfrac_obj += alpha * matrix_norm(weights, ord='fro')**2
objective_value['dfrac'] = dfrac_obj
# Print information.
if t % verbose_frequency == 0:
display_information(t, n_iterations, gaps, eval_metric,
objective_value, verbose, prev_time, prev_global_time)
prev_time = time.time()
# Sample a block.
block_idx = sample_block(gaps, block_sampling)
# Compute gradient.
x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
y = asgn[block_idx]
grad_y = compute_grad(x, y, weights, n_feats)
opt_y = cstrs_solver.solve(cstrs[block_idx], grad_y)
gaps[block_idx] = compute_gap(x, y, weights, n_feats,
cstrs[block_idx], cstrs_solver,
opt_y, grad_y)
# Step size computation.
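        # Closed-form line search along dir_y = opt_y - y:
        #   gamma = gap / ((1/N) * <dir_y, dir_y - X P dir_y>), clipped to [0, 1].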
p = get_p_block(p_matrix, block_idx, memory_mode)
dir_y = opt_y - y
gamma_n = gaps[block_idx]
gamma_d = 1.0 / n_feats * np.multiply(
dir_y, dir_y - np.linalg.multi_dot([x, p, dir_y])).sum()
gamma = min(1.0, gamma_n / gamma_d)
# gamma should always be positive.
if gamma < 0:
            print('Warning: gamma = {}, gap_i = {}'.format(
                gamma, gaps[block_idx]))
gamma = 0.0
# Update variables.
asgn[block_idx] += gamma * dir_y
weights += gamma * np.dot(p, dir_y)
if save_asgn and block_idx in save_ids:
save_asgn_block(path_save_asgn, block_idx, asgn, t)
save_xw_block(path_save_asgn, block_idx, x, weights, t)
# Update gaps if needed.
if (t + 1) % gap_frequency == 0:
print('Recomputing gaps...')
for block_idx in tqdm(range(len(feats))):
| x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
gaps[block_idx] = compute_gap(x, asgn[block_idx], weights,
n_feats, cstrs[block_idx],
cstrs_solver) | conditional_block |
|
bcfw_diffrac.py | bias_value=bias_value))
# Compute X^TX
print('Computing xtx...')
x_t_x = np.zeros([d, d])
N = 0
for i in tqdm(range(len(feats))):
x = get_feat_block(feats, i, memory_mode, bias_value=bias_value)
x_t_x += np.dot(np.transpose(x), x)
N += x.shape[0]
# Compute P
p_matrix = []
print('Inverting big matrix...')
inv_mat = np.linalg.inv(x_t_x + N * alpha * np.eye(d))
print('Computing P matrix by block...')
for i in tqdm(range(len(feats))):
x = get_feat_block(feats, i, memory_mode, bias_value=bias_value)
sol = np.dot(inv_mat, np.transpose(x))
if memory_mode == 'RAM':
p_matrix.append(np.array(sol))
else:
path_x = feats[i]
base_path, filename = os.path.split(path_x)
np.save(os.path.join(base_path, 'P_{}'.format(filename)), sol)
p_matrix.append(path_x)
return p_matrix, N
def compute_weights(p_matrix, asgn, memory_mode):
d, _ = np.shape(get_p_block(p_matrix, 0, memory_mode))
_, k = np.shape(asgn[0])
weights = np.zeros([d, k])
print('Computing weights from scratch...')
for i in tqdm(range(len(p_matrix))):
weights += np.dot(get_p_block(p_matrix, i, memory_mode), asgn[i])
return weights
def compute_obj(x, y, weights, n_feats):
return 1.0 / n_feats * matrix_norm(np.dot(x, weights) - y, ord='fro')**2
def compute_grad(x, y, weights, n_feats):
return 1.0 / n_feats * (y - np.dot(x, weights))
def compute_gap(x,
y,
weights,
n_feats,
cstr,
cstr_solver,
opt_y=None,
grad_y=None):
# Check if we need to call the oracle.
if opt_y is None:
grad_y = compute_grad(x, y, weights, n_feats)
opt_y = cstr_solver.solve(cstr, grad_y)
gap = -np.multiply(opt_y - y, grad_y).sum()
return gap
def sample_block(gaps, block_sampling):
if block_sampling == 'uniform':
return np.random.randint(0, len(gaps), 1)[0]
elif block_sampling == 'gap_sampling':
if not np.all(gaps >= 0):
            print('Warning: some gaps are negative (block {}, value: {})'.format(
                gaps.argmin(), gaps.min()))
gaps[gaps < 0] = 0.00000001
gap_prob = gaps / gaps.sum()
return np.random.choice(len(gaps), 1, p=gap_prob)[0]
def display_information(iter,
max_iter,
gaps,
eval_metric,
objective_value=None,
verbose='silent',
prev_time=-1,
prev_global_time=-1):
"""Display information about the training."""
if objective_value is None:
        objective_value = {}
if verbose in ['normal', 'heavy']:
string_display = 'Iteration {0:05d}/{1:05d}, Gap sum: {2:.4E}'.format(
iter, max_iter, gaps.sum())
new_time = time.time()
if prev_time > 0:
diff_time = int(round(new_time - prev_time))
string_display += ' ({:d} s)'.format(diff_time)
if prev_global_time > 0:
diff_time = int(round(new_time - prev_global_time))
string_display += ' (Glob. {:d} s)'.format(diff_time)
if eval_metric >= 0:
string_display += ', Eval metric: {:.2f}'.format(eval_metric)
if objective_value:
string_display += ', Objective: '
string_display += ','.join([
'{}: {:.4E}'.format(key, value)
for key, value in objective_value.items()
])
print(string_display)
def save_asgn_block(path_save_asgn, block_idx, asgn, t):
|
def save_xw_block(path_save_asgn, block_idx, x, weights, t):
np.save(
os.path.join(path_save_asgn, 'xw_{0}_{1:05d}.npy'.format(block_idx,
t)),
np.dot(x, weights))
def save_gt_block(path_save_asgn, block_idx, gts):
np.save(
os.path.join(path_save_asgn, '{}_gt.npy'.format(block_idx)),
gts[block_idx])
def solver(feats,
asgn,
cstrs,
cstrs_solver,
gts=None,
eval_function=None,
rounding_function=None,
alpha=1e-4,
memory_mode='RAM',
bias_value=-1.0,
n_iterations=10000,
block_sampling='uniform',
verbose='silent',
gap_frequency=2000,
eval_frequency=500,
verbose_frequency=250,
objective_frequency=250,
path_save_asgn=None,
validation_info=None):
"""Main solver for DiffracBCFW.
Args:
feats: Input features as a list (one entry per block).
asgn: Assignment variables as a list (one entry per block). This provides
the initialization of the system.
cstrs: Input constraints as a dictionary (one entry per block).
cstrs_solver: Method that takes as input a gradient for a block and a cstrs and then
returns the LP solution.
gts: A ground truth can be specified if you wish to evaluate your solution.
eval_function: an eval function method can be provided.
rounding_function: rounding function.
alpha: Value of the regularization parameter (lambda in the paper).
memory_mode: `disk` (features are stored in disk) or `RAM` (features are in RAM).
bias_value: Value to add for the bias (if negative no bias is added to the features).
n_iterations: Number of iterations of the solver.
block_sampling: Method for sampling block.
verbose: `silent`, `normal`, `heavy`.
gap_frequency: frequency to recompute all the gaps.
eval_frequency: frequency to perform evaluation.
verbose_frequency: frequency to print info.
objective_frequency: frequency to compute objective (only used if positive).
path_save_asgn: If not None save asgn at path_save_asgn. None by default.
        validation_info: If not None, perform evaluation on the validation data.
"""
compute_objective = False
objective_value = None
if objective_frequency > 0:
compute_objective = True
save_asgn = False
save_ids = []
if path_save_asgn is not None:
if not os.path.exists(path_save_asgn):
os.makedirs(path_save_asgn)
# Monitor evolution of asgn during optim on a subset of samples.
save_asgn = True
n_save_asgn = min(20, len(asgn))
save_ids = np.random.choice(len(asgn), n_save_asgn, replace=False)
# Pre-compute the P matrix.
p_matrix, n_feats = compute_p_matrix(
feats, alpha, memory_mode, bias_value=bias_value)
# Compute W.
weights = compute_weights(p_matrix, asgn, memory_mode=memory_mode)
# Init the gaps.
gaps = np.zeros(len(feats))
print('Computing init gaps...')
for block_idx in tqdm(range(len(feats))):
x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
gaps[block_idx] = compute_gap(x, asgn[block_idx], weights, n_feats,
cstrs[block_idx], cstrs_solver)
if save_asgn and block_idx in save_ids:
save_asgn_block(path_save_asgn, block_idx, asgn, 0)
save_xw_block(path_save_asgn, block_idx, x, weights, 0)
save_gt_block(path_save_asgn, block_idx, gts)
print('Init gap: {0:4E}, starting the optimization...'.format(gaps.sum()))
eval_metric = -1.0
prev_time = time.time() # init time of iterations
prev_global_time = prev_time
for t in range(n_iterations):
if eval_frequency > 0 and t % eval_frequency == 0:
# Evaluation.
if eval_function is not None and gts is not None:
print('Performing evaluation...')
eval_metric = eval_function.evaluate(asgn, gts, weights, feats,
rounding_function, cstrs)
if validation_info is not None:
gts_val = validation_info['gts']
feats_val = validation_info['feats']
eval_function.evaluate(None, gts_val, weights, feats_val,
rounding_function, None)
else:
eval_metric = -1.0
if compute_objective and t % objective_frequency == 0:
print('Computing objective...')
objective_value = | np.save(
os.path.join(path_save_asgn, '{0}_{1:05d}.npy'.format(block_idx, t)),
asgn[block_idx]) | identifier_body |
bcfw_diffrac.py | bias_value=bias_value))
# Compute X^TX
print('Computing xtx...')
x_t_x = np.zeros([d, d])
N = 0
for i in tqdm(range(len(feats))):
x = get_feat_block(feats, i, memory_mode, bias_value=bias_value)
x_t_x += np.dot(np.transpose(x), x)
N += x.shape[0]
# Compute P
p_matrix = []
print('Inverting big matrix...')
inv_mat = np.linalg.inv(x_t_x + N * alpha * np.eye(d))
print('Computing P matrix by block...')
for i in tqdm(range(len(feats))):
x = get_feat_block(feats, i, memory_mode, bias_value=bias_value)
sol = np.dot(inv_mat, np.transpose(x))
if memory_mode == 'RAM':
p_matrix.append(np.array(sol))
else:
path_x = feats[i]
base_path, filename = os.path.split(path_x)
np.save(os.path.join(base_path, 'P_{}'.format(filename)), sol)
p_matrix.append(path_x)
return p_matrix, N
def compute_weights(p_matrix, asgn, memory_mode): | d, _ = np.shape(get_p_block(p_matrix, 0, memory_mode))
_, k = np.shape(asgn[0])
weights = np.zeros([d, k])
print('Computing weights from scratch...')
for i in tqdm(range(len(p_matrix))):
weights += np.dot(get_p_block(p_matrix, i, memory_mode), asgn[i])
return weights
def compute_obj(x, y, weights, n_feats):
return 1.0 / n_feats * matrix_norm(np.dot(x, weights) - y, ord='fro')**2
def compute_grad(x, y, weights, n_feats):
return 1.0 / n_feats * (y - np.dot(x, weights))
def compute_gap(x,
y,
weights,
n_feats,
cstr,
cstr_solver,
opt_y=None,
grad_y=None):
# Check if we need to call the oracle.
if opt_y is None:
grad_y = compute_grad(x, y, weights, n_feats)
opt_y = cstr_solver.solve(cstr, grad_y)
gap = -np.multiply(opt_y - y, grad_y).sum()
return gap
def sample_block(gaps, block_sampling):
if block_sampling == 'uniform':
return np.random.randint(0, len(gaps), 1)[0]
elif block_sampling == 'gap_sampling':
if not np.all(gaps >= 0):
            print('Warning: some gaps are negative (block {}, value: {})'.format(
                gaps.argmin(), gaps.min()))
gaps[gaps < 0] = 0.00000001
gap_prob = gaps / gaps.sum()
return np.random.choice(len(gaps), 1, p=gap_prob)[0]
def display_information(iter,
max_iter,
gaps,
eval_metric,
objective_value=None,
verbose='silent',
prev_time=-1,
prev_global_time=-1):
"""Display information about the training."""
if objective_value is None:
        objective_value = {}
if verbose in ['normal', 'heavy']:
string_display = 'Iteration {0:05d}/{1:05d}, Gap sum: {2:.4E}'.format(
iter, max_iter, gaps.sum())
new_time = time.time()
if prev_time > 0:
diff_time = int(round(new_time - prev_time))
string_display += ' ({:d} s)'.format(diff_time)
if prev_global_time > 0:
diff_time = int(round(new_time - prev_global_time))
string_display += ' (Glob. {:d} s)'.format(diff_time)
if eval_metric >= 0:
string_display += ', Eval metric: {:.2f}'.format(eval_metric)
if objective_value:
string_display += ', Objective: '
string_display += ','.join([
'{}: {:.4E}'.format(key, value)
for key, value in objective_value.items()
])
print(string_display)
def save_asgn_block(path_save_asgn, block_idx, asgn, t):
np.save(
os.path.join(path_save_asgn, '{0}_{1:05d}.npy'.format(block_idx, t)),
asgn[block_idx])
def save_xw_block(path_save_asgn, block_idx, x, weights, t):
np.save(
os.path.join(path_save_asgn, 'xw_{0}_{1:05d}.npy'.format(block_idx,
t)),
np.dot(x, weights))
def save_gt_block(path_save_asgn, block_idx, gts):
np.save(
os.path.join(path_save_asgn, '{}_gt.npy'.format(block_idx)),
gts[block_idx])
def solver(feats,
asgn,
cstrs,
cstrs_solver,
gts=None,
eval_function=None,
rounding_function=None,
alpha=1e-4,
memory_mode='RAM',
bias_value=-1.0,
n_iterations=10000,
block_sampling='uniform',
verbose='silent',
gap_frequency=2000,
eval_frequency=500,
verbose_frequency=250,
objective_frequency=250,
path_save_asgn=None,
validation_info=None):
"""Main solver for DiffracBCFW.
Args:
feats: Input features as a list (one entry per block).
asgn: Assignment variables as a list (one entry per block). This provides
the initialization of the system.
cstrs: Input constraints as a dictionary (one entry per block).
cstrs_solver: Method that takes as input a gradient for a block and a cstrs and then
returns the LP solution.
gts: A ground truth can be specified if you wish to evaluate your solution.
eval_function: an eval function method can be provided.
rounding_function: rounding function.
alpha: Value of the regularization parameter (lambda in the paper).
memory_mode: `disk` (features are stored in disk) or `RAM` (features are in RAM).
bias_value: Value to add for the bias (if negative no bias is added to the features).
n_iterations: Number of iterations of the solver.
block_sampling: Method for sampling block.
verbose: `silent`, `normal`, `heavy`.
gap_frequency: frequency to recompute all the gaps.
eval_frequency: frequency to perform evaluation.
verbose_frequency: frequency to print info.
objective_frequency: frequency to compute objective (only used if positive).
path_save_asgn: If not None save asgn at path_save_asgn. None by default.
        validation_info: If not None, perform evaluation on the validation data.
"""
compute_objective = False
objective_value = None
if objective_frequency > 0:
compute_objective = True
save_asgn = False
save_ids = []
if path_save_asgn is not None:
if not os.path.exists(path_save_asgn):
os.makedirs(path_save_asgn)
# Monitor evolution of asgn during optim on a subset of samples.
save_asgn = True
n_save_asgn = min(20, len(asgn))
save_ids = np.random.choice(len(asgn), n_save_asgn, replace=False)
# Pre-compute the P matrix.
p_matrix, n_feats = compute_p_matrix(
feats, alpha, memory_mode, bias_value=bias_value)
# Compute W.
weights = compute_weights(p_matrix, asgn, memory_mode=memory_mode)
# Init the gaps.
gaps = np.zeros(len(feats))
print('Computing init gaps...')
for block_idx in tqdm(range(len(feats))):
x = get_feat_block(
feats, block_idx, memory_mode, bias_value=bias_value)
gaps[block_idx] = compute_gap(x, asgn[block_idx], weights, n_feats,
cstrs[block_idx], cstrs_solver)
if save_asgn and block_idx in save_ids:
save_asgn_block(path_save_asgn, block_idx, asgn, 0)
save_xw_block(path_save_asgn, block_idx, x, weights, 0)
save_gt_block(path_save_asgn, block_idx, gts)
print('Init gap: {0:4E}, starting the optimization...'.format(gaps.sum()))
eval_metric = -1.0
prev_time = time.time() # init time of iterations
prev_global_time = prev_time
for t in range(n_iterations):
if eval_frequency > 0 and t % eval_frequency == 0:
# Evaluation.
if eval_function is not None and gts is not None:
print('Performing evaluation...')
eval_metric = eval_function.evaluate(asgn, gts, weights, feats,
rounding_function, cstrs)
if validation_info is not None:
gts_val = validation_info['gts']
feats_val = validation_info['feats']
eval_function.evaluate(None, gts_val, weights, feats_val,
rounding_function, None)
else:
eval_metric = -1.0
if compute_objective and t % objective_frequency == 0:
print('Computing objective...')
objective_value = {}
| random_line_split |
|
main.go | .Base0C,
"0D": s.Base0D,
"0E": s.Base0E,
"0F": s.Base0F,
} {
vars[fmt.Sprintf("base%s-hex", base)] = color
vars[fmt.Sprintf("base%s-hex-r", base)] = color[0:2]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[0:2])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[0:2])
vars[fmt.Sprintf("base%s-hex-g", base)] = color[2:4]
vars[fmt.Sprintf("base%s-rgb-g", base)] = toRGB(color[2:4])
vars[fmt.Sprintf("base%s-dec-g", base)] = toDec(color[2:4])
vars[fmt.Sprintf("base%s-hex-r", base)] = color[4:6]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[4:6])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[4:6])
}
return vars
}
func toRGB(c string) uint64 {
v, _ := strconv.ParseUint(c, 16, 32)
return v
}
func toDec(c string) float64 {
v := toRGB(c)
return float64(v) / 255
}
func main() {
log := log15.New()
log.Debug("retrieving configuration")
config, err := loadConfiguration()
if err != nil {
log.Error("retrieving configuration", "err", err)
return
}
switch len(os.Args) {
case 3:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = os.Args[2]
case 2:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = ""
case 1:
// Nothing to do
default:
log.Error("too many arguments")
return
}
scheme, err := loadScheme(log, config)
if err != nil {
log.Error("retrieving color scheme", "err", err)
return
}
log.Debug("retrieving templates list", "url", config.TemplatesListURL)
var templates map[string]string
err = loadYAMLFile(config.TemplatesListURL, &templates)
if err != nil {
log.Error("retrieving templates list", "err", err)
return
}
for template, app := range config.Applications {
log := log.New("template", template)
if len(app.TemplateRepositoryURL) == 0 {
if _, ok := templates[template]; !ok {
log.Error("finding template", "err", "can't find template in list")
continue
}
app.TemplateRepositoryURL = templates[template]
}
log.Info("building template", "template_repository_url", app.TemplateRepositoryURL)
parts := strings.Split(app.TemplateRepositoryURL, "/")
if len(parts) != 5 {
log.Error("building template", "err", "unhandled template repository url format", "template_repository_url", app.TemplateRepositoryURL)
continue
}
user, repository := parts[3], parts[4]
var templateConfig TemplateConfig
err = loadYAMLFile(githubFileURL(user, repository, "templates/config.yaml"), &templateConfig)
if err != nil {
log.Error("retrieving template configuration", "err", err)
continue
}
for file, _ := range templateConfig {
log := log.New("file", file)
body, err := loadFile(githubFileURL(user, repository, fmt.Sprintf("templates/%s.mustache", file)))
if err != nil {
log.Error("retrieving file")
continue
}
tpl, err := mustache.ParseString(string(body))
if err != nil {
log.Error("parsing template", "err", err)
continue
}
destination := expandPath(app.Files[file].Destination)
result := tpl.Render(scheme.Vars())
// If the mode is replace, we want to replace the
// content of the destination file with the result from
// the start marker to the end marker. We just load the
// current destination file, replace in-memory and
// continue as if the result was the complete file from
// start.
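			// For example, with StartMarker "# >>> b16m" and EndMarker "# <<< b16m"
			// (hypothetical values from the user's configuration), only the lines
			// between the two markers are replaced by the rendered template; the
			// rest of the destination file is preserved as-is.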
if app.Files[file].Mode == "replace" {
if len(app.Files[file].StartMarker) == 0 {
log.Error("empty start marker")
continue
}
if len(app.Files[file].EndMarker) == 0 {
log.Error("empty start marker")
continue
}
raw, err := ioutil.ReadFile(destination)
if err != nil {
log.Error("loading destination file", "err", err)
continue
}
var buf bytes.Buffer
scanner := bufio.NewScanner(bytes.NewReader(raw))
for scanner.Scan() {
line := scanner.Text()
buf.WriteString(line)
buf.WriteRune('\n')
// While we don't find the start
// marker, write the line in the
// buffer.
if line != app.Files[file].StartMarker {
continue
}
// If we find the start marker, write
// the result to the buffer.
buf.WriteString(result)
buf.WriteRune('\n')
// Then skip until the end marker.
for scanner.Scan() {
line = scanner.Text()
if line != app.Files[file].EndMarker {
continue
}
break
}
buf.WriteString(line)
buf.WriteRune('\n')
// And continue until the end of the
// scanner.
}
if scanner.Err() != nil {
log.Error("rewriting destination file", "err", err)
continue
}
// At this point, we just replace the result
// with the content of the buffer.
result = buf.String()
}
log.Info("writing template file", "destination", destination)
err = ioutil.WriteFile(destination, []byte(result), os.ModePerm)
if err != nil {
log.Error("writing destination file", "err", err)
continue
}
}
if len(app.Hook) == 0 {
continue
}
log.Debug("running hook", "cmd", app.Hook)
parts = strings.Fields(app.Hook)
out, err := exec.Command(parts[0], parts[1:]...).Output()
if err != nil {
log.Error("running hook", "err", err, "out", string(out))
continue
}
log.Info("running hook", "out", string(out))
}
}
func wrap(err error, msg string, args ...interface{}) error {
return fmt.Errorf(`%s: %w`, fmt.Sprintf(msg, args...), err)
}
func loadConfiguration() (Configuration, error) {
var config Configuration
// Set the defaults here so they can be omitted from the actual
// configuration.
config.SchemesListURL = githubFileURL("chriskempson", "base16-schemes-source", "list.yaml")
config.TemplatesListURL = githubFileURL("chriskempson", "base16-templates-source", "list.yaml")
raw, err := ioutil.ReadFile(xdg.New("b16m", "").QueryConfig("config.yaml"))
if err != nil {
return config, wrap(err, "finding configuration")
}
err = yaml.Unmarshal(raw, &config)
if err != nil {
return config, wrap(err, "parsing configuration")
}
return config, nil
}
func loadScheme(log log15.Logger, config Configuration) (ColorScheme, error) {
var scheme ColorScheme
if len(config.SchemeRepositoryURL) == 0 {
log.Debug("retrieving schemes list", "url", config.SchemesListURL)
var schemes map[string]string
err := loadYAMLFile(config.SchemesListURL, &schemes)
if err != nil {
return scheme, wrap(err, "retrieving schemes list")
}
for name, url := range schemes {
if !strings.HasPrefix(config.Scheme, name) {
continue
}
config.SchemeRepositoryURL = url
}
if len(config.SchemeRepositoryURL) == 0 {
return scheme, fmt.Errorf("scheme %s not found", config.Scheme)
}
}
parts := strings.Split(config.SchemeRepositoryURL, "/")
if len(parts) != 5 {
return scheme, fmt.Errorf("unhandled scheme repository url format: %s", config.SchemeRepositoryURL)
}
user, repository := parts[3], parts[4]
err := loadYAMLFile(githubFileURL(user, repository, fmt.Sprintf("%s.yaml", config.Scheme)), &scheme)
if err != nil {
return scheme, wrap(err, "loading file")
}
return scheme, nil
}
func loadFile(url string) ([]byte, error) {
res, err := http.Get(url)
if err != nil {
return nil, wrap(err, "retrieving list")
}
body, err := ioutil.ReadAll(res.Body)
if err != nil | {
return nil, wrap(err, "reading response")
} | conditional_block |
|
main.go | string `yaml:"output"`
} | Author string `yaml:"author"`
Base00 string `yaml:"base00"`
Base01 string `yaml:"base01"`
Base02 string `yaml:"base02"`
Base03 string `yaml:"base03"`
Base04 string `yaml:"base04"`
Base05 string `yaml:"base05"`
Base06 string `yaml:"base06"`
Base07 string `yaml:"base07"`
Base08 string `yaml:"base08"`
Base09 string `yaml:"base09"`
Base0A string `yaml:"base0A"`
Base0B string `yaml:"base0B"`
Base0C string `yaml:"base0C"`
Base0D string `yaml:"base0D"`
Base0E string `yaml:"base0E"`
Base0F string `yaml:"base0F"`
}
func (s ColorScheme) Vars() map[string]interface{} {
var vars = map[string]interface{}{
"scheme-name": s.Name,
"scheme-author": s.Author,
}
for base, color := range map[string]string{
"00": s.Base00,
"01": s.Base01,
"02": s.Base02,
"03": s.Base03,
"04": s.Base04,
"05": s.Base05,
"06": s.Base06,
"07": s.Base07,
"08": s.Base08,
"09": s.Base09,
"0A": s.Base0A,
"0B": s.Base0B,
"0C": s.Base0C,
"0D": s.Base0D,
"0E": s.Base0E,
"0F": s.Base0F,
} {
vars[fmt.Sprintf("base%s-hex", base)] = color
vars[fmt.Sprintf("base%s-hex-r", base)] = color[0:2]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[0:2])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[0:2])
vars[fmt.Sprintf("base%s-hex-g", base)] = color[2:4]
vars[fmt.Sprintf("base%s-rgb-g", base)] = toRGB(color[2:4])
vars[fmt.Sprintf("base%s-dec-g", base)] = toDec(color[2:4])
vars[fmt.Sprintf("base%s-hex-r", base)] = color[4:6]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[4:6])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[4:6])
}
return vars
}
func toRGB(c string) uint64 {
v, _ := strconv.ParseUint(c, 16, 32)
return v
}
func toDec(c string) float64 {
v := toRGB(c)
return float64(v) / 255
}
func main() {
log := log15.New()
log.Debug("retrieving configuration")
config, err := loadConfiguration()
if err != nil {
log.Error("retrieving configuration", "err", err)
return
}
switch len(os.Args) {
case 3:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = os.Args[2]
case 2:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = ""
case 1:
// Nothing to do
default:
log.Error("too many arguments")
return
}
scheme, err := loadScheme(log, config)
if err != nil {
log.Error("retrieving color scheme", "err", err)
return
}
log.Debug("retrieving templates list", "url", config.TemplatesListURL)
var templates map[string]string
err = loadYAMLFile(config.TemplatesListURL, &templates)
if err != nil {
log.Error("retrieving templates list", "err", err)
return
}
for template, app := range config.Applications {
log := log.New("template", template)
if len(app.TemplateRepositoryURL) == 0 {
if _, ok := templates[template]; !ok {
log.Error("finding template", "err", "can't find template in list")
continue
}
app.TemplateRepositoryURL = templates[template]
}
log.Info("building template", "template_repository_url", app.TemplateRepositoryURL)
parts := strings.Split(app.TemplateRepositoryURL, "/")
if len(parts) != 5 {
log.Error("building template", "err", "unhandled template repository url format", "template_repository_url", app.TemplateRepositoryURL)
continue
}
user, repository := parts[3], parts[4]
var templateConfig TemplateConfig
err = loadYAMLFile(githubFileURL(user, repository, "templates/config.yaml"), &templateConfig)
if err != nil {
log.Error("retrieving template configuration", "err", err)
continue
}
for file, _ := range templateConfig {
log := log.New("file", file)
body, err := loadFile(githubFileURL(user, repository, fmt.Sprintf("templates/%s.mustache", file)))
if err != nil {
log.Error("retrieving file")
continue
}
tpl, err := mustache.ParseString(string(body))
if err != nil {
log.Error("parsing template", "err", err)
continue
}
destination := expandPath(app.Files[file].Destination)
result := tpl.Render(scheme.Vars())
// If the mode is replace, we want to replace the
// content of the destination file with the result from
// the start marker to the end marker. We just load the
// current destination file, replace in-memory and
// continue as if the result was the complete file from
// start.
if app.Files[file].Mode == "replace" {
if len(app.Files[file].StartMarker) == 0 {
log.Error("empty start marker")
continue
}
if len(app.Files[file].EndMarker) == 0 {
log.Error("empty start marker")
continue
}
raw, err := ioutil.ReadFile(destination)
if err != nil {
log.Error("loading destination file", "err", err)
continue
}
var buf bytes.Buffer
scanner := bufio.NewScanner(bytes.NewReader(raw))
for scanner.Scan() {
line := scanner.Text()
buf.WriteString(line)
buf.WriteRune('\n')
// While we don't find the start
// marker, write the line in the
// buffer.
if line != app.Files[file].StartMarker {
continue
}
// If we find the start marker, write
// the result to the buffer.
buf.WriteString(result)
buf.WriteRune('\n')
// Then skip until the end marker.
for scanner.Scan() {
line = scanner.Text()
if line != app.Files[file].EndMarker {
continue
}
break
}
buf.WriteString(line)
buf.WriteRune('\n')
// And continue until the end of the
// scanner.
}
if scanner.Err() != nil {
log.Error("rewriting destination file", "err", err)
continue
}
// At this point, we just replace the result
// with the content of the buffer.
result = buf.String()
}
log.Info("writing template file", "destination", destination)
err = ioutil.WriteFile(destination, []byte(result), os.ModePerm)
if err != nil {
log.Error("writing destination file", "err", err)
continue
}
}
if len(app.Hook) == 0 {
continue
}
log.Debug("running hook", "cmd", app.Hook)
parts = strings.Fields(app.Hook)
out, err := exec.Command(parts[0], parts[1:]...).Output()
if err != nil {
log.Error("running hook", "err", err, "out", string(out))
continue
}
log.Info("running hook", "out", string(out))
}
}
func wrap(err error, msg string, args ...interface{}) error {
return fmt.Errorf(`%s: %w`, fmt.Sprintf(msg, args...), err)
}
func loadConfiguration() (Configuration, error) {
var config Configuration
// Set the defaults here so they can be omitted from the actual
// configuration.
config.SchemesListURL = githubFileURL("chriskempson", "base16-schemes-source", "list.yaml")
config.TemplatesListURL = githubFileURL("chriskempson", "base16-templates-source", "list.yaml")
raw, err := ioutil.ReadFile(xdg.New("b16m", "").QueryConfig("config.yaml"))
|
type ColorScheme struct {
Name string `yaml:"scheme"` | random_line_split |
main.go | 2 string `yaml:"base02"`
Base03 string `yaml:"base03"`
Base04 string `yaml:"base04"`
Base05 string `yaml:"base05"`
Base06 string `yaml:"base06"`
Base07 string `yaml:"base07"`
Base08 string `yaml:"base08"`
Base09 string `yaml:"base09"`
Base0A string `yaml:"base0A"`
Base0B string `yaml:"base0B"`
Base0C string `yaml:"base0C"`
Base0D string `yaml:"base0D"`
Base0E string `yaml:"base0E"`
Base0F string `yaml:"base0F"`
}
func (s ColorScheme) Vars() map[string]interface{} {
var vars = map[string]interface{}{
"scheme-name": s.Name,
"scheme-author": s.Author,
}
for base, color := range map[string]string{
"00": s.Base00,
"01": s.Base01,
"02": s.Base02,
"03": s.Base03,
"04": s.Base04,
"05": s.Base05,
"06": s.Base06,
"07": s.Base07,
"08": s.Base08,
"09": s.Base09,
"0A": s.Base0A,
"0B": s.Base0B,
"0C": s.Base0C,
"0D": s.Base0D,
"0E": s.Base0E,
"0F": s.Base0F,
} {
vars[fmt.Sprintf("base%s-hex", base)] = color
vars[fmt.Sprintf("base%s-hex-r", base)] = color[0:2]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[0:2])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[0:2])
vars[fmt.Sprintf("base%s-hex-g", base)] = color[2:4]
vars[fmt.Sprintf("base%s-rgb-g", base)] = toRGB(color[2:4])
vars[fmt.Sprintf("base%s-dec-g", base)] = toDec(color[2:4])
vars[fmt.Sprintf("base%s-hex-r", base)] = color[4:6]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[4:6])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[4:6])
}
return vars
}
func toRGB(c string) uint64 {
v, _ := strconv.ParseUint(c, 16, 32)
return v
}
func toDec(c string) float64 {
v := toRGB(c)
return float64(v) / 255
}
func main() {
log := log15.New()
log.Debug("retrieving configuration")
config, err := loadConfiguration()
if err != nil {
log.Error("retrieving configuration", "err", err)
return
}
switch len(os.Args) {
case 3:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = os.Args[2]
case 2:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = ""
case 1:
// Nothing to do
default:
log.Error("too many arguments")
return
}
scheme, err := loadScheme(log, config)
if err != nil {
log.Error("retrieving color scheme", "err", err)
return
}
log.Debug("retrieving templates list", "url", config.TemplatesListURL)
var templates map[string]string
err = loadYAMLFile(config.TemplatesListURL, &templates)
if err != nil {
log.Error("retrieving templates list", "err", err)
return
}
for template, app := range config.Applications {
log := log.New("template", template)
if len(app.TemplateRepositoryURL) == 0 {
if _, ok := templates[template]; !ok {
log.Error("finding template", "err", "can't find template in list")
continue
}
app.TemplateRepositoryURL = templates[template]
}
log.Info("building template", "template_repository_url", app.TemplateRepositoryURL)
parts := strings.Split(app.TemplateRepositoryURL, "/")
if len(parts) != 5 {
log.Error("building template", "err", "unhandled template repository url format", "template_repository_url", app.TemplateRepositoryURL)
continue
}
user, repository := parts[3], parts[4]
var templateConfig TemplateConfig
err = loadYAMLFile(githubFileURL(user, repository, "templates/config.yaml"), &templateConfig)
if err != nil {
log.Error("retrieving template configuration", "err", err)
continue
}
for file, _ := range templateConfig {
log := log.New("file", file)
body, err := loadFile(githubFileURL(user, repository, fmt.Sprintf("templates/%s.mustache", file)))
if err != nil {
log.Error("retrieving file")
continue
}
tpl, err := mustache.ParseString(string(body))
if err != nil {
log.Error("parsing template", "err", err)
continue
}
destination := expandPath(app.Files[file].Destination)
result := tpl.Render(scheme.Vars())
// If the mode is replace, we want to replace the
// content of the destination file with the result from
// the start marker to the end marker. We just load the
// current destination file, replace in-memory and
// continue as if the result was the complete file from
// start.
if app.Files[file].Mode == "replace" {
if len(app.Files[file].StartMarker) == 0 {
log.Error("empty start marker")
continue
}
if len(app.Files[file].EndMarker) == 0 {
log.Error("empty start marker")
continue
}
raw, err := ioutil.ReadFile(destination)
if err != nil {
log.Error("loading destination file", "err", err)
continue
}
var buf bytes.Buffer
scanner := bufio.NewScanner(bytes.NewReader(raw))
for scanner.Scan() {
line := scanner.Text()
buf.WriteString(line)
buf.WriteRune('\n')
// While we don't find the start
// marker, write the line in the
// buffer.
if line != app.Files[file].StartMarker {
continue
}
// If we find the start marker, write
// the result to the buffer.
buf.WriteString(result)
buf.WriteRune('\n')
// Then skip until the end marker.
for scanner.Scan() {
line = scanner.Text()
if line != app.Files[file].EndMarker {
continue
}
break
}
buf.WriteString(line)
buf.WriteRune('\n')
// And continue until the end of the
// scanner.
}
if scanner.Err() != nil {
log.Error("rewriting destination file", "err", err)
continue
}
// At this point, we just replace the result
// with the content of the buffer.
result = buf.String()
}
log.Info("writing template file", "destination", destination)
err = ioutil.WriteFile(destination, []byte(result), os.ModePerm)
if err != nil {
log.Error("writing destination file", "err", err)
continue
}
}
if len(app.Hook) == 0 {
continue
}
log.Debug("running hook", "cmd", app.Hook)
parts = strings.Fields(app.Hook)
out, err := exec.Command(parts[0], parts[1:]...).Output()
if err != nil {
log.Error("running hook", "err", err, "out", string(out))
continue
}
log.Info("running hook", "out", string(out))
}
}
func wrap(err error, msg string, args ...interface{}) error {
return fmt.Errorf(`%s: %w`, fmt.Sprintf(msg, args...), err)
}
func loadConfiguration() (Configuration, error) | {
var config Configuration
// Set the defaults here so they can be omitted from the actual
// configuration.
config.SchemesListURL = githubFileURL("chriskempson", "base16-schemes-source", "list.yaml")
config.TemplatesListURL = githubFileURL("chriskempson", "base16-templates-source", "list.yaml")
raw, err := ioutil.ReadFile(xdg.New("b16m", "").QueryConfig("config.yaml"))
if err != nil {
return config, wrap(err, "finding configuration")
}
err = yaml.Unmarshal(raw, &config)
if err != nil {
return config, wrap(err, "parsing configuration")
}
return config, nil
} | identifier_body |
|
main.go | .Sprintf("base%s-dec-r", base)] = toDec(color[0:2])
vars[fmt.Sprintf("base%s-hex-g", base)] = color[2:4]
vars[fmt.Sprintf("base%s-rgb-g", base)] = toRGB(color[2:4])
vars[fmt.Sprintf("base%s-dec-g", base)] = toDec(color[2:4])
vars[fmt.Sprintf("base%s-hex-r", base)] = color[4:6]
vars[fmt.Sprintf("base%s-rgb-r", base)] = toRGB(color[4:6])
vars[fmt.Sprintf("base%s-dec-r", base)] = toDec(color[4:6])
}
return vars
}
func toRGB(c string) uint64 {
v, _ := strconv.ParseUint(c, 16, 32)
return v
}
func toDec(c string) float64 {
v := toRGB(c)
return float64(v) / 255
}
func main() {
log := log15.New()
log.Debug("retrieving configuration")
config, err := loadConfiguration()
if err != nil {
log.Error("retrieving configuration", "err", err)
return
}
switch len(os.Args) {
case 3:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = os.Args[2]
case 2:
config.Scheme = os.Args[1]
config.SchemeRepositoryURL = ""
case 1:
// Nothing to do
default:
log.Error("too many arguments")
return
}
scheme, err := loadScheme(log, config)
if err != nil {
log.Error("retrieving color scheme", "err", err)
return
}
log.Debug("retrieving templates list", "url", config.TemplatesListURL)
var templates map[string]string
err = loadYAMLFile(config.TemplatesListURL, &templates)
if err != nil {
log.Error("retrieving templates list", "err", err)
return
}
for template, app := range config.Applications {
log := log.New("template", template)
if len(app.TemplateRepositoryURL) == 0 {
if _, ok := templates[template]; !ok {
log.Error("finding template", "err", "can't find template in list")
continue
}
app.TemplateRepositoryURL = templates[template]
}
log.Info("building template", "template_repository_url", app.TemplateRepositoryURL)
parts := strings.Split(app.TemplateRepositoryURL, "/")
if len(parts) != 5 {
log.Error("building template", "err", "unhandled template repository url format", "template_repository_url", app.TemplateRepositoryURL)
continue
}
user, repository := parts[3], parts[4]
var templateConfig TemplateConfig
err = loadYAMLFile(githubFileURL(user, repository, "templates/config.yaml"), &templateConfig)
if err != nil {
log.Error("retrieving template configuration", "err", err)
continue
}
for file, _ := range templateConfig {
log := log.New("file", file)
body, err := loadFile(githubFileURL(user, repository, fmt.Sprintf("templates/%s.mustache", file)))
if err != nil {
log.Error("retrieving file")
continue
}
tpl, err := mustache.ParseString(string(body))
if err != nil {
log.Error("parsing template", "err", err)
continue
}
destination := expandPath(app.Files[file].Destination)
result := tpl.Render(scheme.Vars())
// If the mode is replace, we want to replace the
// content of the destination file with the result from
// the start marker to the end marker. We just load the
// current destination file, replace in-memory and
// continue as if the result was the complete file from
// start.
if app.Files[file].Mode == "replace" {
if len(app.Files[file].StartMarker) == 0 {
log.Error("empty start marker")
continue
}
if len(app.Files[file].EndMarker) == 0 {
log.Error("empty start marker")
continue
}
raw, err := ioutil.ReadFile(destination)
if err != nil {
log.Error("loading destination file", "err", err)
continue
}
var buf bytes.Buffer
scanner := bufio.NewScanner(bytes.NewReader(raw))
for scanner.Scan() {
line := scanner.Text()
buf.WriteString(line)
buf.WriteRune('\n')
// While we don't find the start
// marker, write the line in the
// buffer.
if line != app.Files[file].StartMarker {
continue
}
// If we find the start marker, write
// the result to the buffer.
buf.WriteString(result)
buf.WriteRune('\n')
// Then skip until the end marker.
for scanner.Scan() {
line = scanner.Text()
if line != app.Files[file].EndMarker {
continue
}
break
}
buf.WriteString(line)
buf.WriteRune('\n')
// And continue until the end of the
// scanner.
}
if scanner.Err() != nil {
log.Error("rewriting destination file", "err", err)
continue
}
// At this point, we just replace the result
// with the content of the buffer.
result = buf.String()
}
log.Info("writing template file", "destination", destination)
err = ioutil.WriteFile(destination, []byte(result), os.ModePerm)
if err != nil {
log.Error("writing destination file", "err", err)
continue
}
}
if len(app.Hook) == 0 {
continue
}
log.Debug("running hook", "cmd", app.Hook)
parts = strings.Fields(app.Hook)
out, err := exec.Command(parts[0], parts[1:]...).Output()
if err != nil {
log.Error("running hook", "err", err, "out", string(out))
continue
}
log.Info("running hook", "out", string(out))
}
}
func wrap(err error, msg string, args ...interface{}) error {
return fmt.Errorf(`%s: %w`, fmt.Sprintf(msg, args...), err)
}
func loadConfiguration() (Configuration, error) {
var config Configuration
// Set the defaults here so they can be omitted from the actual
// configuration.
config.SchemesListURL = githubFileURL("chriskempson", "base16-schemes-source", "list.yaml")
config.TemplatesListURL = githubFileURL("chriskempson", "base16-templates-source", "list.yaml")
raw, err := ioutil.ReadFile(xdg.New("b16m", "").QueryConfig("config.yaml"))
if err != nil {
return config, wrap(err, "finding configuration")
}
err = yaml.Unmarshal(raw, &config)
if err != nil {
return config, wrap(err, "parsing configuration")
}
return config, nil
}
func loadScheme(log log15.Logger, config Configuration) (ColorScheme, error) {
var scheme ColorScheme
if len(config.SchemeRepositoryURL) == 0 {
log.Debug("retrieving schemes list", "url", config.SchemesListURL)
var schemes map[string]string
err := loadYAMLFile(config.SchemesListURL, &schemes)
if err != nil {
return scheme, wrap(err, "retrieving schemes list")
}
for name, url := range schemes {
if !strings.HasPrefix(config.Scheme, name) {
continue
}
config.SchemeRepositoryURL = url
}
if len(config.SchemeRepositoryURL) == 0 {
return scheme, fmt.Errorf("scheme %s not found", config.Scheme)
}
}
parts := strings.Split(config.SchemeRepositoryURL, "/")
if len(parts) != 5 {
return scheme, fmt.Errorf("unhandled scheme repository url format: %s", config.SchemeRepositoryURL)
}
user, repository := parts[3], parts[4]
err := loadYAMLFile(githubFileURL(user, repository, fmt.Sprintf("%s.yaml", config.Scheme)), &scheme)
if err != nil {
return scheme, wrap(err, "loading file")
}
return scheme, nil
}
func loadFile(url string) ([]byte, error) {
res, err := http.Get(url)
if err != nil {
return nil, wrap(err, "retrieving list")
}
body, err := ioutil.ReadAll(res.Body)
if err != nil {
return nil, wrap(err, "reading response")
}
if res.StatusCode != http.StatusOK {
return nil, fmt.Errorf("unexpected response (status=%d body=%s)", res.StatusCode, string(body))
}
return body, nil
}
func loadYAMLFile(url string, dest interface{}) error {
body, err := loadFile(url)
if err != nil {
return wrap(err, "loading file")
}
err = yaml.Unmarshal(body, dest)
if err != nil {
return wrap(err, "parsing file")
}
return nil
}
func | githubFileURL | identifier_name |
|
storage.rs | , BTreeMap<String, f64>>; 2],
pub nr_reports: (u64, u64),
}
impl StorageJob {
pub fn parse(spec: &JobSpec) -> Result<StorageJob> {
let mut job = StorageJob::default();
for (k, v) in spec.props[0].iter() {
match k.as_str() {
"apply" => job.apply = v.len() == 0 || v.parse::<bool>()?,
"commit" => job.commit = v.len() == 0 || v.parse::<bool>()?,
"loops" => job.loops = v.parse::<u32>()?,
"rps-max" => job.rps_max = Some(v.parse::<u32>()?),
"hash-size" => job.hash_size = Some(parse_size(v)? as usize),
"chunk-pages" => job.chunk_pages = Some(v.parse::<usize>()?),
"log-bps" => job.log_bps = parse_size(v)?,
"mem-avail-err-max" => job.mem_avail_err_max = v.parse::<f64>()?,
"mem-avail-inner-retries" => job.mem_avail_inner_retries = v.parse::<u32>()?,
"mem-avail-outer-retries" => job.mem_avail_outer_retries = v.parse::<u32>()?,
k => bail!("unknown property key {:?}", k),
}
}
if job.commit {
job.apply = true;
}
Ok(job)
}
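    // Example property string this parser accepts (hypothetical values, keys as
    // matched above): `loops=5,rps-max=2000,mem-avail-err-max=0.1`. Unknown keys
    // cause parse() to bail with "unknown property key".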
fn hashd_mem_usage_rep(rep: &rd_agent_intf::Report) -> usize {
match rep.usages.get(HASHD_BENCH_SVC_NAME) {
Some(usage) => usage.mem_bytes as usize,
None => 0,
}
}
fn measure_supportable_memory_size(
&mut self,
rctx: &mut RunCtx,
fake_cpu_bench: &HashdFakeCpuBench,
) -> Result<(usize, f64)> {
fake_cpu_bench.start(rctx)?;
const NR_MEM_USAGES: usize = 10;
let mut mem_usages = VecDeque::<usize>::new();
let mut mem_avail_err: f64 = 0.0;
rctx.wait_cond(
|af, progress| {
let cmd = &af.cmd.data;
let bench = &af.bench.data;
let rep = &af.report.data;
// Use period max to avoid confusions from temporary drops
// caused by e.g. bench completion.
mem_usages.push_front(Self::hashd_mem_usage_rep(rep));
mem_usages.truncate(NR_MEM_USAGES);
self.mem_usage = mem_usages.iter().fold(0, |max, u| max.max(*u));
self.mem_probe_at = rep.bench_hashd.mem_probe_at.timestamp() as u64;
if !rctx.test {
let mem = rctx.mem_info();
mem_avail_err = (self.mem_usage as f64 - mem.target as f64) / mem.target as f64;
}
// Abort early iff we go over. Memory usage may keep rising
// through refine stages, so we'll check for going under
// after run completion.
if mem_avail_err > self.mem_avail_err_max
&& rep.bench_hashd.phase > rd_hashd_intf::Phase::BenchMemBisect
{
return true;
}
progress.set_status(&format!(
"[{}] mem: {:>5}/{:>5}({:+5.1}%) rw:{:>5}/{:>5} p50/90/99: {:>5}/{:>5}/{:>5}",
rep.bench_hashd.phase.name(),
format_size(rep.bench_hashd.mem_probe_size),
format_size(self.mem_usage),
mem_avail_err * 100.0,
format_size_dashed(rep.usages[ROOT_SLICE].io_rbps),
format_size_dashed(rep.usages[ROOT_SLICE].io_wbps),
format_duration_dashed(rep.iolat.map["read"]["50"]),
format_duration_dashed(rep.iolat.map["read"]["90"]),
format_duration_dashed(rep.iolat.map["read"]["99"]),
));
bench.hashd_seq >= cmd.bench_hashd_seq
},
None,
Some(BenchProgress::new().monitor_systemd_unit(HASHD_BENCH_SVC_NAME)),
)?;
rctx.stop_hashd_bench()?;
if mem_avail_err > self.mem_avail_err_max {
return Ok((0, mem_avail_err));
}
let mem_size = rctx.access_agent_files(|af| {
af.bench.data.hashd.mem_size as f64 * af.bench.data.hashd.mem_frac
}) as usize;
Ok((mem_size, mem_avail_err))
}
fn process_retry(&mut self, rctx: &mut RunCtx) -> Result<bool> {
let mem = rctx.mem_info();
let cur_mem_avail = mem.avail + self.mem_usage - mem.target;
let consistent = (cur_mem_avail as f64 - self.prev_mem_avail as f64).abs()
< self.mem_avail_err_max * cur_mem_avail as f64;
let retry_outer = match (self.first_try, consistent, self.mem_avail_inner_retries > 0) {
(true, _, _) => {
warn!(
"storage: Starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, true, _) => {
warn!(
"storage: mem_avail consistent with the last, \
starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, false, false) => {
warn!("storage: Ran out of inner tries, starting over");
true
}
(false, false, true) => {
warn!(
"storage: Retrying without updating mem_avail {} (prev {}, cur {})",
format_size(mem.avail),
format_size(self.prev_mem_avail),
format_size(cur_mem_avail)
);
self.mem_avail_inner_retries -= 1;
false
}
};
if retry_outer {
rctx.update_mem_avail(cur_mem_avail)?;
if self.mem_avail_outer_retries == 0 {
bail!("available memory keeps fluctuating, keep the system idle");
}
self.mem_avail_outer_retries -= 1;
}
self.prev_mem_avail = cur_mem_avail;
self.first_try = false;
Ok(retry_outer)
}
pub fn format_header<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
_res: &StorageResult,
include_loops: bool,
) {
write!(
out,
"Params: hash_size={} rps_max={} log_bps={}",
format_size(rec.base_hashd_knobs.hash_size),
self.rps_max.unwrap_or(rec.base_hashd_knobs.rps_max),
format_size(self.log_bps)
)
.unwrap();
if include_loops {
writeln!(out, " loops={}", self.loops).unwrap();
} else {
writeln!(out, "").unwrap();
}
}
fn format_rstat<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
_rec: &StorageRecord,
res: &StorageResult,
opts: &FormatOpts,
) {
if opts.full {
writeln!(out, "Resource stat:\n").unwrap();
res.all_rstat.format(out, "ALL", opts);
writeln!(out, "").unwrap();
res.final_rstat.format(out, "FINAL", opts);
writeln!(out, "").unwrap();
}
writeln!(
out,
"IO BPS: read_final={} write_final={} read_all={} write_all={}",
format_size(res.final_rstat.io_bps.0["mean"]),
format_size(res.final_rstat.io_bps.1["mean"]),
format_size(res.all_rstat.io_bps.0["mean"]),
format_size(res.all_rstat.io_bps.1["mean"])
)
.unwrap();
}
fn | <'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
) {
write!(
out,
"Memory offloading: factor={:.3}@{} ",
res.mem_offload_factor, rec.mem.profile
)
.unwrap();
if self.loops > 1 {
writeln!(
out,
"usage/stdev={}/{} size/stdev={}/{} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_usage_stdev),
format_size(res.mem_size),
format_size(res.mem_size_stdev),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
} else {
writeln!(
out,
"usage={} size={} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_size),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
}
}
pub fn format_result<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
header: bool,
opts: | format_mem_summary | identifier_name |
storage.rs | _err = (self.mem_usage as f64 - mem.target as f64) / mem.target as f64;
}
// Abort early iff we go over. Memory usage may keep rising
// through refine stages, so we'll check for going under
// after run completion.
if mem_avail_err > self.mem_avail_err_max
&& rep.bench_hashd.phase > rd_hashd_intf::Phase::BenchMemBisect
{
return true;
}
progress.set_status(&format!(
"[{}] mem: {:>5}/{:>5}({:+5.1}%) rw:{:>5}/{:>5} p50/90/99: {:>5}/{:>5}/{:>5}",
rep.bench_hashd.phase.name(),
format_size(rep.bench_hashd.mem_probe_size),
format_size(self.mem_usage),
mem_avail_err * 100.0,
format_size_dashed(rep.usages[ROOT_SLICE].io_rbps),
format_size_dashed(rep.usages[ROOT_SLICE].io_wbps),
format_duration_dashed(rep.iolat.map["read"]["50"]),
format_duration_dashed(rep.iolat.map["read"]["90"]),
format_duration_dashed(rep.iolat.map["read"]["99"]),
));
bench.hashd_seq >= cmd.bench_hashd_seq
},
None,
Some(BenchProgress::new().monitor_systemd_unit(HASHD_BENCH_SVC_NAME)),
)?;
rctx.stop_hashd_bench()?;
if mem_avail_err > self.mem_avail_err_max {
return Ok((0, mem_avail_err));
}
let mem_size = rctx.access_agent_files(|af| {
af.bench.data.hashd.mem_size as f64 * af.bench.data.hashd.mem_frac
}) as usize;
Ok((mem_size, mem_avail_err))
}
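// Added note: the supportable size returned above is derived from the agent's bench result
// as mem_size * mem_frac, i.e. the portion of rd-hashd's sized footprint the probe settled
// on; mem_avail_err is passed through so the caller can decide whether to retry.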
fn process_retry(&mut self, rctx: &mut RunCtx) -> Result<bool> {
let mem = rctx.mem_info();
let cur_mem_avail = mem.avail + self.mem_usage - mem.target;
let consistent = (cur_mem_avail as f64 - self.prev_mem_avail as f64).abs()
< self.mem_avail_err_max * cur_mem_avail as f64;
let retry_outer = match (self.first_try, consistent, self.mem_avail_inner_retries > 0) {
(true, _, _) => {
warn!(
"storage: Starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, true, _) => {
warn!(
"storage: mem_avail consistent with the last, \
starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, false, false) => {
warn!("storage: Ran out of inner tries, starting over");
true
}
(false, false, true) => {
warn!(
"storage: Retrying without updating mem_avail {} (prev {}, cur {})",
format_size(mem.avail),
format_size(self.prev_mem_avail),
format_size(cur_mem_avail)
);
self.mem_avail_inner_retries -= 1;
false
}
};
if retry_outer {
rctx.update_mem_avail(cur_mem_avail)?;
if self.mem_avail_outer_retries == 0 {
bail!("available memory keeps fluctuating, keep the system idle");
}
self.mem_avail_outer_retries -= 1;
}
self.prev_mem_avail = cur_mem_avail;
self.first_try = false;
Ok(retry_outer)
}
pub fn format_header<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
_res: &StorageResult,
include_loops: bool,
) {
write!(
out,
"Params: hash_size={} rps_max={} log_bps={}",
format_size(rec.base_hashd_knobs.hash_size),
self.rps_max.unwrap_or(rec.base_hashd_knobs.rps_max),
format_size(self.log_bps)
)
.unwrap();
if include_loops {
writeln!(out, " loops={}", self.loops).unwrap();
} else {
writeln!(out, "").unwrap();
}
}
fn format_rstat<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
_rec: &StorageRecord,
res: &StorageResult,
opts: &FormatOpts,
) {
if opts.full {
writeln!(out, "Resource stat:\n").unwrap();
res.all_rstat.format(out, "ALL", opts);
writeln!(out, "").unwrap();
res.final_rstat.format(out, "FINAL", opts);
writeln!(out, "").unwrap();
}
writeln!(
out,
"IO BPS: read_final={} write_final={} read_all={} write_all={}",
format_size(res.final_rstat.io_bps.0["mean"]),
format_size(res.final_rstat.io_bps.1["mean"]),
format_size(res.all_rstat.io_bps.0["mean"]),
format_size(res.all_rstat.io_bps.1["mean"])
)
.unwrap();
}
fn format_mem_summary<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
) {
write!(
out,
"Memory offloading: factor={:.3}@{} ",
res.mem_offload_factor, rec.mem.profile
)
.unwrap();
if self.loops > 1 {
writeln!(
out,
"usage/stdev={}/{} size/stdev={}/{} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_usage_stdev),
format_size(res.mem_size),
format_size(res.mem_size_stdev),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
} else {
writeln!(
out,
"usage={} size={} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_size),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
}
}
pub fn format_result<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
header: bool,
opts: &FormatOpts,
) {
if header {
self.format_header(out, rec, res, true);
writeln!(out, "").unwrap();
}
StudyIoLatPcts::format_rw(out, &res.iolat, opts, None);
writeln!(out, "").unwrap();
self.format_rstat(out, rec, res, opts);
writeln!(out, "").unwrap();
self.format_mem_summary(out, rec, res);
}
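// Added note: format_result composes the pieces above in order -- optional header,
// read/write IO latency percentile table, resource stat block, then the memory
// offloading summary -- separated by blank lines.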
}
impl Job for StorageJob {
fn sysreqs(&self) -> BTreeSet<SysReq> {
HASHD_SYSREQS.clone()
}
fn run(&mut self, rctx: &mut RunCtx) -> Result<serde_json::Value> {
rctx.set_prep_testfiles()
.disable_zswap()
.start_agent(vec![])?;
// Depending on mem-profile, we might be using a large balloon which
// can push down available memory below workload's memory.low,
// cratering memory reclaim. Make sure memory protection is off. We
// aren't testing memory protection.
rctx.access_agent_files(|af| {
af.slices.data.disable_seqs.mem = af.report.data.seq;
af.slices.save().unwrap();
});
let saved_mem_avail_inner_retries = self.mem_avail_inner_retries;
let mut started_at;
let mut final_mem_probe_periods = vec![];
let mut mem_usages = vec![];
let mut mem_sizes = vec![];
let mut fake_cpu_bench;
'outer: loop {
final_mem_probe_periods.clear();
mem_usages.clear();
mem_sizes.clear();
self.mem_avail_inner_retries = saved_mem_avail_inner_retries;
started_at = unix_now();
let base = HashdFakeCpuBench::base(rctx);
fake_cpu_bench = HashdFakeCpuBench {
rps_max: self.rps_max.unwrap_or(base.rps_max),
hash_size: self.hash_size.unwrap_or(base.hash_size),
chunk_pages: self.chunk_pages.unwrap_or(base.chunk_pages),
log_bps: self.log_bps,
..base
};
// We now know all the parameters. Let's run the actual benchmark.
'inner: loop {
info!(
"storage: Measuring supportable memory footprint and IO latencies ({}/{})",
mem_sizes.len() + 1,
self.loops
);
let (mem_size, mem_avail_err) =
self.measure_supportable_memory_size(rctx, &fake_cpu_bench)?;
// check for both going over and under, see the above function
if mem_avail_err.abs() > self.mem_avail_err_max && !rctx.test {
warn!(
"storage: mem_avail error |{:.2}|% > {:.2}%, please keep system idle",
mem_avail_err * 100.0,
self.mem_avail_err_max * 100.0
);
if self.process_retry(rctx)? { | random_line_split |
||
storage.rs | job.loops = v.parse::<u32>()?,
"rps-max" => job.rps_max = Some(v.parse::<u32>()?),
"hash-size" => job.hash_size = Some(parse_size(v)? as usize),
"chunk-pages" => job.chunk_pages = Some(v.parse::<usize>()?),
"log-bps" => job.log_bps = parse_size(v)?,
"mem-avail-err-max" => job.mem_avail_err_max = v.parse::<f64>()?,
"mem-avail-inner-retries" => job.mem_avail_inner_retries = v.parse::<u32>()?,
"mem-avail-outer-retries" => job.mem_avail_outer_retries = v.parse::<u32>()?,
k => bail!("unknown property key {:?}", k),
}
}
if job.commit {
job.apply = true;
}
Ok(job)
}
fn hashd_mem_usage_rep(rep: &rd_agent_intf::Report) -> usize {
match rep.usages.get(HASHD_BENCH_SVC_NAME) {
Some(usage) => usage.mem_bytes as usize,
None => 0,
}
}
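// Added note: this helper reads the benchmark unit's memory usage out of the agent
// report, falling back to 0 when the rd-hashd bench service has no usage entry yet.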
fn measure_supportable_memory_size(
&mut self,
rctx: &mut RunCtx,
fake_cpu_bench: &HashdFakeCpuBench,
) -> Result<(usize, f64)> {
fake_cpu_bench.start(rctx)?;
const NR_MEM_USAGES: usize = 10;
let mut mem_usages = VecDeque::<usize>::new();
let mut mem_avail_err: f64 = 0.0;
rctx.wait_cond(
|af, progress| {
let cmd = &af.cmd.data;
let bench = &af.bench.data;
let rep = &af.report.data;
// Use period max to avoid confusion from temporary drops
// caused by e.g. bench completion.
mem_usages.push_front(Self::hashd_mem_usage_rep(rep));
mem_usages.truncate(NR_MEM_USAGES);
self.mem_usage = mem_usages.iter().fold(0, |max, u| max.max(*u));
self.mem_probe_at = rep.bench_hashd.mem_probe_at.timestamp() as u64;
if !rctx.test {
let mem = rctx.mem_info();
mem_avail_err = (self.mem_usage as f64 - mem.target as f64) / mem.target as f64;
}
// Abort early iff we go over. Memory usage may keep rising
// through refine stages, so we'll check for going under
// after run completion.
if mem_avail_err > self.mem_avail_err_max
&& rep.bench_hashd.phase > rd_hashd_intf::Phase::BenchMemBisect
{
return true;
}
progress.set_status(&format!(
"[{}] mem: {:>5}/{:>5}({:+5.1}%) rw:{:>5}/{:>5} p50/90/99: {:>5}/{:>5}/{:>5}",
rep.bench_hashd.phase.name(),
format_size(rep.bench_hashd.mem_probe_size),
format_size(self.mem_usage),
mem_avail_err * 100.0,
format_size_dashed(rep.usages[ROOT_SLICE].io_rbps),
format_size_dashed(rep.usages[ROOT_SLICE].io_wbps),
format_duration_dashed(rep.iolat.map["read"]["50"]),
format_duration_dashed(rep.iolat.map["read"]["90"]),
format_duration_dashed(rep.iolat.map["read"]["99"]),
));
bench.hashd_seq >= cmd.bench_hashd_seq
},
None,
Some(BenchProgress::new().monitor_systemd_unit(HASHD_BENCH_SVC_NAME)),
)?;
rctx.stop_hashd_bench()?;
if mem_avail_err > self.mem_avail_err_max {
return Ok((0, mem_avail_err));
}
let mem_size = rctx.access_agent_files(|af| {
af.bench.data.hashd.mem_size as f64 * af.bench.data.hashd.mem_frac
}) as usize;
Ok((mem_size, mem_avail_err))
}
fn process_retry(&mut self, rctx: &mut RunCtx) -> Result<bool> {
let mem = rctx.mem_info();
let cur_mem_avail = mem.avail + self.mem_usage - mem.target;
let consistent = (cur_mem_avail as f64 - self.prev_mem_avail as f64).abs()
< self.mem_avail_err_max * cur_mem_avail as f64;
let retry_outer = match (self.first_try, consistent, self.mem_avail_inner_retries > 0) {
(true, _, _) => {
warn!(
"storage: Starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, true, _) => {
warn!(
"storage: mem_avail consistent with the last, \
starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, false, false) => {
warn!("storage: Ran out of inner tries, starting over");
true
}
(false, false, true) => {
warn!(
"storage: Retrying without updating mem_avail {} (prev {}, cur {})",
format_size(mem.avail),
format_size(self.prev_mem_avail),
format_size(cur_mem_avail)
);
self.mem_avail_inner_retries -= 1;
false
}
};
if retry_outer {
rctx.update_mem_avail(cur_mem_avail)?;
if self.mem_avail_outer_retries == 0 {
bail!("available memory keeps fluctuating, keep the system idle");
}
self.mem_avail_outer_retries -= 1;
}
self.prev_mem_avail = cur_mem_avail;
self.first_try = false;
Ok(retry_outer)
}
pub fn format_header<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
_res: &StorageResult,
include_loops: bool,
) {
write!(
out,
"Params: hash_size={} rps_max={} log_bps={}",
format_size(rec.base_hashd_knobs.hash_size),
self.rps_max.unwrap_or(rec.base_hashd_knobs.rps_max),
format_size(self.log_bps)
)
.unwrap();
if include_loops {
writeln!(out, " loops={}", self.loops).unwrap();
} else {
writeln!(out, "").unwrap();
}
}
fn format_rstat<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
_rec: &StorageRecord,
res: &StorageResult,
opts: &FormatOpts,
) {
if opts.full {
writeln!(out, "Resource stat:\n").unwrap();
res.all_rstat.format(out, "ALL", opts);
writeln!(out, "").unwrap();
res.final_rstat.format(out, "FINAL", opts);
writeln!(out, "").unwrap();
}
writeln!(
out,
"IO BPS: read_final={} write_final={} read_all={} write_all={}",
format_size(res.final_rstat.io_bps.0["mean"]),
format_size(res.final_rstat.io_bps.1["mean"]),
format_size(res.all_rstat.io_bps.0["mean"]),
format_size(res.all_rstat.io_bps.1["mean"])
)
.unwrap();
}
fn format_mem_summary<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
) {
write!(
out,
"Memory offloading: factor={:.3}@{} ",
res.mem_offload_factor, rec.mem.profile
)
.unwrap();
if self.loops > 1 {
writeln!(
out,
"usage/stdev={}/{} size/stdev={}/{} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_usage_stdev),
format_size(res.mem_size),
format_size(res.mem_size_stdev),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
} else {
writeln!(
out,
"usage={} size={} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_size),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
}
}
pub fn format_result<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
header: bool,
opts: &FormatOpts,
) {
if header {
self.format_header(out, rec, res, true);
writeln!(out, "").unwrap();
}
StudyIoLatPcts::format_rw(out, &res.iolat, opts, None);
writeln!(out, "").unwrap();
self.format_rstat(out, rec, res, opts);
writeln!(out, "").unwrap();
self.format_mem_summary(out, rec, res);
}
}
impl Job for StorageJob {
fn sysreqs(&self) -> BTreeSet<SysReq> | {
HASHD_SYSREQS.clone()
} | identifier_body |
|
storage.rs | may keep rising
// through refine stages, so we'll check for going under
// after run completion.
if mem_avail_err > self.mem_avail_err_max
&& rep.bench_hashd.phase > rd_hashd_intf::Phase::BenchMemBisect
{
return true;
}
progress.set_status(&format!(
"[{}] mem: {:>5}/{:>5}({:+5.1}%) rw:{:>5}/{:>5} p50/90/99: {:>5}/{:>5}/{:>5}",
rep.bench_hashd.phase.name(),
format_size(rep.bench_hashd.mem_probe_size),
format_size(self.mem_usage),
mem_avail_err * 100.0,
format_size_dashed(rep.usages[ROOT_SLICE].io_rbps),
format_size_dashed(rep.usages[ROOT_SLICE].io_wbps),
format_duration_dashed(rep.iolat.map["read"]["50"]),
format_duration_dashed(rep.iolat.map["read"]["90"]),
format_duration_dashed(rep.iolat.map["read"]["99"]),
));
bench.hashd_seq >= cmd.bench_hashd_seq
},
None,
Some(BenchProgress::new().monitor_systemd_unit(HASHD_BENCH_SVC_NAME)),
)?;
rctx.stop_hashd_bench()?;
if mem_avail_err > self.mem_avail_err_max {
return Ok((0, mem_avail_err));
}
let mem_size = rctx.access_agent_files(|af| {
af.bench.data.hashd.mem_size as f64 * af.bench.data.hashd.mem_frac
}) as usize;
Ok((mem_size, mem_avail_err))
}
fn process_retry(&mut self, rctx: &mut RunCtx) -> Result<bool> {
let mem = rctx.mem_info();
let cur_mem_avail = mem.avail + self.mem_usage - mem.target;
let consistent = (cur_mem_avail as f64 - self.prev_mem_avail as f64).abs()
< self.mem_avail_err_max * cur_mem_avail as f64;
let retry_outer = match (self.first_try, consistent, self.mem_avail_inner_retries > 0) {
(true, _, _) => {
warn!(
"storage: Starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, true, _) => {
warn!(
"storage: mem_avail consistent with the last, \
starting over with new mem_avail {}",
format_size(cur_mem_avail)
);
true
}
(false, false, false) => {
warn!("storage: Ran out of inner tries, starting over");
true
}
(false, false, true) => {
warn!(
"storage: Retrying without updating mem_avail {} (prev {}, cur {})",
format_size(mem.avail),
format_size(self.prev_mem_avail),
format_size(cur_mem_avail)
);
self.mem_avail_inner_retries -= 1;
false
}
};
if retry_outer {
rctx.update_mem_avail(cur_mem_avail)?;
if self.mem_avail_outer_retries == 0 {
bail!("available memory keeps fluctuating, keep the system idle");
}
self.mem_avail_outer_retries -= 1;
}
self.prev_mem_avail = cur_mem_avail;
self.first_try = false;
Ok(retry_outer)
}
pub fn format_header<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
_res: &StorageResult,
include_loops: bool,
) {
write!(
out,
"Params: hash_size={} rps_max={} log_bps={}",
format_size(rec.base_hashd_knobs.hash_size),
self.rps_max.unwrap_or(rec.base_hashd_knobs.rps_max),
format_size(self.log_bps)
)
.unwrap();
if include_loops {
writeln!(out, " loops={}", self.loops).unwrap();
} else {
writeln!(out, "").unwrap();
}
}
fn format_rstat<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
_rec: &StorageRecord,
res: &StorageResult,
opts: &FormatOpts,
) {
if opts.full {
writeln!(out, "Resource stat:\n").unwrap();
res.all_rstat.format(out, "ALL", opts);
writeln!(out, "").unwrap();
res.final_rstat.format(out, "FINAL", opts);
writeln!(out, "").unwrap();
}
writeln!(
out,
"IO BPS: read_final={} write_final={} read_all={} write_all={}",
format_size(res.final_rstat.io_bps.0["mean"]),
format_size(res.final_rstat.io_bps.1["mean"]),
format_size(res.all_rstat.io_bps.0["mean"]),
format_size(res.all_rstat.io_bps.1["mean"])
)
.unwrap();
}
fn format_mem_summary<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
) {
write!(
out,
"Memory offloading: factor={:.3}@{} ",
res.mem_offload_factor, rec.mem.profile
)
.unwrap();
if self.loops > 1 {
writeln!(
out,
"usage/stdev={}/{} size/stdev={}/{} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_usage_stdev),
format_size(res.mem_size),
format_size(res.mem_size_stdev),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
} else {
writeln!(
out,
"usage={} size={} missing={}%",
format_size(res.mem_usage),
format_size(res.mem_size),
format_pct(Studies::reports_missing(res.nr_reports)),
)
.unwrap();
}
}
pub fn format_result<'a>(
&self,
out: &mut Box<dyn Write + 'a>,
rec: &StorageRecord,
res: &StorageResult,
header: bool,
opts: &FormatOpts,
) {
if header {
self.format_header(out, rec, res, true);
writeln!(out, "").unwrap();
}
StudyIoLatPcts::format_rw(out, &res.iolat, opts, None);
writeln!(out, "").unwrap();
self.format_rstat(out, rec, res, opts);
writeln!(out, "").unwrap();
self.format_mem_summary(out, rec, res);
}
}
impl Job for StorageJob {
fn sysreqs(&self) -> BTreeSet<SysReq> {
HASHD_SYSREQS.clone()
}
fn run(&mut self, rctx: &mut RunCtx) -> Result<serde_json::Value> {
rctx.set_prep_testfiles()
.disable_zswap()
.start_agent(vec![])?;
// Depending on mem-profile, we might be using a large balloon which
// can push down available memory below workload's memory.low,
// cratering memory reclaim. Make sure memory protection is off. We
// aren't testing memory protection.
rctx.access_agent_files(|af| {
af.slices.data.disable_seqs.mem = af.report.data.seq;
af.slices.save().unwrap();
});
let saved_mem_avail_inner_retries = self.mem_avail_inner_retries;
let mut started_at;
let mut final_mem_probe_periods = vec![];
let mut mem_usages = vec![];
let mut mem_sizes = vec![];
let mut fake_cpu_bench;
'outer: loop {
final_mem_probe_periods.clear();
mem_usages.clear();
mem_sizes.clear();
self.mem_avail_inner_retries = saved_mem_avail_inner_retries;
started_at = unix_now();
let base = HashdFakeCpuBench::base(rctx);
fake_cpu_bench = HashdFakeCpuBench {
rps_max: self.rps_max.unwrap_or(base.rps_max),
hash_size: self.hash_size.unwrap_or(base.hash_size),
chunk_pages: self.chunk_pages.unwrap_or(base.chunk_pages),
log_bps: self.log_bps,
..base
};
// We now know all the parameters. Let's run the actual benchmark.
'inner: loop {
info!(
"storage: Measuring supportable memory footprint and IO latencies ({}/{})",
mem_sizes.len() + 1,
self.loops
);
let (mem_size, mem_avail_err) =
self.measure_supportable_memory_size(rctx, &fake_cpu_bench)?;
// check for both going over and under, see the above function
if mem_avail_err.abs() > self.mem_avail_err_max && !rctx.test {
warn!(
"storage: mem_avail error |{:.2}|% > {:.2}%, please keep system idle",
mem_avail_err * 100.0,
self.mem_avail_err_max * 100.0
);
if self.process_retry(rctx)? {
continue 'outer;
} else {
continue 'inner;
}
} else | {
self.prev_mem_avail = 0;
self.first_try = false;
} | conditional_block |
|
phrases_or_entities_over_time_first.py | dataframe.
years = ['2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017']
# zscores for years are 10 columns, 1st column is cluster number
col_list = ['cluster_number'] + years
centres_df = pd.read_csv('centres_df.tsv', sep='\t', names=col_list)
centres_df = centres_df.set_index('cluster_number', drop=True)
phrases_df = pd.read_csv('cluster_phrase_semicolon.txt', sep='\t', names=['cluster_number', 'phrases'])
phrases_df = phrases_df.set_index('cluster_number', drop=True)
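# Added note: col_list is built with `+` because list.extend() mutates in place and
# returns None. A quick sanity check of the intended value:
#     >>> ['cluster_number'] + ['2007', '2008']
#     ['cluster_number', '2007', '2008']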
def phrases_df_notfound_message(nounphrase):
""" Takes a noun phrase which is not found in the phrases_df input filef and prints a messages
saying that it is not found. It also includes suitable styling (in an <h3> tag).
ARGUMENTS: nounphrase: searched noun phrses
RETURNS: a html h5 message with a message listing the terms not found"""
return html.H5('Noun phrases not found: {}.'.format(nounphrase),
style={'color': colours['text']}
)
app = dash.Dash(__name__)
# Add the default Dash CSS, and some custom (very simple) CSS to remove the undo button
# app.css.append_css({'external_url': 'https://www.jsdelivr.com/package/npm/normalize.css'})
#app.css.append_css({'external_url': 'https://unpkg.com/sakura.css/css/sakura.css'})
app.css.append_css({'external_url': 'https://codepen.io/chriddyp/pen/bWLwgP.css'})
#app.css.append_css({'external_url': 'https://rawgit.com/lwileczek/Dash/master/undo_redo5.css'})
app.css.append_css({'external_url': '/static/reset.css'})
colours = {
'background': '#111111',
'text': '#0080A5'
}
app.layout = html.Div(style={'backgroundColor': colours['background'],
'height':'100vh', 'width': '100%'},
children=[
html.H2(children='Distribution of Noun phrases/Entity Mentions over time',
style={
'textAlign': 'center',
'color': colours['text']
}
),
html.Label(id='setlabel',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em',
'margin-left': '1%'
}),
dcc.Input(id='npinput1-state', value='', type='text', style={'width': '75%', 'margin-left': '1%'}),
html.Div([
html.Div([
html.Label('Type:',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='type_of_term',
options=[{'label': i, 'value': i} for i in ['Noun phrases', 'Entity mentions', 'Clusters']],
value='Noun phrases',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-left': '1%', 'float':'left'}),
html.Div([
html.Label('Time Period: ',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='time_period',
options=[{'label': i, 'value': i} for i in ['Monthly', 'Yearly']],
value='Monthly',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-right': '1%', 'float': 'left'})
], style={'width': '100%', 'overflow': 'hidden'}),
#html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': 'auto',
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': '1%'}),
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Div(id='output1'),
html.Div(id='output2')
])
@app.callback(
Output('setlabel', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value')])
def set_label(termtype, timeperiod):
""" Sets label based on the radio buttons selected"""
label = 'Graph these comma-separated noun phrases (yearly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Yearly' \
else 'Graph these comma-separated noun phrases (monthly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Monthly' \
else 'Graph these comma-separated entity mentions (yearly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Yearly' \
else 'Graph these comma-separated entity mentions (monthly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Monthly' \
else 'Enter a phrase and get similar terms and the distribution of its "cluster"'
return label
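# Added example (illustrative): the chained conditional above selects one label per radio
# combination, e.g. set_label('Entity mentions', 'Monthly') returns
# 'Graph these comma-separated entity mentions (monthly frequencies):'. The comparison
# strings must match the RadioItems values exactly, which is why the casing matters.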
@app.callback(
Output('npinput1-state', 'placeholder'),
[Input('type_of_term', 'value')])
def set_placeholder(termtype):
|
@app.callback(
Output('output1', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value'),
Input('submit-button', 'n_clicks')],
[State('npinput1-state', 'value')])
def create_graph(termtype, timeperiod, n_clicks, input_box):
""" Wrapped function which takes user input in a text box, and 2 radio buttons, returns the
appropriate graph if the query produces a hit in Solr, returns an error message otherwise.
ARGUMENTS: n_clicks: a parameter of the HTML button which indicates it has
been clicked
input_box: the content of the text box in which the user has
entered a comma-separated search query.
type_of_term: radio button with values 'Entity mention' or 'Noun phrase'
time_period: radio button with values 'Monthly' or 'Yearly'
RETURNS: 1 graph (total occurrences) of all terms which have results from
Solr, error messages of all terms which don't have results from Solr.
The 1 graph is generated based on the radio buttons' values. """
if termtype == 'Noun phrases' and timeperiod == 'Monthly':
# Call function show_graph_total_not_callback which is a normal function, not a decorator
return npvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Monthly':
return emvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Noun phrases' and timeperiod == 'Yearly':
return npvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Yearly':
return emvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Clusters':
# !!! DO NOT modify global variables
phrases_df_copy = phrases_df.copy()
# Add a new column which is 1 only for the cluster in which the term in input box is found.
phrases_df_copy['clusterfound'] = phrases_df_copy['phrases'].apply(lambda x: 1 if x.find(input_box.strip()) != -1 else 0)
if (phrases_df_copy.clusterfound==0).all():
return html.H5('Noun phrase "{}" not found. Try searching again!'.format(input_box.strip()),
style={'color': colours['text']}
)
# one_phrase_df will contain only one row
one_phrase_df = phrases_df_copy.loc[phrases_df_copy.clusterfound==1]
current_cluster = one_phrase_df.index.values[0]
current_cluster_message = 'Other noun phrases in same cluster (cluster {}):\n'.format(str(current_cluster))
current_cluster = 'Cluster {}'.format(current_cluster)
# Get the list of words using iloc[0] (only one row) and build it into a string with commas (input file had semicolons)
current_cluster_phrases = ', '. | """ Sets input placeholder based on the radio buttons selected"""
placeholder = 'E.g. search: "machine learning, model validation"' if termtype == 'Noun phrases'\
else 'E.g. search: "machine learning, model validation": each search term will automatically be converted to http://en.wikipedia.org/wiki/<search_term>' \
if termtype == 'Entity mentions' else 'E.g. model validation (one phrase only)'
return placeholder | identifier_body |
phrases_or_entities_over_time_first.py | dataframe.
years = ['2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017']
# zscores for years are 10 columns, 1st column is cluster number
col_list = ['cluster_number'] + years
centres_df = pd.read_csv('centres_df.tsv', sep='\t', names=col_list)
centres_df = centres_df.set_index('cluster_number', drop=True)
phrases_df = pd.read_csv('cluster_phrase_semicolon.txt', sep='\t', names=['cluster_number', 'phrases'])
phrases_df = phrases_df.set_index('cluster_number', drop=True)
def phrases_df_notfound_message(nounphrase):
""" Takes a noun phrase which is not found in the phrases_df input filef and prints a messages
saying that it is not found. It also includes suitable styling (in an <h3> tag).
ARGUMENTS: nounphrase: searched noun phrses
RETURNS: a html h5 message with a message listing the terms not found"""
return html.H5('Noun phrases not found: {}.'.format(nounphrase),
style={'color': colours['text']}
)
app = dash.Dash(__name__)
# Add the default Dash CSS, and some custom (very simple) CSS to remove the undo button
# app.css.append_css({'external_url': 'https://www.jsdelivr.com/package/npm/normalize.css'})
#app.css.append_css({'external_url': 'https://unpkg.com/sakura.css/css/sakura.css'})
app.css.append_css({'external_url': 'https://codepen.io/chriddyp/pen/bWLwgP.css'})
#app.css.append_css({'external_url': 'https://rawgit.com/lwileczek/Dash/master/undo_redo5.css'})
app.css.append_css({'external_url': '/static/reset.css'})
colours = {
'background': '#111111',
'text': '#0080A5'
}
app.layout = html.Div(style={'backgroundColor': colours['background'],
'height':'100vh', 'width': '100%'},
children=[
html.H2(children='Distribution of Noun phrases/Entity Mentions over time',
style={
'textAlign': 'center',
'color': colours['text']
}
),
html.Label(id='setlabel',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em',
'margin-left': '1%'
}),
dcc.Input(id='npinput1-state', value='', type='text', style={'width': '75%', 'margin-left': '1%'}),
html.Div([
html.Div([
html.Label('Type:',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='type_of_term',
options=[{'label': i, 'value': i} for i in ['Noun phrases', 'Entity mentions', 'Clusters']],
value='Noun phrases',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-left': '1%', 'float':'left'}),
html.Div([
html.Label('Time Period: ',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='time_period',
options=[{'label': i, 'value': i} for i in ['Monthly', 'Yearly']],
value='Monthly',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-right': '1%', 'float': 'left'})
], style={'width': '100%', 'overflow': 'hidden'}),
#html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': 'auto',
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': '1%'}),
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Div(id='output1'),
html.Div(id='output2')
])
@app.callback(
Output('setlabel', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value')])
def set_label(termtype, timeperiod):
""" Sets label based on the radio buttons selected"""
label = 'Graph these comma-separated noun phrases (yearly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Yearly' \
else 'Graph these comma-separated noun phrases (monthly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Monthly' \
else 'Graph these comma-separated entity mentions (yearly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Yearly' \
else 'Graph these comma-separated entity mentions (monthly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Monthly' \
else 'Enter a phrase and get similar terms and the distribution of its "cluster"'
return label
@app.callback(
Output('npinput1-state', 'placeholder'),
[Input('type_of_term', 'value')])
def set_placeholder(termtype):
""" Sets input placeholder based on the radio buttons selected"""
placeholder = 'E.g. search: "machine learning, model validation"' if termtype == 'Noun phrases'\
else 'E.g. search: "machine learning, model validation": each search term will automatically be converted to http://en.wikipedia.org/wiki/<search_term>' \
if termtype == 'Entity mentions' else 'E.g. model validation (one phrase only)'
return placeholder
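# Added example (illustrative): with termtype == 'Clusters' the expression above falls
# through to the final else branch and returns 'E.g. model validation (one phrase only)'.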
@app.callback(
Output('output1', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value'),
Input('submit-button', 'n_clicks')],
[State('npinput1-state', 'value')])
def | (termtype, timeperiod, n_clicks, input_box):
""" Wrapped function which takes user input in a text box, and 2 radio buttons, returns the
appropriate graph if the query produces a hit in Solr, returns an error message otherwise.
ARGUMENTS: n_clicks: a parameter of the HTML button which indicates it has
been clicked
input_box: the content of the text box in which the user has
entered a comma-separated search query.
type_of_term: radio button with values 'Entity mention' or 'Noun phrase'
time_period: radio button with values 'Monthly' or 'Yearly'
RETURNS: 1 graph (total occurrences) of all terms which have results from
Solr, error messages of all terms which don't have results from Solr.
The 1 graph is generated based on the radio buttons' values. """
if termtype == 'Noun phrases' and timeperiod == 'Monthly':
# Call function show_graph_total_not_callback which is a normal function, not a decorator
return npvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Monthly':
return emvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Noun phrases' and timeperiod == 'Yearly':
return npvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Yearly':
return emvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Clusters':
# !!! DO NOT modify global variables
phrases_df_copy = phrases_df.copy()
# Add a new column which is 1 only for the cluster in which the term in input box is found.
phrases_df_copy['clusterfound'] = phrases_df_copy['phrases'].apply(lambda x: 1 if x.find(input_box.strip()) != -1 else 0)
if (phrases_df_copy.clusterfound==0).all():
return html.H5('Noun phrase "{}" not found. Try searching again!'.format(input_box.strip()),
style={'color': colours['text']}
)
# one_phrase_df will contain only one row
one_phrase_df = phrases_df_copy.loc[phrases_df_copy.clusterfound==1]
current_cluster = one_phrase_df.index.values[0]
current_cluster_message = 'Other noun phrases in same cluster (cluster {}):\n'.format(str(current_cluster))
current_cluster = 'Cluster {}'.format(current_cluster)
# Get the list of words using iloc[0] (only one row) and build it into a string with commas (input file had semicolons)
current_cluster_phrases = ', | create_graph | identifier_name |
phrases_or_entities_over_time_first.py | dataframe.
years = ['2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017']
# zscores for years are 10 columns, 1st column is cluster number
col_list = ['cluster_number'] + years
centres_df = pd.read_csv('centres_df.tsv', sep='\t', names=col_list)
centres_df = centres_df.set_index('cluster_number', drop=True)
phrases_df = pd.read_csv('cluster_phrase_semicolon.txt', sep='\t', names=['cluster_number', 'phrases'])
phrases_df = phrases_df.set_index('cluster_number', drop=True)
def phrases_df_notfound_message(nounphrase):
""" Takes a noun phrase which is not found in the phrases_df input filef and prints a messages
saying that it is not found. It also includes suitable styling (in an <h3> tag).
ARGUMENTS: nounphrase: searched noun phrses
RETURNS: a html h5 message with a message listing the terms not found"""
return html.H5('Noun phrases not found: {}.'.format(nounphrase),
# Add the default Dash CSS, and some custom (very simple) CSS to remove the undo button
# app.css.append_css({'external_url': 'https://www.jsdelivr.com/package/npm/normalize.css'})
#app.css.append_css({'external_url': 'https://unpkg.com/sakura.css/css/sakura.css'})
app.css.append_css({'external_url': 'https://codepen.io/chriddyp/pen/bWLwgP.css'})
#app.css.append_css({'external_url': 'https://rawgit.com/lwileczek/Dash/master/undo_redo5.css'})
app.css.append_css({'external_url': '/static/reset.css'})
colours = {
'background': '#111111',
'text': '#0080A5'
}
app.layout = html.Div(style={'backgroundColor': colours['background'],
'height':'100vh', 'width': '100%'},
children=[
html.H2(children='Distribution of Noun phrases/Entity Mentions over time',
style={
'textAlign': 'center',
'color': colours['text']
}
),
html.Label(id='setlabel',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em',
'margin-left': '1%'
}),
dcc.Input(id='npinput1-state', value='', type='text', style={'width': '75%', 'margin-left': '1%'}),
html.Div([
html.Div([
html.Label('Type:',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='type_of_term',
options=[{'label': i, 'value': i} for i in ['Noun phrases', 'Entity mentions', 'Clusters']],
value='Noun phrases',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-left': '1%', 'float':'left'}),
html.Div([
html.Label('Time Period: ',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='time_period',
options=[{'label': i, 'value': i} for i in ['Monthly', 'Yearly']],
value='Monthly',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-right': '1%', 'float': 'left'})
], style={'width': '100%', 'overflow': 'hidden'}),
#html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': 'auto',
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': '1%'}),
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Div(id='output1'),
html.Div(id='output2')
])
@app.callback(
Output('setlabel', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value')])
def set_label(termtype, timeperiod):
""" Sets label based on the radio buttons selected"""
label = 'Graph these comma-separated noun phrases (yearly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Yearly' \
else 'Graph these comma-separated noun phrases (monthly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Monthly' \
else 'Graph these comma-separated entity mentions (yearly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Yearly' \
else 'Graph these comma-separated entity mentions (monthly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Monthly' \
else 'Enter a phrase and get similar terms and the distribution of its "cluster"'
return label
@app.callback(
Output('npinput1-state', 'placeholder'),
[Input('type_of_term', 'value')])
def set_placeholder(termtype):
""" Sets input placeholder based on the radio buttons selected"""
placeholder = 'E.g. search: "machine learning, model validation"' if termtype == 'Noun phrases'\
else 'E.g. search: "machine learning, model validation": each search term will automatically be converted to http://en.wikipedia.org/wiki/<search_term>' \
if termtype == 'Entity mentions' else 'E.g. model validation (one phrase only)'
return placeholder
@app.callback(
Output('output1', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value'),
Input('submit-button', 'n_clicks')],
[State('npinput1-state', 'value')])
def create_graph(termtype, timeperiod, n_clicks, input_box):
""" Wrapped function which takes user input in a text box, and 2 radio buttons, returns the
appropriate graph if the query produces a hit in Solr, returns an error message otherwise.
ARGUMENTS: n_clicks: a parameter of the HTML button which indicates it has
been clicked
input_box: the content of the text box in which the user has
entered a comma-separated search query.
type_of_term: radio button with values 'Entity mention' or 'Noun phrase'
time_period: radio button with values 'Monthly' or 'Yearly'
RETURNS: 1 graph (total occurrences) of all terms which have results from
Solr, error messages of all terms which don't have results from Solr.
The 1 graph is generated based on the radio buttons' values. """
if termtype == 'Noun phrases' and timeperiod == 'Monthly':
# Call function show_graph_total_not_callback which is a normal function, not a decorator
return npvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Monthly':
return emvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Noun phrases' and timeperiod == 'Yearly':
return npvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Yearly':
return emvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Clusters':
# !!! DO NOT modify global variables
phrases_df_copy = phrases_df.copy()
# Add a new column which is 1 only for the cluster in which the term in input box is found.
phrases_df_copy['clusterfound'] = phrases_df_copy['phrases'].apply(lambda x: 1 if x.find(input_box.strip()) != -1 else 0)
if (phrases_df_copy.clusterfound==0).all():
return html.H5('Noun phrase "{}" not found. Try searching again!'.format(input_box.strip()),
style={'color': colours['text']}
)
# one_phrase_df will contain only one row
one_phrase_df = phrases_df_copy.loc[phrases_df_copy.clusterfound==1]
current_cluster = one_phrase_df.index.values[0]
current_cluster_message = 'Other noun phrases in same cluster (cluster {}):\n'.format(str(current_cluster))
current_cluster = 'Cluster {}'.format(current_cluster)
# Get the list of words using iloc[0] (only one row) and build it into a string with commas (input file had semicolons)
current_cluster_phrases = ', | style={'color': colours['text']}
)
app = dash.Dash(__name__) | random_line_split |
phrases_or_entities_over_time_first.py | dataframe.
years = ['2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017']
# zscores for years are 10 columns, 1st column is cluster number
col_list = ['cluster_number'] + years
centres_df = pd.read_csv('centres_df.tsv', sep='\t', names=col_list)
centres_df = centres_df.set_index('cluster_number', drop=True)
phrases_df = pd.read_csv('cluster_phrase_semicolon.txt', sep='\t', names=['cluster_number', 'phrases'])
phrases_df = phrases_df.set_index('cluster_number', drop=True)
def phrases_df_notfound_message(nounphrase):
""" Takes a noun phrase which is not found in the phrases_df input filef and prints a messages
saying that it is not found. It also includes suitable styling (in an <h3> tag).
ARGUMENTS: nounphrase: searched noun phrses
RETURNS: a html h5 message with a message listing the terms not found"""
return html.H5('Noun phrases not found: {}.'.format(nounphrase),
style={'color': colours['text']}
)
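# Added example (illustrative): phrases_df_notfound_message('model validation') renders an
# <h5> element reading 'Noun phrases not found: model validation.' in the app's text colour.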
app = dash.Dash(__name__)
# Add the default Dash CSS, and some custom (very simple) CSS to remove the undo button
# app.css.append_css({'external_url': 'https://www.jsdelivr.com/package/npm/normalize.css'})
#app.css.append_css({'external_url': 'https://unpkg.com/sakura.css/css/sakura.css'})
app.css.append_css({'external_url': 'https://codepen.io/chriddyp/pen/bWLwgP.css'})
#app.css.append_css({'external_url': 'https://rawgit.com/lwileczek/Dash/master/undo_redo5.css'})
app.css.append_css({'external_url': '/static/reset.css'})
colours = {
'background': '#111111',
'text': '#0080A5'
}
app.layout = html.Div(style={'backgroundColor': colours['background'],
'height':'100vh', 'width': '100%'},
children=[
html.H2(children='Distribution of Noun phrases/Entity Mentions over time',
style={
'textAlign': 'center',
'color': colours['text']
}
),
html.Label(id='setlabel',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em',
'margin-left': '1%'
}),
dcc.Input(id='npinput1-state', value='', type='text', style={'width': '75%', 'margin-left': '1%'}),
html.Div([
html.Div([
html.Label('Type:',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='type_of_term',
options=[{'label': i, 'value': i} for i in ['Noun phrases', 'Entity mentions', 'Clusters']],
value='Noun phrases',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-left': '1%', 'float':'left'}),
html.Div([
html.Label('Time Period: ',
style={
'textAlign': 'left',
'color': colours['text'],
'fontSize': '1.4em'
}),
dcc.RadioItems(
id='time_period',
options=[{'label': i, 'value': i} for i in ['Monthly', 'Yearly']],
value='Monthly',
style= {
'color': colours['text'],
'fontSize': '1.4em'
},
labelStyle={'display': 'inline-block'}
)
], style={'width': '50%', 'margin-right': '1%', 'float': 'left'})
], style={'width': '100%', 'overflow': 'hidden'}),
#html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': 'auto',
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Button(id='submit-button', n_clicks=0, children='Submit', style={'margin-top': '2%', 'margin-left': '1%'}),
# 'margin-right': 'auto', 'width': '20%', 'display': 'block'}),
html.Div(id='output1'),
html.Div(id='output2')
])
@app.callback(
Output('setlabel', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value')])
def set_label(termtype, timeperiod):
""" Sets label based on the radio buttons selected"""
label = 'Graph these comma-separated noun phrases (yearly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Yearly' \
else 'Graph these comma-separated noun phrases (monthly frequencies):' if termtype == 'Noun phrases' and timeperiod == 'Monthly' \
else 'Graph these comma-separated entity mentions (yearly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Yearly' \
else 'Graph these comma-separated entity mentions (monthly frequencies):' if termtype == 'Entity mentions' and timeperiod == 'Monthly' \
else 'Enter a phrase and get similar terms and the distribution of its "cluster"'
return label
@app.callback(
Output('npinput1-state', 'placeholder'),
[Input('type_of_term', 'value')])
def set_placeholder(termtype):
""" Sets input placeholder based on the radio buttons selected"""
placeholder = 'E.g. search: "machine learning, model validation"' if termtype == 'Noun phrases'\
else 'E.g. search: "machine learning, model validation": each search term will automatically be converted to http://en.wikipedia.org/wiki/<search_term>' \
if termtype == 'Entity mentions' else 'E.g. model validation (one phrase only)'
return placeholder
@app.callback(
Output('output1', 'children'),
[Input('type_of_term', 'value'),
Input('time_period', 'value'),
Input('submit-button', 'n_clicks')],
[State('npinput1-state', 'value')])
def create_graph(termtype, timeperiod, n_clicks, input_box):
""" Wrapped function which takes user input in a text box, and 2 radio buttons, returns the
appropriate graph if the query produces a hit in Solr, returns an error message otherwise.
ARGUMENTS: n_clicks: a parameter of the HTML button which indicates it has
been clicked
input_box: the content of the text box in which the user has
entered a comma-separated search query.
type_of_term: radio button with values 'Entity mention' or 'Noun phrase'
time_period: radio button with values 'Monthly' or 'Yearly'
RETURNS: 1 graph (total occurrences) of all terms which have results from
Solr, error messages of all terms which don't have results from Solr.
The 1 graph is generated based on the radio buttons' values. """
if termtype == 'Noun phrases' and timeperiod == 'Monthly':
# Call function show_graph_total_not_callback which is a normal function, not a decorator
return npvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Entity mentions' and timeperiod == 'Monthly':
return emvm.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Noun phrases' and timeperiod == 'Yearly':
|
if termtype == 'Entity mentions' and timeperiod == 'Yearly':
return emvy.show_graph_unique_not_callback(n_clicks, input_box)
if termtype == 'Clusters':
# !!! DO NOT modify global variables
phrases_df_copy = phrases_df.copy()
# Add a new column which is 1 only for the cluster in which the term in input box is found.
phrases_df_copy['clusterfound'] = phrases_df_copy['phrases'].apply(lambda x: 1 if x.find(input_box.strip()) != -1 else 0)
if (phrases_df_copy.clusterfound==0).all():
return html.H5('Noun phrase "{}" not found. Try searching again!'.format(input_box.strip()),
style={'color': colours['text']}
)
# one_phrase_df will contain only one row
one_phrase_df = phrases_df_copy.loc[phrases_df_copy.clusterfound==1]
current_cluster = one_phrase_df.index.values[0]
current_cluster_message = 'Other noun phrases in same cluster (cluster {}):\n'.format(str(current_cluster))
current_cluster = 'Cluster {}'.format(current_cluster)
# Get the list of words using iloc[0] (only one row) and build it into a string with commas (input file had semicolons)
current_cluster_phrases = ', | return npvy.show_graph_unique_not_callback(n_clicks, input_box) | conditional_block |
categorical.rs | .5, 0.0, 1.0, 0.0, 0.0]
//! // &[1.5, 1.0, 0.0, 1.5, 0.0, 0.0, 1.0, 0.0]
//! // &[1.5, 0.0, 1.0, 1.5, 0.0, 0.0, 0.0, 1.0]
//! ```
use std::iter;
use crate::error::Failed;
use crate::linalg::Matrix;
use crate::preprocessing::data_traits::{CategoricalFloat, Categorizable};
use crate::preprocessing::series_encoder::CategoryMapper;
/// OneHotEncoder Parameters
#[derive(Debug, Clone)]
pub struct OneHotEncoderParams {
/// Column numbers that contain categorical variables
pub col_idx_categorical: Option<Vec<usize>>,
/// (Currently not implemented) Try and infer which of the matrix columns are categorical variables
infer_categorical: bool,
}
impl OneHotEncoderParams {
/// Generate parameters from categorical variable column numbers
pub fn from_cat_idx(categorical_params: &[usize]) -> Self {
Self {
col_idx_categorical: Some(categorical_params.to_vec()),
infer_categorical: false,
}
}
}
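// Added example (illustrative): declare columns 0 and 3 of a data matrix as categorical.
//
//     let params = OneHotEncoderParams::from_cat_idx(&[0, 3]);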
/// Calculate the offset to parameters due to the introduction of one-hot encoding
fn find_new_idxs(num_params: usize, cat_sizes: &[usize], cat_idxs: &[usize]) -> Vec<usize> {
// This function uses iterators and returns a vector.
// In case we get a huge number of parameters this might be a problem
// todo: Change this such that it will return an iterator
let cat_idx = cat_idxs.iter().copied().chain((num_params..).take(1));
// Offset is constant between two categorical values, here we calculate the number of steps
// that remain constant
let repeats = cat_idx.scan(0, |a, v| {
let im = v + 1 - *a;
*a = v;
Some(im)
});
// Calculate the offset to parameter idx due to newly introduced one-hot vectors
let offset_ = cat_sizes.iter().scan(0, |a, &v| {
*a = *a + v - 1;
Some(*a)
});
let offset = (0..1).chain(offset_);
let new_param_idxs: Vec<usize> = (0..num_params)
.zip(
repeats
.zip(offset)
.map(|(r, o)| iter::repeat(o).take(r))
.flatten(),
)
.map(|(idx, ofst)| idx + ofst)
.collect();
new_param_idxs
}
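// Added worked example (mirrors the `adjust_idxs` test below): with num_params = 3 and a
// single categorical column at index 1 that expands into 3 one-hot columns,
// find_new_idxs(3, &[3], &[1]) maps old column indices [0, 1, 2] to [0, 1, 4]; column 2
// shifts right by the 2 extra columns the expansion introduces.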
fn validate_col_is_categorical<T: Categorizable>(data: &[T]) -> bool {
for v in data {
if !v.is_valid() {
return false;
}
}
true
}
/// Encode categorical variables of a data matrix to one-hot
#[derive(Debug, Clone)]
pub struct | {
category_mappers: Vec<CategoryMapper<CategoricalFloat>>,
col_idx_categorical: Vec<usize>,
}
impl OneHotEncoder {
/// Create an encoder instance with categories inferred from the data matrix
pub fn fit<T, M>(data: &M, params: OneHotEncoderParams) -> Result<OneHotEncoder, Failed>
where
T: Categorizable,
M: Matrix<T>,
{
match (params.col_idx_categorical, params.infer_categorical) {
(None, false) => Err(Failed::fit(
"Must pass categorical series ids or infer flag",
)),
(Some(_idxs), true) => Err(Failed::fit(
"Ambigous parameters, got both infer and categroy ids",
)),
(Some(mut idxs), false) => {
// make sure categories have same order as data columns
idxs.sort_unstable();
let (nrows, _) = data.shape();
// col buffer to avoid allocations
let mut col_buf: Vec<T> = iter::repeat(T::zero()).take(nrows).collect();
let mut res: Vec<CategoryMapper<CategoricalFloat>> = Vec::with_capacity(idxs.len());
for &idx in &idxs {
data.copy_col_as_vec(idx, &mut col_buf);
if !validate_col_is_categorical(&col_buf) {
let msg = format!(
"Column {} of data matrix containts non categorizable (integer) values",
idx
);
return Err(Failed::fit(&msg[..]));
}
let hashable_col = col_buf.iter().map(|v| v.to_category());
res.push(CategoryMapper::fit_to_iter(hashable_col));
}
Ok(Self {
category_mappers: res,
col_idx_categorical: idxs,
})
}
(None, true) => {
todo!("Auto-Inference for Categorical Variables not yet implemented")
}
}
}
/// Transform categorical variables into one-hot encoded columns and return a new matrix
pub fn transform<T, M>(&self, x: &M) -> Result<M, Failed>
where
T: Categorizable,
M: Matrix<T>,
{
let (nrows, p) = x.shape();
let additional_params: Vec<usize> = self
.category_mappers
.iter()
.map(|enc| enc.num_categories())
.collect();
// Each category of size v adds v-1 params
let expandws_p: usize = p + additional_params.iter().fold(0, |cs, &v| cs + v - 1);
let new_col_idx = find_new_idxs(p, &additional_params[..], &self.col_idx_categorical[..]);
let mut res = M::zeros(nrows, expandws_p);
for (pidx, &old_cidx) in self.col_idx_categorical.iter().enumerate() {
let cidx = new_col_idx[old_cidx];
let col_iter = (0..nrows).map(|r| x.get(r, old_cidx).to_category());
let sencoder = &self.category_mappers[pidx];
let oh_series = col_iter.map(|c| sencoder.get_one_hot::<T, Vec<T>>(&c));
for (row, oh_vec) in oh_series.enumerate() {
match oh_vec {
None => {
// Since we support T types, a bad value in a series causes it to be invalid
let msg = format!("At least one value in column {} doesn't conform to category definition", old_cidx);
return Err(Failed::transform(&msg[..]));
}
Some(v) => {
// copy one hot vectors to their place in the data matrix;
for (col_ofst, &val) in v.iter().enumerate() {
res.set(row, cidx + col_ofst, val);
}
}
}
}
}
// copy old data in x to their new location while skipping categorical vars (already treated)
let mut skip_idx_iter = self.col_idx_categorical.iter();
let mut cur_skip = skip_idx_iter.next();
for (old_p, &new_p) in new_col_idx.iter().enumerate() {
// if we found a treated variable, skip it
if let Some(&v) = cur_skip {
if v == old_p {
cur_skip = skip_idx_iter.next();
continue;
}
}
for r in 0..nrows {
let val = x.get(r, old_p);
res.set(r, new_p, val);
}
}
Ok(res)
}
}
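// Added usage sketch (illustrative; uses the DenseMatrix type from the tests below):
//
//     let x = DenseMatrix::from_2d_array(&[&[1.0, 1.5], &[2.0, 3.5]]);
//     let params = OneHotEncoderParams::from_cat_idx(&[0]);
//     let enc = OneHotEncoder::fit(&x, params)?;
//     let x_oh = enc.transform(&x)?; // column 0 becomes one-hot, column 1 is copied through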
#[cfg(test)]
mod tests {
use super::*;
use crate::linalg::naive::dense_matrix::DenseMatrix;
use crate::preprocessing::series_encoder::CategoryMapper;
#[test]
fn adjust_idxs() {
assert_eq!(find_new_idxs(0, &[], &[]), Vec::<usize>::new());
// [0,1,2] -> [0, 1, 1, 1, 2]
assert_eq!(find_new_idxs(3, &[3], &[1]), vec![0, 1, 4]);
}
fn build_cat_first_and_last() -> (DenseMatrix<f64>, DenseMatrix<f64>) {
let orig = DenseMatrix::from_2d_array(&[
&[1.0, 1.5, 3.0],
&[2.0, 1.5, 4.0],
&[1.0, 1.5, 5.0],
&[2.0, 1.5, 6.0],
]);
let oh_enc = DenseMatrix::from_2d_array(&[
&[1.0, 0.0, 1.5, 1.0, 0.0, 0.0, 0.0],
&[0.0, 1.0, 1.5, 0.0, 1.0, 0.0, 0.0],
&[1.0, 0.0, 1.5, 0.0, 0.0, 1.0, 0.0],
&[0.0, 1.0, 1.5, 0.0, 0.0, 0.0, 1.0],
| OneHotEncoder | identifier_name |
categorical.rs | .5, 0.0, 1.0, 0.0, 0.0]
//! // &[1.5, 1.0, 0.0, 1.5, 0.0, 0.0, 1.0, 0.0]
//! // &[1.5, 0.0, 1.0, 1.5, 0.0, 0.0, 0.0, 1.0]
//! ```
use std::iter;
use crate::error::Failed;
use crate::linalg::Matrix;
use crate::preprocessing::data_traits::{CategoricalFloat, Categorizable};
use crate::preprocessing::series_encoder::CategoryMapper;
/// OneHotEncoder Parameters
#[derive(Debug, Clone)]
pub struct OneHotEncoderParams {
/// Column numbers that contain categorical variables
pub col_idx_categorical: Option<Vec<usize>>,
/// (Currently not implemented) Try and infer which of the matrix columns are categorical variables
infer_categorical: bool,
}
impl OneHotEncoderParams {
/// Generate parameters from categorical variable column numbers
pub fn from_cat_idx(categorical_params: &[usize]) -> Self {
Self {
col_idx_categorical: Some(categorical_params.to_vec()),
infer_categorical: false,
}
}
}
/// Calculate the offset to parameters due to the introduction of one-hot encoding
fn find_new_idxs(num_params: usize, cat_sizes: &[usize], cat_idxs: &[usize]) -> Vec<usize> {
// This function uses iterators and returns a vector.
// In case we get a huge number of parameters this might be a problem
// todo: Change this such that it will return an iterator
| let repeats = cat_idx.scan(0, |a, v| {
let im = v + 1 - *a;
*a = v;
Some(im)
});
// Calculate the offset to parameter idx due to newly introduced one-hot vectors
let offset_ = cat_sizes.iter().scan(0, |a, &v| {
*a = *a + v - 1;
Some(*a)
});
let offset = (0..1).chain(offset_);
let new_param_idxs: Vec<usize> = (0..num_params)
.zip(
repeats
.zip(offset)
.map(|(r, o)| iter::repeat(o).take(r))
.flatten(),
)
.map(|(idx, ofst)| idx + ofst)
.collect();
new_param_idxs
}
fn validate_col_is_categorical<T: Categorizable>(data: &[T]) -> bool {
for v in data {
if !v.is_valid() {
return false;
}
}
true
}
/// Encode categorical variables of a data matrix to one-hot
#[derive(Debug, Clone)]
pub struct OneHotEncoder {
category_mappers: Vec<CategoryMapper<CategoricalFloat>>,
col_idx_categorical: Vec<usize>,
}
impl OneHotEncoder {
/// Create an encoder instance with categories inferred from the data matrix
pub fn fit<T, M>(data: &M, params: OneHotEncoderParams) -> Result<OneHotEncoder, Failed>
where
T: Categorizable,
M: Matrix<T>,
{
match (params.col_idx_categorical, params.infer_categorical) {
(None, false) => Err(Failed::fit(
"Must pass categorical series ids or infer flag",
)),
(Some(_idxs), true) => Err(Failed::fit(
"Ambigous parameters, got both infer and categroy ids",
)),
(Some(mut idxs), false) => {
// make sure categories have same order as data columns
idxs.sort_unstable();
let (nrows, _) = data.shape();
// col buffer to avoid allocations
let mut col_buf: Vec<T> = iter::repeat(T::zero()).take(nrows).collect();
let mut res: Vec<CategoryMapper<CategoricalFloat>> = Vec::with_capacity(idxs.len());
for &idx in &idxs {
data.copy_col_as_vec(idx, &mut col_buf);
if !validate_col_is_categorical(&col_buf) {
let msg = format!(
"Column {} of data matrix containts non categorizable (integer) values",
idx
);
return Err(Failed::fit(&msg[..]));
}
let hashable_col = col_buf.iter().map(|v| v.to_category());
res.push(CategoryMapper::fit_to_iter(hashable_col));
}
Ok(Self {
category_mappers: res,
col_idx_categorical: idxs,
})
}
(None, true) => {
todo!("Auto-Inference for Categorical Variables not yet implemented")
}
}
}
/// Transform categorical variables to one-hot encoding and return a new matrix
pub fn transform<T, M>(&self, x: &M) -> Result<M, Failed>
where
T: Categorizable,
M: Matrix<T>,
{
let (nrows, p) = x.shape();
let additional_params: Vec<usize> = self
.category_mappers
.iter()
.map(|enc| enc.num_categories())
.collect();
// Each category of size v adds v-1 params
let expandws_p: usize = p + additional_params.iter().fold(0, |cs, &v| cs + v - 1);
let new_col_idx = find_new_idxs(p, &additional_params[..], &self.col_idx_categorical[..]);
let mut res = M::zeros(nrows, expandws_p);
for (pidx, &old_cidx) in self.col_idx_categorical.iter().enumerate() {
let cidx = new_col_idx[old_cidx];
let col_iter = (0..nrows).map(|r| x.get(r, old_cidx).to_category());
let sencoder = &self.category_mappers[pidx];
let oh_series = col_iter.map(|c| sencoder.get_one_hot::<T, Vec<T>>(&c));
for (row, oh_vec) in oh_series.enumerate() {
match oh_vec {
None => {
// Since we support T types, a bad value in a series causes it to be invalid
let msg = format!("At least one value in column {} doesn't conform to category definition", old_cidx);
return Err(Failed::transform(&msg[..]));
}
Some(v) => {
// copy one hot vectors to their place in the data matrix;
for (col_ofst, &val) in v.iter().enumerate() {
res.set(row, cidx + col_ofst, val);
}
}
}
}
}
// copy old data in x to their new location while skipping categorical vars (already treated)
let mut skip_idx_iter = self.col_idx_categorical.iter();
let mut cur_skip = skip_idx_iter.next();
for (old_p, &new_p) in new_col_idx.iter().enumerate() {
// if we found a treated variable, skip it
if let Some(&v) = cur_skip {
if v == old_p {
cur_skip = skip_idx_iter.next();
continue;
}
}
for r in 0..nrows {
let val = x.get(r, old_p);
res.set(r, new_p, val);
}
}
Ok(res)
}
}
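// A minimal usage sketch for the encoder defined above, written only against items in
// this file; the `example_usage` function name is illustrative and the matrix mirrors
// the `build_cat_first_and_last` fixture in the tests below.
#[allow(dead_code)]
fn example_usage() -> Result<(), Failed> {
    use crate::linalg::naive::dense_matrix::DenseMatrix;

    let orig = DenseMatrix::from_2d_array(&[
        &[1.0, 1.5, 3.0],
        &[2.0, 1.5, 4.0],
        &[1.0, 1.5, 5.0],
        &[2.0, 1.5, 6.0],
    ]);
    // Columns 0 and 2 hold integer-valued categories (2 and 4 distinct values).
    let params = OneHotEncoderParams::from_cat_idx(&[0, 2]);
    let encoder = OneHotEncoder::fit(&orig, params)?;
    // Column 0 expands to 2 one-hot columns and column 2 to 4, so the untouched
    // column 1 sits between them and the result has 7 columns in total.
    let _encoded = encoder.transform(&orig)?;
    Ok(())
}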
#[cfg(test)]
mod tests {
use super::*;
use crate::linalg::naive::dense_matrix::DenseMatrix;
use crate::preprocessing::series_encoder::CategoryMapper;
#[test]
fn adjust_idxs() {
assert_eq!(find_new_idxs(0, &[], &[]), Vec::<usize>::new());
// [0,1,2] -> [0, 1, 1, 1, 2]
assert_eq!(find_new_idxs(3, &[3], &[1]), vec![0, 1, 4]);
}
fn build_cat_first_and_last() -> (DenseMatrix<f64>, DenseMatrix<f64>) {
let orig = DenseMatrix::from_2d_array(&[
&[1.0, 1.5, 3.0],
&[2.0, 1.5, 4.0],
&[1.0, 1.5, 5.0],
&[2.0, 1.5, 6.0],
]);
let oh_enc = DenseMatrix::from_2d_array(&[
&[1.0, 0.0, 1.5, 1.0, 0.0, 0.0, 0.0],
&[0.0, 1.0, 1.5, 0.0, 1.0, 0.0, 0.0],
&[1.0, 0.0, 1.5, 0.0, 0.0, 1.0, 0.0],
&[0.0, 1.0, 1.5, 0.0, 0.0, 0.0, 1.0],
]);
| let cat_idx = cat_idxs.iter().copied().chain((num_params..).take(1));
// Offset is constant between two categorical values, here we calculate the number of steps
// that remain constant | random_line_split |
categorical.rs | 5, 0.0, 1.0, 0.0, 0.0]
//! // &[1.5, 1.0, 0.0, 1.5, 0.0, 0.0, 1.0, 0.0]
//! // &[1.5, 0.0, 1.0, 1.5, 0.0, 0.0, 0.0, 1.0]
//! ```
use std::iter;
use crate::error::Failed;
use crate::linalg::Matrix;
use crate::preprocessing::data_traits::{CategoricalFloat, Categorizable};
use crate::preprocessing::series_encoder::CategoryMapper;
/// OneHotEncoder Parameters
#[derive(Debug, Clone)]
pub struct OneHotEncoderParams {
/// Column numbers that contain categorical variables
pub col_idx_categorical: Option<Vec<usize>>,
/// (Currently not implemented) Try and infer which of the matrix columns are categorical variables
infer_categorical: bool,
}
impl OneHotEncoderParams {
/// Generate parameters from categorical variable column numbers
pub fn from_cat_idx(categorical_params: &[usize]) -> Self |
}
/// Calculate the offset to parameters due to the introduction of one-hot encoding
fn find_new_idxs(num_params: usize, cat_sizes: &[usize], cat_idxs: &[usize]) -> Vec<usize> {
// This function uses iterators and returns a vector.
// In case we get a huge number of parameters this might be a problem
// todo: Change this such that it will return an iterator
let cat_idx = cat_idxs.iter().copied().chain((num_params..).take(1));
// Offset is constant between two categorical values, here we calculate the number of steps
// that remain constant
let repeats = cat_idx.scan(0, |a, v| {
let im = v + 1 - *a;
*a = v;
Some(im)
});
// Calculate the offset to parameter idx due to newly introduced one-hot vectors
let offset_ = cat_sizes.iter().scan(0, |a, &v| {
*a = *a + v - 1;
Some(*a)
});
let offset = (0..1).chain(offset_);
let new_param_idxs: Vec<usize> = (0..num_params)
.zip(
repeats
.zip(offset)
.map(|(r, o)| iter::repeat(o).take(r))
.flatten(),
)
.map(|(idx, ofst)| idx + ofst)
.collect();
new_param_idxs
}
fn validate_col_is_categorical<T: Categorizable>(data: &[T]) -> bool {
for v in data {
if !v.is_valid() {
return false;
}
}
true
}
/// Encode categorical variables of a data matrix to one-hot
#[derive(Debug, Clone)]
pub struct OneHotEncoder {
category_mappers: Vec<CategoryMapper<CategoricalFloat>>,
col_idx_categorical: Vec<usize>,
}
impl OneHotEncoder {
/// Create an encoder instance with categories inferred from the data matrix
pub fn fit<T, M>(data: &M, params: OneHotEncoderParams) -> Result<OneHotEncoder, Failed>
where
T: Categorizable,
M: Matrix<T>,
{
match (params.col_idx_categorical, params.infer_categorical) {
(None, false) => Err(Failed::fit(
"Must pass categorical series ids or infer flag",
)),
(Some(_idxs), true) => Err(Failed::fit(
"Ambigous parameters, got both infer and categroy ids",
)),
(Some(mut idxs), false) => {
// make sure categories have same order as data columns
idxs.sort_unstable();
let (nrows, _) = data.shape();
// col buffer to avoid allocations
let mut col_buf: Vec<T> = iter::repeat(T::zero()).take(nrows).collect();
let mut res: Vec<CategoryMapper<CategoricalFloat>> = Vec::with_capacity(idxs.len());
for &idx in &idxs {
data.copy_col_as_vec(idx, &mut col_buf);
if !validate_col_is_categorical(&col_buf) {
let msg = format!(
"Column {} of data matrix containts non categorizable (integer) values",
idx
);
return Err(Failed::fit(&msg[..]));
}
let hashable_col = col_buf.iter().map(|v| v.to_category());
res.push(CategoryMapper::fit_to_iter(hashable_col));
}
Ok(Self {
category_mappers: res,
col_idx_categorical: idxs,
})
}
(None, true) => {
todo!("Auto-Inference for Categorical Variables not yet implemented")
}
}
}
/// Transform categorical variables to one-hot encoding and return a new matrix
pub fn transform<T, M>(&self, x: &M) -> Result<M, Failed>
where
T: Categorizable,
M: Matrix<T>,
{
let (nrows, p) = x.shape();
let additional_params: Vec<usize> = self
.category_mappers
.iter()
.map(|enc| enc.num_categories())
.collect();
// Each category of size v adds v-1 params
let expandws_p: usize = p + additional_params.iter().fold(0, |cs, &v| cs + v - 1);
let new_col_idx = find_new_idxs(p, &additional_params[..], &self.col_idx_categorical[..]);
let mut res = M::zeros(nrows, expandws_p);
for (pidx, &old_cidx) in self.col_idx_categorical.iter().enumerate() {
let cidx = new_col_idx[old_cidx];
let col_iter = (0..nrows).map(|r| x.get(r, old_cidx).to_category());
let sencoder = &self.category_mappers[pidx];
let oh_series = col_iter.map(|c| sencoder.get_one_hot::<T, Vec<T>>(&c));
for (row, oh_vec) in oh_series.enumerate() {
match oh_vec {
None => {
// Since we support T types, a bad value in a series causes it to be invalid
let msg = format!("At least one value in column {} doesn't conform to category definition", old_cidx);
return Err(Failed::transform(&msg[..]));
}
Some(v) => {
// copy one hot vectors to their place in the data matrix;
for (col_ofst, &val) in v.iter().enumerate() {
res.set(row, cidx + col_ofst, val);
}
}
}
}
}
// copy old data in x to their new location while skipping categorical vars (already treated)
let mut skip_idx_iter = self.col_idx_categorical.iter();
let mut cur_skip = skip_idx_iter.next();
for (old_p, &new_p) in new_col_idx.iter().enumerate() {
// if we found a treated variable, skip it
if let Some(&v) = cur_skip {
if v == old_p {
cur_skip = skip_idx_iter.next();
continue;
}
}
for r in 0..nrows {
let val = x.get(r, old_p);
res.set(r, new_p, val);
}
}
Ok(res)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::linalg::naive::dense_matrix::DenseMatrix;
use crate::preprocessing::series_encoder::CategoryMapper;
#[test]
fn adjust_idxs() {
assert_eq!(find_new_idxs(0, &[], &[]), Vec::<usize>::new());
// [0,1,2] -> [0, 1, 1, 1, 2]
assert_eq!(find_new_idxs(3, &[3], &[1]), vec![0, 1, 4]);
}
fn build_cat_first_and_last() -> (DenseMatrix<f64>, DenseMatrix<f64>) {
let orig = DenseMatrix::from_2d_array(&[
&[1.0, 1.5, 3.0],
&[2.0, 1.5, 4.0],
&[1.0, 1.5, 5.0],
&[2.0, 1.5, 6.0],
]);
let oh_enc = DenseMatrix::from_2d_array(&[
&[1.0, 0.0, 1.5, 1.0, 0.0, 0.0, 0.0],
&[0.0, 1.0, 1.5, 0.0, 1.0, 0.0, 0.0],
&[1.0, 0.0, 1.5, 0.0, 0.0, 1.0, 0.0],
&[0.0, 1.0, 1.5, 0.0, 0.0, 0.0, 1.0],
| {
Self {
col_idx_categorical: Some(categorical_params.to_vec()),
infer_categorical: false,
}
} | identifier_body |
view.rs |
// found in the LICENSE file.
use crate::{app::App, geometry::Size};
use failure::Error;
use fidl::endpoints::{create_endpoints, create_proxy, ServerEnd};
use fidl_fuchsia_ui_gfx as gfx;
use fidl_fuchsia_ui_input;
use fidl_fuchsia_ui_scenic::{SessionListenerMarker, SessionListenerRequest};
use fidl_fuchsia_ui_viewsv1::{ViewListenerMarker, ViewListenerRequest};
use fuchsia_async as fasync;
use fuchsia_scenic::{ImportNode, Session, SessionPtr};
use fuchsia_zircon as zx;
use futures::{TryFutureExt, TryStreamExt};
use std::any::Any;
/// enum that defines all messages sent with `App::send_message` that
/// the view struct will understand and process.
pub enum ViewMessages {
/// Message that requests that a view redraw itself.
Update,
}
/// parameter struct passed to setup and update trait methods.
#[allow(missing_docs)]
pub struct ViewAssistantContext<'a> {
pub view_container: &'a mut fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
pub import_node: &'a ImportNode,
pub session: &'a SessionPtr,
pub key: ViewKey,
pub logical_size: Size,
pub size: Size,
pub metrics: Size,
pub messages: Vec<Box<dyn Any>>,
}
impl<'a> ViewAssistantContext<'a> {
/// Queue up a message for delivery
pub fn queue_message<A: Any>(&mut self, message: A) {
self.messages.push(Box::new(message));
}
}
/// Trait that allows mod developers to customize the behavior of view controllers.
pub trait ViewAssistant {
/// This method is called once when a view is created. It is a good point to create scenic
/// commands that apply throughout the lifetime of the view.
fn setup(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when a view controller has been asked to update the view.
fn update(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when input events come from scenic to this view.
fn handle_input_event(
&mut self,
_context: &mut ViewAssistantContext,
_event: &fidl_fuchsia_ui_input::InputEvent,
) -> Result<(), Error> {
Ok(())
}
/// This method is called when `App::send_message` is called with the associated
/// view controller's `ViewKey` and the view controller does not handle the message.
fn handle_message(&mut self, _message: &Any) {}
}
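// Minimal assistant sketch built only from the trait above: the two required methods
// return Ok(()) and the optional input handler just queues a redraw request. The
// `EmptyViewAssistant` name is illustrative; a real assistant would attach scenic
// resources to `context.import_node` in `setup` and rebuild its scene in `update`.
struct EmptyViewAssistant;

impl ViewAssistant for EmptyViewAssistant {
    fn setup(&mut self, _context: &ViewAssistantContext) -> Result<(), Error> {
        Ok(())
    }

    fn update(&mut self, _context: &ViewAssistantContext) -> Result<(), Error> {
        Ok(())
    }

    fn handle_input_event(
        &mut self,
        context: &mut ViewAssistantContext,
        _event: &fidl_fuchsia_ui_input::InputEvent,
    ) -> Result<(), Error> {
        // Queued messages are delivered back through `ViewController::send_message`,
        // where `ViewMessages::Update` triggers a redraw.
        context.queue_message(ViewMessages::Update);
        Ok(())
    }
}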
/// Reference to a view assistant. _This type is likely to change in the future so
/// using this type alias might make for easier forward migration._
pub type ViewAssistantPtr = Box<dyn ViewAssistant>;
/// Key identifying a view.
pub type ViewKey = u64;
/// This struct takes care of all the boilerplate needed for implementing a Fuchsia
/// view, forwarding the interesting implementation points to a struct implementing
/// the `ViewAssistant` trait.
pub struct ViewController {
#[allow(unused)]
view: fidl_fuchsia_ui_viewsv1::ViewProxy,
view_container: fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
session: SessionPtr,
import_node: ImportNode,
#[allow(unused)]
key: ViewKey,
assistant: ViewAssistantPtr,
metrics: Size,
physical_size: Size,
logical_size: Size,
}
impl ViewController {
pub(crate) fn new(
app: &mut App,
view_token: gfx::ExportToken,
key: ViewKey,
) -> Result<ViewController, Error> {
let (view, view_server_end) = create_proxy()?;
let (view_listener, view_listener_request) = create_endpoints()?;
let (mine, theirs) = zx::EventPair::create()?;
app.view_manager.create_view2(
view_server_end,
view_token.value,
view_listener,
theirs,
None,
)?;
let (session_listener, session_listener_request) = create_endpoints()?;
let (session_proxy, session_request) = create_proxy()?;
app.scenic.create_session(session_request, Some(session_listener))?;
let session = Session::new(session_proxy);
let mut view_assistant = app.create_view_assistant(&session)?;
let mut import_node = ImportNode::new(session.clone(), mine);
let (mut view_container, view_container_request) = create_proxy()?;
view.get_container(view_container_request)?;
let context = ViewAssistantContext {
view_container: &mut view_container,
import_node: &mut import_node,
session: &session,
key,
logical_size: Size::zero(),
size: Size::zero(),
metrics: Size::zero(),
messages: Vec::new(),
};
view_assistant.setup(&context)?;
let view_controller = ViewController {
view,
view_container: view_container,
session,
import_node,
metrics: Size::zero(),
physical_size: Size::zero(),
logical_size: Size::zero(),
key,
assistant: view_assistant,
};
Self::setup_session_listener(key, session_listener_request)?;
Self::setup_view_listener(key, view_listener_request)?;
Ok(view_controller)
}
fn setup_session_listener(
key: ViewKey,
session_listener_request: ServerEnd<SessionListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
session_listener_request
.into_stream()?
.map_ok(move |request| match request {
SessionListenerRequest::OnScenicEvent { events, .. } => App::with(|app| {
app.with_view(key, |view| {
view.handle_session_events(events);
})
}),
_ => (),
})
.try_collect::<()>()
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn setup_view_listener(
key: ViewKey,
view_listener_request: ServerEnd<ViewListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
view_listener_request
.into_stream()?
.try_for_each(
move |ViewListenerRequest::OnPropertiesChanged { properties, responder }| {
App::with(|app| {
app.with_view(key, |view| {
view.handle_properties_changed(&properties);
});
});
futures::future::ready(responder.send())
},
)
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn update(&mut self) {
let context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant.update(&context).unwrap_or_else(|e| panic!("Update error: {:?}", e));
self.present();
}
fn handle_session_events(&mut self, events: Vec<fidl_fuchsia_ui_scenic::Event>) {
events.iter().for_each(|event| match event {
fidl_fuchsia_ui_scenic::Event::Gfx(gfx::Event::Metrics(event)) => {
self.metrics = Size::new(event.metrics.scale_x, event.metrics.scale_y);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
fidl_fuchsia_ui_scenic::Event::Input(event) => {
let mut context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant
.handle_input_event(&mut context, &event)
.unwrap_or_else(|e| eprintln!("handle_event: {:?}", e));
for msg in context.messages {
self.send_message(&msg);
}
self.update();
}
_ => (),
});
}
fn present(&self) {
fasync::spawn_local(
self.session
.lock()
.present(0)
.map_ok(|_| ())
.unwrap_or_else(|e| panic!("present error: {:?}", e)),
);
}
fn handle_properties_changed(&mut self, properties: &fidl_fuchsia_ui_viewsv1::ViewProperties) {
if let Some(ref view_properties) = properties.view_layout {
self.physical_size = Size::new(view_properties.size.width, view_properties.size.height);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
}
/// This method sends an arbitrary message to this view. If it is not
/// handled directly by `ViewController::send_message` it will be forwarded
/// to the view assistant.
pub fn send_message(&mut self, msg: &Any) {
if let Some(view_msg) = msg.downcast_ref::<ViewMessages>() | {
match view_msg {
ViewMessages::Update => {
self.update();
}
}
} | conditional_block |
|
view.rs | reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::{app::App, geometry::Size};
use failure::Error;
use fidl::endpoints::{create_endpoints, create_proxy, ServerEnd};
use fidl_fuchsia_ui_gfx as gfx;
use fidl_fuchsia_ui_input;
use fidl_fuchsia_ui_scenic::{SessionListenerMarker, SessionListenerRequest};
use fidl_fuchsia_ui_viewsv1::{ViewListenerMarker, ViewListenerRequest};
use fuchsia_async as fasync;
use fuchsia_scenic::{ImportNode, Session, SessionPtr};
use fuchsia_zircon as zx;
use futures::{TryFutureExt, TryStreamExt};
use std::any::Any;
/// enum that defines all messages sent with `App::send_message` that
/// the view struct will understand and process.
pub enum ViewMessages {
/// Message that requests that a view redraw itself.
Update,
}
/// parameter struct passed to setup and update trait methods.
#[allow(missing_docs)]
pub struct ViewAssistantContext<'a> {
pub view_container: &'a mut fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
pub import_node: &'a ImportNode,
pub session: &'a SessionPtr,
pub key: ViewKey,
pub logical_size: Size,
pub size: Size,
pub metrics: Size,
pub messages: Vec<Box<dyn Any>>,
}
impl<'a> ViewAssistantContext<'a> {
/// Queue up a message for delivery
pub fn queue_message<A: Any>(&mut self, message: A) {
self.messages.push(Box::new(message));
}
}
/// Trait that allows mod developers to customize the behavior of view controllers.
pub trait ViewAssistant {
/// This method is called once when a view is created. It is a good point to create scenic
/// commands that apply throughout the lifetime of the view.
fn setup(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when a view controller has been asked to update the view.
fn update(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when input events come from scenic to this view.
fn handle_input_event(
&mut self,
_context: &mut ViewAssistantContext,
_event: &fidl_fuchsia_ui_input::InputEvent,
) -> Result<(), Error> {
Ok(())
}
/// This method is called when `App::send_message` is called with the associated
/// view controller's `ViewKey` and the view controller does not handle the message.
fn handle_message(&mut self, _message: &Any) {}
}
/// Reference to a view assistant. _This type is likely to change in the future so
/// using this type alias might make for easier forward migration._
pub type ViewAssistantPtr = Box<dyn ViewAssistant>;
/// Key identifying a view.
pub type ViewKey = u64;
/// This struct takes care of all the boilerplate needed for implementing a Fuchsia
/// view, forwarding the interesting implementation points to a struct implementing
/// the `ViewAssistant` trait.
pub struct ViewController {
#[allow(unused)]
view: fidl_fuchsia_ui_viewsv1::ViewProxy,
view_container: fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
session: SessionPtr,
import_node: ImportNode,
#[allow(unused)]
key: ViewKey,
assistant: ViewAssistantPtr,
metrics: Size,
physical_size: Size,
logical_size: Size,
}
impl ViewController {
pub(crate) fn new(
app: &mut App,
view_token: gfx::ExportToken,
key: ViewKey,
) -> Result<ViewController, Error> {
let (view, view_server_end) = create_proxy()?;
let (view_listener, view_listener_request) = create_endpoints()?;
let (mine, theirs) = zx::EventPair::create()?;
app.view_manager.create_view2(
view_server_end,
view_token.value,
view_listener,
theirs,
None,
)?;
let (session_listener, session_listener_request) = create_endpoints()?;
let (session_proxy, session_request) = create_proxy()?;
app.scenic.create_session(session_request, Some(session_listener))?;
let session = Session::new(session_proxy);
let mut view_assistant = app.create_view_assistant(&session)?;
let mut import_node = ImportNode::new(session.clone(), mine);
let (mut view_container, view_container_request) = create_proxy()?;
view.get_container(view_container_request)?;
let context = ViewAssistantContext {
view_container: &mut view_container,
import_node: &mut import_node,
session: &session,
key,
logical_size: Size::zero(),
size: Size::zero(),
metrics: Size::zero(),
messages: Vec::new(),
};
view_assistant.setup(&context)?;
let view_controller = ViewController {
view,
view_container: view_container,
session,
import_node,
metrics: Size::zero(),
physical_size: Size::zero(),
logical_size: Size::zero(),
key,
assistant: view_assistant,
};
Self::setup_session_listener(key, session_listener_request)?;
Self::setup_view_listener(key, view_listener_request)?;
Ok(view_controller)
}
fn setup_session_listener(
key: ViewKey,
session_listener_request: ServerEnd<SessionListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
session_listener_request
.into_stream()?
.map_ok(move |request| match request {
SessionListenerRequest::OnScenicEvent { events, .. } => App::with(|app| {
app.with_view(key, |view| {
view.handle_session_events(events);
})
}),
_ => (),
})
.try_collect::<()>()
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn setup_view_listener(
key: ViewKey,
view_listener_request: ServerEnd<ViewListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
view_listener_request
.into_stream()?
.try_for_each(
move |ViewListenerRequest::OnPropertiesChanged { properties, responder }| {
App::with(|app| {
app.with_view(key, |view| {
view.handle_properties_changed(&properties);
});
});
futures::future::ready(responder.send())
},
)
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn update(&mut self) {
let context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant.update(&context).unwrap_or_else(|e| panic!("Update error: {:?}", e));
self.present();
}
fn handle_session_events(&mut self, events: Vec<fidl_fuchsia_ui_scenic::Event>) | };
self.assistant
.handle_input_event(&mut context, &event)
.unwrap_or_else(|e| eprintln!("handle_event: {:?}", e));
for msg in context.messages {
self.send_message(&msg);
}
self.update();
}
_ => (),
});
}
fn present(&self) {
fasync::spawn_local(
self.session
.lock()
.present(0)
.map_ok(|_| ())
.unwrap_or_else(|e| panic!("present error: {:?}", e)),
);
}
fn handle_properties_changed(&mut self, properties: &fidl_fuchsia_ui_viewsv1::ViewProperties) {
if let Some(ref view_properties) = properties.view_layout {
self.physical_size = Size::new(view_properties.size.width, view_properties.size.height);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
}
/// This method sends an arbitrary message to this view. If it is not
/// handled directly by `ViewController::send_message` it will be forwarded
/// to the view assistant.
pub fn send_message(&mut self, msg: &Any) {
if let Some(view_msg) = msg.downcast_ref::<ViewMessages>() {
match view_msg | {
events.iter().for_each(|event| match event {
fidl_fuchsia_ui_scenic::Event::Gfx(gfx::Event::Metrics(event)) => {
self.metrics = Size::new(event.metrics.scale_x, event.metrics.scale_y);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
fidl_fuchsia_ui_scenic::Event::Input(event) => {
let mut context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(), | identifier_body |
view.rs | reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::{app::App, geometry::Size};
use failure::Error;
use fidl::endpoints::{create_endpoints, create_proxy, ServerEnd};
use fidl_fuchsia_ui_gfx as gfx;
use fidl_fuchsia_ui_input;
use fidl_fuchsia_ui_scenic::{SessionListenerMarker, SessionListenerRequest};
use fidl_fuchsia_ui_viewsv1::{ViewListenerMarker, ViewListenerRequest};
use fuchsia_async as fasync;
use fuchsia_scenic::{ImportNode, Session, SessionPtr};
use fuchsia_zircon as zx;
use futures::{TryFutureExt, TryStreamExt};
use std::any::Any;
/// enum that defines all messages sent with `App::send_message` that
/// the view struct will understand and process.
pub enum ViewMessages {
/// Message that requests that a view redraw itself.
Update,
}
/// parameter struct passed to setup and update trait methods.
#[allow(missing_docs)]
pub struct ViewAssistantContext<'a> {
pub view_container: &'a mut fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
pub import_node: &'a ImportNode,
pub session: &'a SessionPtr,
pub key: ViewKey,
pub logical_size: Size,
pub size: Size,
pub metrics: Size,
pub messages: Vec<Box<dyn Any>>,
}
impl<'a> ViewAssistantContext<'a> {
/// Queue up a message for delivery
pub fn queue_message<A: Any>(&mut self, message: A) {
self.messages.push(Box::new(message));
}
}
/// Trait that allows mod developers to customize the behavior of view controllers.
pub trait ViewAssistant {
/// This method is called once when a view is created. It is a good point to create scenic
/// commands that apply throughout the lifetime of the view.
fn setup(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when a view controller has been asked to update the view.
fn update(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when input events come from scenic to this view.
fn handle_input_event(
&mut self,
_context: &mut ViewAssistantContext,
_event: &fidl_fuchsia_ui_input::InputEvent,
) -> Result<(), Error> {
Ok(())
}
/// This method is called when `App::send_message` is called with the associated
/// view controller's `ViewKey` and the view controller does not handle the message.
fn handle_message(&mut self, _message: &Any) {}
}
/// Reference to a view assistant. _This type is likely to change in the future so
/// using this type alias might make for easier forward migration._
pub type ViewAssistantPtr = Box<dyn ViewAssistant>;
/// Key identifying a view.
pub type ViewKey = u64;
/// This struct takes care of all the boilerplate needed for implementing a Fuchsia
/// view, forwarding the interesting implementation points to a struct implementing
/// the `ViewAssistant` trait.
pub struct ViewController {
#[allow(unused)]
view: fidl_fuchsia_ui_viewsv1::ViewProxy,
view_container: fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
session: SessionPtr,
import_node: ImportNode,
#[allow(unused)]
key: ViewKey,
assistant: ViewAssistantPtr,
metrics: Size,
physical_size: Size,
logical_size: Size,
}
impl ViewController {
pub(crate) fn new(
app: &mut App,
view_token: gfx::ExportToken,
key: ViewKey,
) -> Result<ViewController, Error> {
let (view, view_server_end) = create_proxy()?;
let (view_listener, view_listener_request) = create_endpoints()?;
let (mine, theirs) = zx::EventPair::create()?;
app.view_manager.create_view2(
view_server_end,
view_token.value,
view_listener,
theirs,
None,
)?;
let (session_listener, session_listener_request) = create_endpoints()?;
let (session_proxy, session_request) = create_proxy()?;
app.scenic.create_session(session_request, Some(session_listener))?;
let session = Session::new(session_proxy);
let mut view_assistant = app.create_view_assistant(&session)?;
let mut import_node = ImportNode::new(session.clone(), mine);
let (mut view_container, view_container_request) = create_proxy()?;
view.get_container(view_container_request)?;
let context = ViewAssistantContext {
view_container: &mut view_container,
import_node: &mut import_node,
session: &session,
key,
logical_size: Size::zero(),
size: Size::zero(),
metrics: Size::zero(),
messages: Vec::new(),
};
view_assistant.setup(&context)?;
let view_controller = ViewController {
view,
view_container: view_container,
session,
import_node,
metrics: Size::zero(),
physical_size: Size::zero(),
logical_size: Size::zero(),
key,
assistant: view_assistant,
};
Self::setup_session_listener(key, session_listener_request)?;
Self::setup_view_listener(key, view_listener_request)?;
Ok(view_controller)
}
fn | (
key: ViewKey,
session_listener_request: ServerEnd<SessionListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
session_listener_request
.into_stream()?
.map_ok(move |request| match request {
SessionListenerRequest::OnScenicEvent { events, .. } => App::with(|app| {
app.with_view(key, |view| {
view.handle_session_events(events);
})
}),
_ => (),
})
.try_collect::<()>()
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn setup_view_listener(
key: ViewKey,
view_listener_request: ServerEnd<ViewListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
view_listener_request
.into_stream()?
.try_for_each(
move |ViewListenerRequest::OnPropertiesChanged { properties, responder }| {
App::with(|app| {
app.with_view(key, |view| {
view.handle_properties_changed(&properties);
});
});
futures::future::ready(responder.send())
},
)
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn update(&mut self) {
let context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant.update(&context).unwrap_or_else(|e| panic!("Update error: {:?}", e));
self.present();
}
fn handle_session_events(&mut self, events: Vec<fidl_fuchsia_ui_scenic::Event>) {
events.iter().for_each(|event| match event {
fidl_fuchsia_ui_scenic::Event::Gfx(gfx::Event::Metrics(event)) => {
self.metrics = Size::new(event.metrics.scale_x, event.metrics.scale_y);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
fidl_fuchsia_ui_scenic::Event::Input(event) => {
let mut context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant
.handle_input_event(&mut context, &event)
.unwrap_or_else(|e| eprintln!("handle_event: {:?}", e));
for msg in context.messages {
self.send_message(&msg);
}
self.update();
}
_ => (),
});
}
fn present(&self) {
fasync::spawn_local(
self.session
.lock()
.present(0)
.map_ok(|_| ())
.unwrap_or_else(|e| panic!("present error: {:?}", e)),
);
}
fn handle_properties_changed(&mut self, properties: &fidl_fuchsia_ui_viewsv1::ViewProperties) {
if let Some(ref view_properties) = properties.view_layout {
self.physical_size = Size::new(view_properties.size.width, view_properties.size.height);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
}
/// This method sends an arbitrary message to this view. If it is not
/// handled directly by `ViewController::send_message` it will be forwarded
/// to the view assistant.
pub fn send_message(&mut self, msg: &Any) {
if let Some(view_msg) = msg.downcast_ref::<ViewMessages>() {
match view_msg | setup_session_listener | identifier_name |
view.rs | rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::{app::App, geometry::Size};
use failure::Error;
use fidl::endpoints::{create_endpoints, create_proxy, ServerEnd};
use fidl_fuchsia_ui_gfx as gfx;
use fidl_fuchsia_ui_input;
use fidl_fuchsia_ui_scenic::{SessionListenerMarker, SessionListenerRequest};
use fidl_fuchsia_ui_viewsv1::{ViewListenerMarker, ViewListenerRequest};
use fuchsia_async as fasync;
use fuchsia_scenic::{ImportNode, Session, SessionPtr};
use fuchsia_zircon as zx;
use futures::{TryFutureExt, TryStreamExt};
use std::any::Any;
/// enum that defines all messages sent with `App::send_message` that
/// the view struct will understand and process.
pub enum ViewMessages {
/// Message that requests that a view redraw itself.
Update,
}
/// parameter struct passed to setup and update trait methods.
#[allow(missing_docs)]
pub struct ViewAssistantContext<'a> {
pub view_container: &'a mut fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
pub import_node: &'a ImportNode,
pub session: &'a SessionPtr,
pub key: ViewKey,
pub logical_size: Size,
pub size: Size,
pub metrics: Size,
pub messages: Vec<Box<dyn Any>>,
}
impl<'a> ViewAssistantContext<'a> {
/// Queue up a message for delivery
pub fn queue_message<A: Any>(&mut self, message: A) {
self.messages.push(Box::new(message));
}
}
/// Trait that allows mod developers to customize the behavior of view controllers.
pub trait ViewAssistant {
/// This method is called once when a view is created. It is a good point to create scenic
/// commands that apply throughout the lifetime of the view.
fn setup(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when a view controller has been asked to update the view.
fn update(&mut self, context: &ViewAssistantContext) -> Result<(), Error>;
/// This method is called when input events come from scenic to this view.
fn handle_input_event(
&mut self,
_context: &mut ViewAssistantContext,
_event: &fidl_fuchsia_ui_input::InputEvent,
) -> Result<(), Error> {
Ok(())
}
/// This method is called when `App::send_message` is called with the associated
/// view controller's `ViewKey` and the view controller does not handle the message.
fn handle_message(&mut self, _message: &Any) {}
}
/// Reference to a view assistant. _This type is likely to change in the future so
/// using this type alias might make for easier forward migration._
pub type ViewAssistantPtr = Box<dyn ViewAssistant>;
/// Key identifying a view.
pub type ViewKey = u64;
/// This struct takes care of all the boilerplate needed for implementing a Fuchsia
/// view, forwarding the interesting implementation points to a struct implementing
/// the `ViewAssistant` trait.
pub struct ViewController {
#[allow(unused)]
view: fidl_fuchsia_ui_viewsv1::ViewProxy,
view_container: fidl_fuchsia_ui_viewsv1::ViewContainerProxy,
session: SessionPtr,
import_node: ImportNode,
#[allow(unused)]
key: ViewKey,
assistant: ViewAssistantPtr,
metrics: Size,
physical_size: Size,
logical_size: Size,
}
impl ViewController {
pub(crate) fn new(
app: &mut App,
view_token: gfx::ExportToken,
key: ViewKey,
) -> Result<ViewController, Error> {
let (view, view_server_end) = create_proxy()?;
let (view_listener, view_listener_request) = create_endpoints()?;
let (mine, theirs) = zx::EventPair::create()?;
app.view_manager.create_view2(
view_server_end, | view_listener,
theirs,
None,
)?;
let (session_listener, session_listener_request) = create_endpoints()?;
let (session_proxy, session_request) = create_proxy()?;
app.scenic.create_session(session_request, Some(session_listener))?;
let session = Session::new(session_proxy);
let mut view_assistant = app.create_view_assistant(&session)?;
let mut import_node = ImportNode::new(session.clone(), mine);
let (mut view_container, view_container_request) = create_proxy()?;
view.get_container(view_container_request)?;
let context = ViewAssistantContext {
view_container: &mut view_container,
import_node: &mut import_node,
session: &session,
key,
logical_size: Size::zero(),
size: Size::zero(),
metrics: Size::zero(),
messages: Vec::new(),
};
view_assistant.setup(&context)?;
let view_controller = ViewController {
view,
view_container: view_container,
session,
import_node,
metrics: Size::zero(),
physical_size: Size::zero(),
logical_size: Size::zero(),
key,
assistant: view_assistant,
};
Self::setup_session_listener(key, session_listener_request)?;
Self::setup_view_listener(key, view_listener_request)?;
Ok(view_controller)
}
fn setup_session_listener(
key: ViewKey,
session_listener_request: ServerEnd<SessionListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
session_listener_request
.into_stream()?
.map_ok(move |request| match request {
SessionListenerRequest::OnScenicEvent { events, .. } => App::with(|app| {
app.with_view(key, |view| {
view.handle_session_events(events);
})
}),
_ => (),
})
.try_collect::<()>()
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn setup_view_listener(
key: ViewKey,
view_listener_request: ServerEnd<ViewListenerMarker>,
) -> Result<(), Error> {
fasync::spawn_local(
view_listener_request
.into_stream()?
.try_for_each(
move |ViewListenerRequest::OnPropertiesChanged { properties, responder }| {
App::with(|app| {
app.with_view(key, |view| {
view.handle_properties_changed(&properties);
});
});
futures::future::ready(responder.send())
},
)
.unwrap_or_else(|e| eprintln!("view listener error: {:?}", e)),
);
Ok(())
}
fn update(&mut self) {
let context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant.update(&context).unwrap_or_else(|e| panic!("Update error: {:?}", e));
self.present();
}
fn handle_session_events(&mut self, events: Vec<fidl_fuchsia_ui_scenic::Event>) {
events.iter().for_each(|event| match event {
fidl_fuchsia_ui_scenic::Event::Gfx(gfx::Event::Metrics(event)) => {
self.metrics = Size::new(event.metrics.scale_x, event.metrics.scale_y);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
fidl_fuchsia_ui_scenic::Event::Input(event) => {
let mut context = ViewAssistantContext {
view_container: &mut self.view_container,
import_node: &mut self.import_node,
session: &self.session,
key: self.key,
logical_size: self.logical_size,
size: self.physical_size,
metrics: self.metrics,
messages: Vec::new(),
};
self.assistant
.handle_input_event(&mut context, &event)
.unwrap_or_else(|e| eprintln!("handle_event: {:?}", e));
for msg in context.messages {
self.send_message(&msg);
}
self.update();
}
_ => (),
});
}
fn present(&self) {
fasync::spawn_local(
self.session
.lock()
.present(0)
.map_ok(|_| ())
.unwrap_or_else(|e| panic!("present error: {:?}", e)),
);
}
fn handle_properties_changed(&mut self, properties: &fidl_fuchsia_ui_viewsv1::ViewProperties) {
if let Some(ref view_properties) = properties.view_layout {
self.physical_size = Size::new(view_properties.size.width, view_properties.size.height);
self.logical_size = Size::new(
self.physical_size.width * self.metrics.width,
self.physical_size.height * self.metrics.height,
);
self.update();
}
}
/// This method sends an arbitrary message to this view. If it is not
/// handled directly by `ViewController::send_message` it will be forwarded
/// to the view assistant.
pub fn send_message(&mut self, msg: &Any) {
if let Some(view_msg) = msg.downcast_ref::<ViewMessages>() {
match view_msg | view_token.value, | random_line_split |
main.rs | sprite::{SpriteRender, SpriteSheet, SpriteSheetFormat, SpriteSheetHandle},
types::DefaultBackend,
GraphCreator,
RenderingSystem,
Texture,
},
ui::UiBundle,
utils::{application_root_dir, scene::BasicScenePrefab},
window::{ScreenDimensions, Window, WindowBundle},
};
use amethyst_physics::PhysicsBundle;
use specs_physics::{
bodies::BodyStatus,
colliders::Shape,
PhysicsBody,
PhysicsBodyBuilder,
PhysicsColliderBuilder,
};
pub type GamePrefabData = BasicScenePrefab<(Vec<Position>, Vec<Normal>, Vec<TexCoord>)>;
/// The Player `Resources` contains player relevant data and holds a reference
/// to the `Entity` that defines the player.
#[derive(Debug)]
pub struct Player {
/// The player `Entity`.
pub player: Entity,
}
#[derive(Default)]
struct GameState<'a, 'b> {
/// `State` specific dispatcher.
dispatcher: Option<Dispatcher<'a, 'b>>,
}
impl<'a, 'b> SimpleState for GameState<'a, 'b> {
fn on_start(&mut self, data: StateData<GameData>) {
info!("GameState.on_start");
let world = data.world;
// load scene handle
let scene_handle = world.exec(|loader: PrefabLoader<'_, GamePrefabData>| {
loader.load("prefab/scene.ron", RonFormat, ())
});
// load sprite sheets
let character_handle =
self.load_sprite_sheet("texture/character.png", "texture/character.ron", world);
let objects_handle =
self.load_sprite_sheet("texture/objects.png", "texture/objects.ron", world);
// create dispatcher
self.create_dispatcher(world);
// initialise scene
world.create_entity().with(scene_handle.clone()).build();
// create player Entity
let player = world
.create_entity()
.with(SpriteRender {
sprite_sheet: character_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Dynamic).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
22.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(25.0, 50.0, 0.0)))
.build();
// create the player Resource
world.add_resource(Player { player });
// create obstacle Entity
world
.create_entity()
.with(SpriteRender {
sprite_sheet: objects_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Static).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
16.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(75.0, 50.0, 0.0)))
.build();
}
fn fixed_update(&mut self, data: StateData<GameData>) -> SimpleTrans {
if let Some(dispatcher) = self.dispatcher.as_mut() {
dispatcher.dispatch(&data.world.res);
}
Trans::None
}
}
impl<'a, 'b> GameState<'a, 'b> {
fn | (
&mut self,
texture_path: &str,
ron_path: &str,
world: &mut World,
) -> SpriteSheetHandle {
// Load the sprite sheet necessary to render the graphics.
// The texture is the pixel data
// `sprite_sheet` is the layout of the sprites on the image
// `texture_handle` is a cloneable reference to the texture
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(texture_path, ImageFormat::default(), (), &texture_storage)
};
let loader = world.read_resource::<Loader>();
let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
ron_path, // Here we load the associated ron file
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
/// Creates the `State` specific `Dispatcher`.
fn create_dispatcher(&mut self, world: &mut World) {
if self.dispatcher.is_none() {
let mut dispatcher_builder = DispatcherBuilder::new();
PhysicsBundle::default()
.with_debug_lines()
.build(&mut dispatcher_builder)
.expect("Failed to register PhysicsBundle");
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(&mut world.res);
self.dispatcher = Some(dispatcher);
}
}
}
#[derive(Default)]
struct PlayerMovementSystem;
impl<'s> System<'s> for PlayerMovementSystem {
type SystemData = (
Read<'s, InputHandler<StringBindings>>,
ReadExpect<'s, Player>,
WriteStorage<'s, PhysicsBody<Float>>,
);
fn run(&mut self, data: Self::SystemData) {
let (input, player, mut physics_bodies) = data;
if let Some(physics_body) = physics_bodies.get_mut(player.player) {
// handle movement on X axis
if let Some(movement) = input.axis_value("leftright") {
physics_body.velocity.x = movement.into();
}
// handle movement on Y axis
if let Some(movement) = input.axis_value("updown") {
physics_body.velocity.y = movement.into();
}
}
}
}
fn main() -> amethyst::Result<()> {
//amethyst::start_logger(Default::default());
amethyst::Logger::from_config(Default::default())
.level_for("gfx_backend_vulkan", amethyst::LogLevelFilter::Warn)
.level_for("rendy_factory::factory", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::dynamic",
amethyst::LogLevelFilter::Warn,
)
.level_for(
"rendy_graph::node::render::pass",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_graph::node::present", amethyst::LogLevelFilter::Warn)
.level_for("rendy_graph::graph", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::linear",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_wsi", amethyst::LogLevelFilter::Warn)
.start();
let app_root = application_root_dir()?;
// display configuration
let display_config_path = app_root.join("examples/resources/display_config.ron");
// key bindings
let key_bindings_path = app_root.join("examples/resources/input.ron");
let game_data = GameDataBuilder::default()
.with_bundle(WindowBundle::from_config_path(display_config_path))?
.with_bundle(TransformBundle::new())?
.with_bundle(
InputBundle::<StringBindings>::new().with_bindings_from_file(key_bindings_path)?,
)?
.with_bundle(UiBundle::<DefaultBackend, StringBindings>::new())?
//.with_bundle(PhysicsBundle::default().with_debug_lines())?
.with(
Processor::<SpriteSheet>::new(),
"sprite_sheet_processor",
&[],
)
.with(PrefabLoaderSystem::<GamePrefabData>::default(), "", &[])
.with(
PlayerMovementSystem::default(),
"player_movement_system",
&[],
)
.with_thread_local(RenderingSystem::<DefaultBackend, _>::new(
ExampleGraph::default(),
));
let mut game = Application::build(app_root.join("examples/assets"), GameState::default())?
.build(game_data)?;
game.run();
Ok(())
}
// This graph structure is used for creating a proper `RenderGraph` for
// rendering. A RenderGraph can be thought of as the stages during a render
// pass. In our case, we are only executing one subpass (DrawFlat2D, or the
// sprite pass). This graph also needs to be rebuilt whenever the window is
// resized, so the boilerplate code for that operation is also here.
#[derive(Default)]
struct ExampleGraph {
dimensions: Option<ScreenDimensions>,
surface_format: Option<Format>,
dirty: bool,
}
impl GraphCreator<DefaultBackend> for ExampleGraph {
// This trait method reports to the renderer if the graph must be rebuilt,
// usually because the window has been resized. This implementation checks
// the screen size and returns true if it has changed.
fn rebuild(&mut self, res: &Resources) -> bool {
// Rebuild when dimensions change, but wait until at least two frames have the
// same.
let new_dimensions = res.try_fetch::<ScreenDimensions>();
use std::ops::Deref;
if self.dimensions.as_ref() != new_dimensions.as_ref().map(|d| d.deref()) {
self.dirty = true;
self.dimensions = new_dimensions.map(|d| d.clone());
return false;
}
return self.dirty;
}
// This is the core of a RenderGraph, which is building the actual graph with
// subpasses and target images.
fn builder(
&mut self,
factory: &mut Factory<DefaultBackend>,
res: &Resources,
| load_sprite_sheet | identifier_name |
main.rs | sprite::{SpriteRender, SpriteSheet, SpriteSheetFormat, SpriteSheetHandle},
types::DefaultBackend,
GraphCreator,
RenderingSystem,
Texture,
},
ui::UiBundle,
utils::{application_root_dir, scene::BasicScenePrefab},
window::{ScreenDimensions, Window, WindowBundle},
};
use amethyst_physics::PhysicsBundle;
use specs_physics::{
bodies::BodyStatus,
colliders::Shape,
PhysicsBody,
PhysicsBodyBuilder,
PhysicsColliderBuilder,
};
pub type GamePrefabData = BasicScenePrefab<(Vec<Position>, Vec<Normal>, Vec<TexCoord>)>;
/// The Player `Resources` contains player relevant data and holds a reference
/// to the `Entity` that defines the player.
#[derive(Debug)]
pub struct Player {
/// The player `Entity`.
pub player: Entity,
}
#[derive(Default)]
struct GameState<'a, 'b> {
/// `State` specific dispatcher.
dispatcher: Option<Dispatcher<'a, 'b>>,
}
impl<'a, 'b> SimpleState for GameState<'a, 'b> {
fn on_start(&mut self, data: StateData<GameData>) {
info!("GameState.on_start");
let world = data.world;
// load scene handle
let scene_handle = world.exec(|loader: PrefabLoader<'_, GamePrefabData>| {
loader.load("prefab/scene.ron", RonFormat, ())
});
// load sprite sheets
let character_handle =
self.load_sprite_sheet("texture/character.png", "texture/character.ron", world);
let objects_handle =
self.load_sprite_sheet("texture/objects.png", "texture/objects.ron", world);
// create dispatcher
self.create_dispatcher(world);
// initialise scene
world.create_entity().with(scene_handle.clone()).build();
// create player Entity
let player = world
.create_entity()
.with(SpriteRender {
sprite_sheet: character_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Dynamic).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
22.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(25.0, 50.0, 0.0)))
.build();
// create the player Resource
world.add_resource(Player { player });
// create obstacle Entity
world
.create_entity()
.with(SpriteRender {
sprite_sheet: objects_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Static).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
16.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(75.0, 50.0, 0.0)))
.build();
}
fn fixed_update(&mut self, data: StateData<GameData>) -> SimpleTrans {
if let Some(dispatcher) = self.dispatcher.as_mut() {
dispatcher.dispatch(&data.world.res);
}
Trans::None
}
}
impl<'a, 'b> GameState<'a, 'b> {
fn load_sprite_sheet(
&mut self,
texture_path: &str,
ron_path: &str,
world: &mut World,
) -> SpriteSheetHandle {
// Load the sprite sheet necessary to render the graphics.
// The texture is the pixel data
// `sprite_sheet` is the layout of the sprites on the image
// `texture_handle` is a cloneable reference to the texture
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(texture_path, ImageFormat::default(), (), &texture_storage)
};
let loader = world.read_resource::<Loader>();
let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
ron_path, // Here we load the associated ron file
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
/// Creates the `State` specific `Dispatcher`.
fn create_dispatcher(&mut self, world: &mut World) {
if self.dispatcher.is_none() {
let mut dispatcher_builder = DispatcherBuilder::new();
PhysicsBundle::default()
.with_debug_lines()
.build(&mut dispatcher_builder)
.expect("Failed to register PhysicsBundle");
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(&mut world.res);
self.dispatcher = Some(dispatcher);
}
}
}
#[derive(Default)]
struct PlayerMovementSystem;
impl<'s> System<'s> for PlayerMovementSystem {
type SystemData = (
Read<'s, InputHandler<StringBindings>>,
ReadExpect<'s, Player>,
WriteStorage<'s, PhysicsBody<Float>>,
);
fn run(&mut self, data: Self::SystemData) {
let (input, player, mut physics_bodies) = data;
if let Some(physics_body) = physics_bodies.get_mut(player.player) {
// handle movement on X axis
if let Some(movement) = input.axis_value("leftright") {
physics_body.velocity.x = movement.into();
}
// handle movement on Y axis
if let Some(movement) = input.axis_value("updown") {
physics_body.velocity.y = movement.into();
}
}
}
}
fn main() -> amethyst::Result<()> | .start();
let app_root = application_root_dir()?;
// display configuration
let display_config_path = app_root.join("examples/resources/display_config.ron");
// key bindings
let key_bindings_path = app_root.join("examples/resources/input.ron");
let game_data = GameDataBuilder::default()
.with_bundle(WindowBundle::from_config_path(display_config_path))?
.with_bundle(TransformBundle::new())?
.with_bundle(
InputBundle::<StringBindings>::new().with_bindings_from_file(key_bindings_path)?,
)?
.with_bundle(UiBundle::<DefaultBackend, StringBindings>::new())?
//.with_bundle(PhysicsBundle::default().with_debug_lines())?
.with(
Processor::<SpriteSheet>::new(),
"sprite_sheet_processor",
&[],
)
.with(PrefabLoaderSystem::<GamePrefabData>::default(), "", &[])
.with(
PlayerMovementSystem::default(),
"player_movement_system",
&[],
)
.with_thread_local(RenderingSystem::<DefaultBackend, _>::new(
ExampleGraph::default(),
));
let mut game = Application::build(app_root.join("examples/assets"), GameState::default())?
.build(game_data)?;
game.run();
Ok(())
}
// This graph structure is used for creating a proper `RenderGraph` for
// rendering. A RenderGraph can be thought of as the stages during a render
// pass. In our case, we are only executing one subpass (DrawFlat2D, or the
// sprite pass). This graph also needs to be rebuilt whenever the window is
// resized, so the boilerplate code for that operation is also here.
#[derive(Default)]
struct ExampleGraph {
dimensions: Option<ScreenDimensions>,
surface_format: Option<Format>,
dirty: bool,
}
impl GraphCreator<DefaultBackend> for ExampleGraph {
// This trait method reports to the renderer if the graph must be rebuilt,
// usually because the window has been resized. This implementation checks
// the screen size and returns true if it has changed.
fn rebuild(&mut self, res: &Resources) -> bool {
// Rebuild when dimensions change, but wait until at least two frames have the
// same.
let new_dimensions = res.try_fetch::<ScreenDimensions>();
use std::ops::Deref;
if self.dimensions.as_ref() != new_dimensions.as_ref().map(|d| d.deref()) {
self.dirty = true;
self.dimensions = new_dimensions.map(|d| d.clone());
return false;
}
return self.dirty;
}
// This is the core of a RenderGraph, which is building the actual graph with
// subpasses and target images.
fn builder(
&mut self,
factory: &mut Factory<DefaultBackend>,
res: &Resources,
| {
//amethyst::start_logger(Default::default());
amethyst::Logger::from_config(Default::default())
.level_for("gfx_backend_vulkan", amethyst::LogLevelFilter::Warn)
.level_for("rendy_factory::factory", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::dynamic",
amethyst::LogLevelFilter::Warn,
)
.level_for(
"rendy_graph::node::render::pass",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_graph::node::present", amethyst::LogLevelFilter::Warn)
.level_for("rendy_graph::graph", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::linear",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_wsi", amethyst::LogLevelFilter::Warn) | identifier_body |
main.rs | prelude::*,
renderer::{
formats::texture::ImageFormat,
pass::{DrawDebugLinesDesc, DrawFlat2DDesc},
rendy::{
factory::Factory,
graph::{
render::{RenderGroupDesc, SubpassBuilder},
GraphBuilder,
},
hal::{format::Format, image},
mesh::{Normal, Position, TexCoord},
},
sprite::{SpriteRender, SpriteSheet, SpriteSheetFormat, SpriteSheetHandle},
types::DefaultBackend,
GraphCreator,
RenderingSystem,
Texture,
},
ui::UiBundle,
utils::{application_root_dir, scene::BasicScenePrefab},
window::{ScreenDimensions, Window, WindowBundle},
};
use amethyst_physics::PhysicsBundle;
use specs_physics::{
bodies::BodyStatus,
colliders::Shape,
PhysicsBody,
PhysicsBodyBuilder,
PhysicsColliderBuilder,
};
pub type GamePrefabData = BasicScenePrefab<(Vec<Position>, Vec<Normal>, Vec<TexCoord>)>;
/// The Player `Resources` contains player relevant data and holds a reference
/// to the `Entity` that defines the player.
#[derive(Debug)]
pub struct Player {
/// The player `Entity`.
pub player: Entity,
}
#[derive(Default)]
struct GameState<'a, 'b> {
/// `State` specific dispatcher.
dispatcher: Option<Dispatcher<'a, 'b>>,
}
impl<'a, 'b> SimpleState for GameState<'a, 'b> {
fn on_start(&mut self, data: StateData<GameData>) {
info!("GameState.on_start");
let world = data.world;
// load scene handle
let scene_handle = world.exec(|loader: PrefabLoader<'_, GamePrefabData>| {
loader.load("prefab/scene.ron", RonFormat, ())
});
// load sprite sheets
let character_handle =
self.load_sprite_sheet("texture/character.png", "texture/character.ron", world);
let objects_handle =
self.load_sprite_sheet("texture/objects.png", "texture/objects.ron", world);
// create dispatcher
self.create_dispatcher(world);
// initialise scene
world.create_entity().with(scene_handle.clone()).build();
// create player Entity
let player = world
.create_entity()
.with(SpriteRender {
sprite_sheet: character_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Dynamic).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
22.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(25.0, 50.0, 0.0)))
.build();
// create the player Resource
world.add_resource(Player { player });
// create obstacle Entity
world
.create_entity()
.with(SpriteRender {
sprite_sheet: objects_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Static).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
16.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(75.0, 50.0, 0.0)))
.build();
}
fn fixed_update(&mut self, data: StateData<GameData>) -> SimpleTrans {
if let Some(dispatcher) = self.dispatcher.as_mut() {
dispatcher.dispatch(&data.world.res);
}
Trans::None
}
}
impl<'a, 'b> GameState<'a, 'b> {
fn load_sprite_sheet(
&mut self,
texture_path: &str,
ron_path: &str,
world: &mut World,
) -> SpriteSheetHandle {
// Load the sprite sheet necessary to render the graphics.
// The texture is the pixel data
// `sprite_sheet` is the layout of the sprites on the image
// `texture_handle` is a cloneable reference to the texture
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(texture_path, ImageFormat::default(), (), &texture_storage)
};
let loader = world.read_resource::<Loader>();
let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
ron_path, // Here we load the associated ron file
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
/// Creates the `State` specific `Dispatcher`.
fn create_dispatcher(&mut self, world: &mut World) {
if self.dispatcher.is_none() {
let mut dispatcher_builder = DispatcherBuilder::new();
PhysicsBundle::default()
.with_debug_lines()
.build(&mut dispatcher_builder)
.expect("Failed to register PhysicsBundle");
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(&mut world.res);
self.dispatcher = Some(dispatcher);
}
}
}
#[derive(Default)]
struct PlayerMovementSystem;
impl<'s> System<'s> for PlayerMovementSystem {
type SystemData = (
Read<'s, InputHandler<StringBindings>>,
ReadExpect<'s, Player>,
WriteStorage<'s, PhysicsBody<Float>>,
);
fn run(&mut self, data: Self::SystemData) {
let (input, player, mut physics_bodies) = data;
if let Some(physics_body) = physics_bodies.get_mut(player.player) {
// handle movement on X axis
if let Some(movement) = input.axis_value("leftright") {
physics_body.velocity.x = movement.into();
}
// handle movement on Y axis
if let Some(movement) = input.axis_value("updown") {
physics_body.velocity.y = movement.into();
}
}
}
}
fn main() -> amethyst::Result<()> {
//amethyst::start_logger(Default::default());
amethyst::Logger::from_config(Default::default())
.level_for("gfx_backend_vulkan", amethyst::LogLevelFilter::Warn)
.level_for("rendy_factory::factory", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::dynamic",
amethyst::LogLevelFilter::Warn,
)
.level_for(
"rendy_graph::node::render::pass",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_graph::node::present", amethyst::LogLevelFilter::Warn)
.level_for("rendy_graph::graph", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::linear",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_wsi", amethyst::LogLevelFilter::Warn)
.start();
let app_root = application_root_dir()?;
// display configuration
let display_config_path = app_root.join("examples/resources/display_config.ron");
// key bindings
let key_bindings_path = app_root.join("examples/resources/input.ron");
let game_data = GameDataBuilder::default()
.with_bundle(WindowBundle::from_config_path(display_config_path))?
.with_bundle(TransformBundle::new())?
.with_bundle(
InputBundle::<StringBindings>::new().with_bindings_from_file(key_bindings_path)?,
)?
.with_bundle(UiBundle::<DefaultBackend, StringBindings>::new())?
//.with_bundle(PhysicsBundle::default().with_debug_lines())?
.with(
Processor::<SpriteSheet>::new(),
"sprite_sheet_processor",
&[],
)
.with(PrefabLoaderSystem::<GamePrefabData>::default(), "", &[])
.with(
PlayerMovementSystem::default(),
"player_movement_system",
&[],
)
.with_thread_local(RenderingSystem::<DefaultBackend, _>::new(
ExampleGraph::default(),
));
let mut game = Application::build(app_root.join("examples/assets"), GameState::default())?
.build(game_data)?;
game.run();
Ok(())
}
// This graph structure is used for creating a proper `RenderGraph` for
// rendering. A renderGraph can be thought of as the stages during a render
// pass. In our case, we are only executing one subpass (DrawFlat2D, or the
// sprite pass). This graph also needs to be rebuilt whenever the window is
// resized, so the boilerplate code for that operation is also here.
#[derive(Default)]
struct ExampleGraph {
dimensions: Option<ScreenDimensions>,
surface_format: Option<Format>,
dirty: bool,
}
impl GraphCreator<DefaultBackend> for ExampleGraph {
// This trait method reports to the renderer if the graph must be rebuilt,
// usually because the window has been resized. This implementation checks
// the screen size and returns true if it has changed.
fn rebuild(&mut self, res: &Resources) -> bool {
// Rebuild when dimensions change, but wait until at least two frames have the
// same.
let new_dimensions = res.try_fetch::<ScreenDimensions>();
use std::ops::Deref;
if | System,
SystemData,
WriteStorage,
},
input::{InputBundle, InputHandler, StringBindings}, | random_line_split |
|
main.rs | sprite::{SpriteRender, SpriteSheet, SpriteSheetFormat, SpriteSheetHandle},
types::DefaultBackend,
GraphCreator,
RenderingSystem,
Texture,
},
ui::UiBundle,
utils::{application_root_dir, scene::BasicScenePrefab},
window::{ScreenDimensions, Window, WindowBundle},
};
use amethyst_physics::PhysicsBundle;
use specs_physics::{
bodies::BodyStatus,
colliders::Shape,
PhysicsBody,
PhysicsBodyBuilder,
PhysicsColliderBuilder,
};
pub type GamePrefabData = BasicScenePrefab<(Vec<Position>, Vec<Normal>, Vec<TexCoord>)>;
/// The Player `Resources` contains player relevant data and holds a reference
/// to the `Entity` that defines the player.
#[derive(Debug)]
pub struct Player {
/// The player `Entity`.
pub player: Entity,
}
#[derive(Default)]
struct GameState<'a, 'b> {
/// `State` specific dispatcher.
dispatcher: Option<Dispatcher<'a, 'b>>,
}
impl<'a, 'b> SimpleState for GameState<'a, 'b> {
fn on_start(&mut self, data: StateData<GameData>) {
info!("GameState.on_start");
let world = data.world;
// load scene handle
let scene_handle = world.exec(|loader: PrefabLoader<'_, GamePrefabData>| {
loader.load("prefab/scene.ron", RonFormat, ())
});
// load sprite sheets
let character_handle =
self.load_sprite_sheet("texture/character.png", "texture/character.ron", world);
let objects_handle =
self.load_sprite_sheet("texture/objects.png", "texture/objects.ron", world);
// create dispatcher
self.create_dispatcher(world);
// initialise scene
world.create_entity().with(scene_handle.clone()).build();
// create player Entity
let player = world
.create_entity()
.with(SpriteRender {
sprite_sheet: character_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Dynamic).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
22.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(25.0, 50.0, 0.0)))
.build();
// create the player Resource
world.add_resource(Player { player });
// create obstacle Entity
world
.create_entity()
.with(SpriteRender {
sprite_sheet: objects_handle.clone(),
sprite_number: 0,
})
.with(PhysicsBodyBuilder::<Float>::from(BodyStatus::Static).build())
.with(
PhysicsColliderBuilder::<Float>::from(Shape::Rectangle(
15.0.into(),
16.0.into(),
1.0.into(),
))
.build(),
)
.with(Transform::from(Vector3::new(75.0, 50.0, 0.0)))
.build();
}
fn fixed_update(&mut self, data: StateData<GameData>) -> SimpleTrans {
if let Some(dispatcher) = self.dispatcher.as_mut() |
Trans::None
}
}
impl<'a, 'b> GameState<'a, 'b> {
fn load_sprite_sheet(
&mut self,
texture_path: &str,
ron_path: &str,
world: &mut World,
) -> SpriteSheetHandle {
// Load the sprite sheet necessary to render the graphics.
// The texture is the pixel data
// `sprite_sheet` is the layout of the sprites on the image
// `texture_handle` is a cloneable reference to the texture
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(texture_path, ImageFormat::default(), (), &texture_storage)
};
let loader = world.read_resource::<Loader>();
let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
ron_path, // Here we load the associated ron file
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
/// Creates the `State` specific `Dispatcher`.
fn create_dispatcher(&mut self, world: &mut World) {
if self.dispatcher.is_none() {
let mut dispatcher_builder = DispatcherBuilder::new();
PhysicsBundle::default()
.with_debug_lines()
.build(&mut dispatcher_builder)
.expect("Failed to register PhysicsBundle");
let mut dispatcher = dispatcher_builder.build();
dispatcher.setup(&mut world.res);
self.dispatcher = Some(dispatcher);
}
}
}
#[derive(Default)]
struct PlayerMovementSystem;
impl<'s> System<'s> for PlayerMovementSystem {
type SystemData = (
Read<'s, InputHandler<StringBindings>>,
ReadExpect<'s, Player>,
WriteStorage<'s, PhysicsBody<Float>>,
);
fn run(&mut self, data: Self::SystemData) {
let (input, player, mut physics_bodies) = data;
if let Some(physics_body) = physics_bodies.get_mut(player.player) {
// handle movement on X axis
if let Some(movement) = input.axis_value("leftright") {
physics_body.velocity.x = movement.into();
}
// handle movement on Y axis
if let Some(movement) = input.axis_value("updown") {
physics_body.velocity.y = movement.into();
}
}
}
}
fn main() -> amethyst::Result<()> {
//amethyst::start_logger(Default::default());
amethyst::Logger::from_config(Default::default())
.level_for("gfx_backend_vulkan", amethyst::LogLevelFilter::Warn)
.level_for("rendy_factory::factory", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::dynamic",
amethyst::LogLevelFilter::Warn,
)
.level_for(
"rendy_graph::node::render::pass",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_graph::node::present", amethyst::LogLevelFilter::Warn)
.level_for("rendy_graph::graph", amethyst::LogLevelFilter::Warn)
.level_for(
"rendy_memory::allocator::linear",
amethyst::LogLevelFilter::Warn,
)
.level_for("rendy_wsi", amethyst::LogLevelFilter::Warn)
.start();
let app_root = application_root_dir()?;
// display configuration
let display_config_path = app_root.join("examples/resources/display_config.ron");
// key bindings
let key_bindings_path = app_root.join("examples/resources/input.ron");
let game_data = GameDataBuilder::default()
.with_bundle(WindowBundle::from_config_path(display_config_path))?
.with_bundle(TransformBundle::new())?
.with_bundle(
InputBundle::<StringBindings>::new().with_bindings_from_file(key_bindings_path)?,
)?
.with_bundle(UiBundle::<DefaultBackend, StringBindings>::new())?
//.with_bundle(PhysicsBundle::default().with_debug_lines())?
.with(
Processor::<SpriteSheet>::new(),
"sprite_sheet_processor",
&[],
)
.with(PrefabLoaderSystem::<GamePrefabData>::default(), "", &[])
.with(
PlayerMovementSystem::default(),
"player_movement_system",
&[],
)
.with_thread_local(RenderingSystem::<DefaultBackend, _>::new(
ExampleGraph::default(),
));
let mut game = Application::build(app_root.join("examples/assets"), GameState::default())?
.build(game_data)?;
game.run();
Ok(())
}
// This graph structure is used for creating a proper `RenderGraph` for
// rendering. A renderGraph can be thought of as the stages during a render
// pass. In our case, we are only executing one subpass (DrawFlat2D, or the
// sprite pass). This graph also needs to be rebuilt whenever the window is
// resized, so the boilerplate code for that operation is also here.
#[derive(Default)]
struct ExampleGraph {
dimensions: Option<ScreenDimensions>,
surface_format: Option<Format>,
dirty: bool,
}
impl GraphCreator<DefaultBackend> for ExampleGraph {
// This trait method reports to the renderer if the graph must be rebuilt,
// usually because the window has been resized. This implementation checks
// the screen size and returns true if it has changed.
fn rebuild(&mut self, res: &Resources) -> bool {
// Rebuild when dimensions change, but wait until at least two frames have the
// same.
let new_dimensions = res.try_fetch::<ScreenDimensions>();
use std::ops::Deref;
if self.dimensions.as_ref() != new_dimensions.as_ref().map(|d| d.deref()) {
self.dirty = true;
self.dimensions = new_dimensions.map(|d| d.clone());
return false;
}
return self.dirty;
}
// This is the core of a RenderGraph, which is building the actual graph with
// subpasses and target images.
fn builder(
&mut self,
factory: &mut Factory<DefaultBackend>,
res: &Resources,
| {
dispatcher.dispatch(&data.world.res);
} | conditional_block |
awscache.go | AWSClients struct {
session *session.Session
cleanup *cleanup.Cleanup
pollInterval time.Duration
accountID oncecache.StringCache
myToken string
mu sync.Mutex
}
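// token lazily generates a single client request token for this AWSClients
// instance and reuses it for the CloudFormation calls issued later, so the
// create/execute/delete operations from one run share the same ClientRequestToken.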
func (a *AWSClients) token() string {
a.mu.Lock()
defer a.mu.Unlock()
if a.myToken == "" {
a.myToken = strconv.FormatInt(time.Now().UnixNano(), 16)
}
return a.myToken
}
func (a *AWSClients) Region() string {
return *a.session.Config.Region
}
func (a *AWSClients) AccountID() (string, error) {
return a.accountID.Do(func() (string, error) {
stsClient := sts.New(a.session)
out, err := stsClient.GetCallerIdentity(&sts.GetCallerIdentityInput{})
if err != nil {
return "", errors.Wrap(err, "unable to fetch identity ID")
}
return *out.Account, nil
})
}
func (a *AWSClients) DescribeStack(ctx context.Context, name string) (*cloudformation.Stack, error) {
cf := cloudformation.New(a.session)
res, err := cf.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: &name,
})
if err != nil {
if strings.Contains(err.Error(), "does not exist") {
return nil, nil
}
return nil, errors.Wrapf(err, "unable to describe stack %s", name)
}
if len(res.Stacks) == 0 {
return nil, nil
}
return res.Stacks[0], nil
}
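// guessChangesetType resolves the special "GUESS" ChangeSetType: it describes the
// stack and sets the type to CREATE when the stack does not yet exist, or UPDATE
// when it does. Inputs with any other ChangeSetType are returned unchanged.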
func guessChangesetType(ctx context.Context, cloudformationClient *cloudformation.CloudFormation, in *cloudformation.CreateChangeSetInput) *cloudformation.CreateChangeSetInput {
if in == nil || in.ChangeSetType == nil {
return in
}
if *in.ChangeSetType != "GUESS" {
return in
}
_, err := cloudformationClient.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: in.StackName,
})
if err != nil {
// stack does not exist (probably)
in.ChangeSetType = aws.String("CREATE")
} else {
in.ChangeSetType = aws.String("UPDATE")
}
return in
}
func isAlreadyExistsException(err error) bool {
return isAWSError(err, "AlreadyExistsException")
}
func isAWSError(err error, code string) bool {
if err == nil {
return false
}
r := errors.Cause(err)
if ae, ok := r.(awserr.Error); ok {
return ae.Code() == code
}
return strings.Contains(r.Error(), code)
}
func (a *AWSClients) createChangeset(ctx context.Context, cf *cloudformation.CloudFormation, in *cloudformation.CreateChangeSetInput, hasAlreadyDeletedChangeSet bool) (*cloudformation.CreateChangeSetOutput, error) {
res, err := cf.CreateChangeSetWithContext(ctx, in)
if err == nil {
return res, nil
}
if !hasAlreadyDeletedChangeSet && isAlreadyExistsException(err) {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: in.ChangeSetName,
StackName: in.StackName,
})
if err != nil {
return nil, errors.Wrap(err, "deleting changeset failed")
}
return a.createChangeset(ctx, cf, in, true)
}
return nil, errors.Wrap(err, "unable to create changeset")
}
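// stringsReplaceAllRepeated replaces old with new repeatedly until a full pass no
// longer changes the string length, so patterns re-created by a previous pass
// (e.g. ".." produced while collapsing "...") are also collapsed.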
func stringsReplaceAllRepeated(s string, old string, new string) string {
for len(s) > 0 {
prev := len(s)
s = strings.Replace(s, old, new, -1)
if prev == len(s) {
return s
}
}
return s
}
func sanitizeBucketName(s string) string {
// from https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-s3-bucket-naming-requirements.html
s = strings.ToLower(s)
s = strings.Map(func(r rune) rune {
switch {
case r >= 'a' && r <= 'z':
return r
case r >= '0' && r <= '9':
return r
case r == '.' || r == '-':
return r
}
return '-'
}, s)
if len(s) < 3 {
s = "aaa"
}
if s[0] == '-' || s[0] == '.' {
s = "a" + s
}
s = strings.TrimSuffix(s, "-")
s = stringsReplaceAllRepeated(s, "..", ".")
s = stringsReplaceAllRepeated(s, ".-", "-")
s = stringsReplaceAllRepeated(s, "-.", "-")
return s
}
func (a *AWSClients) FixTemplateBody(ctx context.Context, in *cloudformation.CreateChangeSetInput, bucket string, logger *logger.Logger) error {
if in.TemplateBody == nil {
return nil
}
tb := *in.TemplateBody
// Actual number is 51200 but we give ourselves some buffer
if len(tb) < 51100 {
return nil
}
logger.Log(1, "template body too large (%d): setting in s3", len(tb))
if bucket == "" {
bucket = sanitizeBucketName(fmt.Sprintf("cfmanage_%s", *in.StackName))
logger.Log(1, "Making bucket %s because no bucket set", bucket)
clients3 := s3.New(a.session)
out, err := clients3.CreateBucket(&s3.CreateBucketInput{
Bucket: &bucket,
})
if err != nil | else {
logger.Log(1, "Bucket created with URL %s", *out.Location)
}
}
uploader := s3manager.NewUploader(a.session)
itemKey := fmt.Sprintf("cfmanage_%s_%s", *in.StackName, time.Now().UTC())
out, err := uploader.UploadWithContext(ctx, &s3manager.UploadInput{
Bucket: &bucket,
Key: &itemKey,
Body: strings.NewReader(tb),
})
if err != nil {
return errors.Wrapf(err, "unable to upload body to bucket %s", bucket)
}
logger.Log(1, "template body uploaded to %s", out.Location)
in.TemplateBody = nil
in.TemplateURL = &out.Location
a.cleanup.Add(func(ctx context.Context) error {
logger.Log(2, "Cleaning up %s/%s", bucket, itemKey)
clients3 := s3.New(a.session)
_, err := clients3.DeleteObject(&s3.DeleteObjectInput{
Bucket: &bucket,
Key: &itemKey,
})
return errors.Wrapf(err, "Unable to delete bucket=%s key=%s", bucket, itemKey)
})
return nil
}
func (a *AWSClients) CreateChangesetWaitForStatus(ctx context.Context, in *cloudformation.CreateChangeSetInput, existingStack *cloudformation.Stack, logger *logger.Logger) (*cloudformation.DescribeChangeSetOutput, error) {
if in.ChangeSetName == nil {
in.ChangeSetName = aws.String("A" + strconv.FormatInt(time.Now().UnixNano(), 16))
}
in.ClientToken = aws.String(a.token())
cf := cloudformation.New(a.session)
in = guessChangesetType(ctx, cf, in)
res, err := a.createChangeset(ctx, cf, in, false)
if err != nil {
return nil, errors.Wrap(err, "creating changeset failed")
}
a.cleanup.Add(func(ctx context.Context) error {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: res.Id,
})
return err
})
if existingStack == nil {
// Clean up the stack created by the changeset
a.cleanup.Add(func(ctx context.Context) error {
finishingStack, err := a.DescribeStack(ctx, *in.StackName)
if err != nil {
return errors.Wrapf(err, "unable to describe stack %s", *in.StackName)
}
if *finishingStack.StackStatus == "REVIEW_IN_PROGRESS" {
_, err := cf.DeleteStack(&cloudformation.DeleteStackInput{
ClientRequestToken: aws.String(a.token()),
StackName: in.StackName,
})
return errors.Wrapf(err, "unable to delete stack %s", *in.StackName)
}
return nil
})
}
return a.waitForChangesetToFinishCreating(ctx, cf, *res.Id, logger, nil)
}
func (a *AWSClients) ExecuteChangeset(ctx context.Context, changesetARN string) error {
cf := cloudformation.New(a.session)
_, err := cf.ExecuteChangeSetWithContext(ctx, &cloudformation.ExecuteChangeSetInput{
ChangeSetName: &changesetARN,
ClientRequestToken: aws.String(a.token()),
})
return errors.Wrapf(err, "unable to execute changeset %s", changesetARN)
}
func (a *AWSClients) CancelStackUpdate(ctx context.Context, stackName string) error | {
if !isAWSError(err, "BucketAlreadyOwnedByYou") {
return errors.Wrapf(err, "unable to create bucket %s correctly", bucket)
}
logger.Log(1, "bucket already owend by you")
} | conditional_block |
awscache.go | "..", ".")
s = stringsReplaceAllRepeated(s, ".-", "-")
s = stringsReplaceAllRepeated(s, "-.", "-")
return s
}
func (a *AWSClients) FixTemplateBody(ctx context.Context, in *cloudformation.CreateChangeSetInput, bucket string, logger *logger.Logger) error {
if in.TemplateBody == nil {
return nil
}
tb := *in.TemplateBody
// Actual number is 51200 but we give ourselves some buffer
if len(tb) < 51100 {
return nil
}
logger.Log(1, "template body too large (%d): setting in s3", len(tb))
if bucket == "" {
bucket = sanitizeBucketName(fmt.Sprintf("cfmanage_%s", *in.StackName))
logger.Log(1, "Making bucket %s because no bucket set", bucket)
clients3 := s3.New(a.session)
out, err := clients3.CreateBucket(&s3.CreateBucketInput{
Bucket: &bucket,
})
if err != nil {
if !isAWSError(err, "BucketAlreadyOwnedByYou") {
return errors.Wrapf(err, "unable to create bucket %s correctly", bucket)
}
logger.Log(1, "bucket already owend by you")
} else {
logger.Log(1, "Bucket created with URL %s", *out.Location)
}
}
uploader := s3manager.NewUploader(a.session)
itemKey := fmt.Sprintf("cfmanage_%s_%s", *in.StackName, time.Now().UTC())
out, err := uploader.UploadWithContext(ctx, &s3manager.UploadInput{
Bucket: &bucket,
Key: &itemKey,
Body: strings.NewReader(tb),
})
if err != nil {
return errors.Wrapf(err, "unable to upload body to bucket %s", bucket)
}
logger.Log(1, "template body uploaded to %s", out.Location)
in.TemplateBody = nil
in.TemplateURL = &out.Location
a.cleanup.Add(func(ctx context.Context) error {
logger.Log(2, "Cleaning up %s/%s", bucket, itemKey)
clients3 := s3.New(a.session)
_, err := clients3.DeleteObject(&s3.DeleteObjectInput{
Bucket: &bucket,
Key: &itemKey,
})
return errors.Wrapf(err, "Unable to delete bucket=%s key=%s", bucket, itemKey)
})
return nil
}
func (a *AWSClients) CreateChangesetWaitForStatus(ctx context.Context, in *cloudformation.CreateChangeSetInput, existingStack *cloudformation.Stack, logger *logger.Logger) (*cloudformation.DescribeChangeSetOutput, error) {
if in.ChangeSetName == nil {
in.ChangeSetName = aws.String("A" + strconv.FormatInt(time.Now().UnixNano(), 16))
}
in.ClientToken = aws.String(a.token())
cf := cloudformation.New(a.session)
in = guessChangesetType(ctx, cf, in)
res, err := a.createChangeset(ctx, cf, in, false)
if err != nil {
return nil, errors.Wrap(err, "creating changeset failed")
}
a.cleanup.Add(func(ctx context.Context) error {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: res.Id,
})
return err
})
if existingStack == nil {
// Clean up the stack created by the changeset
a.cleanup.Add(func(ctx context.Context) error {
finishingStack, err := a.DescribeStack(ctx, *in.StackName)
if err != nil {
return errors.Wrapf(err, "unable to describe stack %s", *in.StackName)
}
if *finishingStack.StackStatus == "REVIEW_IN_PROGRESS" {
_, err := cf.DeleteStack(&cloudformation.DeleteStackInput{
ClientRequestToken: aws.String(a.token()),
StackName: in.StackName,
})
return errors.Wrapf(err, "unable to delete stack %s", *in.StackName)
}
return nil
})
}
return a.waitForChangesetToFinishCreating(ctx, cf, *res.Id, logger, nil)
}
func (a *AWSClients) ExecuteChangeset(ctx context.Context, changesetARN string) error {
cf := cloudformation.New(a.session)
_, err := cf.ExecuteChangeSetWithContext(ctx, &cloudformation.ExecuteChangeSetInput{
ChangeSetName: &changesetARN,
ClientRequestToken: aws.String(a.token()),
})
return errors.Wrapf(err, "unable to execute changeset %s", changesetARN)
}
func (a *AWSClients) CancelStackUpdate(ctx context.Context, stackName string) error {
cf := cloudformation.New(a.session)
_, err := cf.CancelUpdateStackWithContext(ctx, &cloudformation.CancelUpdateStackInput{
// Note: Stack cancels should *not* use the same client request token as the create request
StackName: &stackName,
})
return errors.Wrapf(err, "unable to cancel stack update to %s", stackName)
}
func isThrottleError(err error) bool {
if err == nil {
return false
}
return strings.Contains(errors.Cause(err).Error(), "Throttling")
}
func (a *AWSClients) waitForChangesetToFinishCreating(ctx context.Context, cloudformationClient *cloudformation.CloudFormation, changesetARN string, logger *logger.Logger, cleanShutdown <-chan struct{}) (*cloudformation.DescribeChangeSetOutput, error) {
lastChangesetStatus := ""
backoff := aimd.Aimd{
Min: a.getPollInterval(),
}
for {
select {
case <-time.After(backoff.Get()):
case <-ctx.Done():
return nil, errors.Wrapf(ctx.Err(), "context died waiting for changeset %s", changesetARN)
case <-cleanShutdown:
return nil, nil
}
out, err := cloudformationClient.DescribeChangeSetWithContext(ctx, &cloudformation.DescribeChangeSetInput{
ChangeSetName: &changesetARN,
})
if err != nil {
if isThrottleError(err) {
backoff.OnError()
continue
}
return nil, errors.Wrapf(err, "unable to describe changeset %s", changesetARN)
}
backoff.OnOk()
stat := emptyOnNil(out.Status)
if stat != lastChangesetStatus {
logger.Log(1, "ChangeSet status set to %s: %s", stat, emptyOnNil(out.StatusReason))
lastChangesetStatus = stat
}
// All terminal states
if stat == "CREATE_COMPLETE" || stat == "FAILED" || stat == "DELETE_COMPLETE" {
return out, nil
}
}
}
func (a *AWSClients) getPollInterval() time.Duration {
if a.pollInterval == 0 {
return time.Second
}
return a.pollInterval
}
// waitForTerminalState loops forever until either the context ends, or something fails
func (a *AWSClients) WaitForTerminalState(ctx context.Context, stackID string, log *logger.Logger) error {
lastStackStatus := ""
cfClient := cloudformation.New(a.session)
backoff := aimd.Aimd{
Min: a.getPollInterval(),
}
for {
select {
case <-ctx.Done():
return errors.Wrap(ctx.Err(), "context died waiting for terminal state")
case <-time.After(backoff.Get()):
}
descOut, err := cfClient.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: &stackID,
})
if err != nil {
if isThrottleError(err) {
backoff.OnError()
continue
}
return errors.Wrapf(err, "unable to describe stack %s", stackID)
}
backoff.OnOk()
if len(descOut.Stacks) != 1 {
return errors.Errorf("unable to correctly find stack %s", stackID)
}
thisStack := descOut.Stacks[0]
if *thisStack.StackStatus != lastStackStatus {
log.Log(1, "Stack status set to %s: %s", *thisStack.StackStatus, emptyOnNil(thisStack.StackStatusReason))
lastStackStatus = *thisStack.StackStatus
}
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-describing-stacks.html
terminalFailureStatusStates := map[string]struct{}{
"CREATE_FAILED": {},
"DELETE_FAILED": {},
"ROLLBACK_FAILED": {},
"UPDATE_ROLLBACK_FAILED": {},
"ROLLBACK_COMPLETE": {},
"UPDATE_ROLLBACK_COMPLETE": {},
}
if _, exists := terminalFailureStatusStates[emptyOnNil(thisStack.StackStatus)]; exists {
return errors.Errorf("Terminal stack state failure: %s %s", emptyOnNil(thisStack.StackStatus), emptyOnNil(thisStack.StackStatusReason))
}
terminalOkStatusStates := map[string]struct{}{
"CREATE_COMPLETE": {},
"DELETE_COMPLETE": {},
"UPDATE_COMPLETE": {},
}
if _, exists := terminalOkStatusStates[emptyOnNil(thisStack.StackStatus)]; exists {
return nil
}
}
}
func | emptyOnNil | identifier_name |
|
awscache.go | AWSClients struct {
session *session.Session
cleanup *cleanup.Cleanup
pollInterval time.Duration
accountID oncecache.StringCache
myToken string
mu sync.Mutex
}
func (a *AWSClients) token() string {
a.mu.Lock()
defer a.mu.Unlock()
if a.myToken == "" {
a.myToken = strconv.FormatInt(time.Now().UnixNano(), 16)
}
return a.myToken
}
func (a *AWSClients) Region() string |
func (a *AWSClients) AccountID() (string, error) {
return a.accountID.Do(func() (string, error) {
stsClient := sts.New(a.session)
out, err := stsClient.GetCallerIdentity(&sts.GetCallerIdentityInput{})
if err != nil {
return "", errors.Wrap(err, "unable to fetch identity ID")
}
return *out.Account, nil
})
}
func (a *AWSClients) DescribeStack(ctx context.Context, name string) (*cloudformation.Stack, error) {
cf := cloudformation.New(a.session)
res, err := cf.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: &name,
})
if err != nil {
if strings.Contains(err.Error(), "does not exist") {
return nil, nil
}
return nil, errors.Wrapf(err, "unable to describe stack %s", name)
}
if len(res.Stacks) == 0 {
return nil, nil
}
return res.Stacks[0], nil
}
func guessChangesetType(ctx context.Context, cloudformationClient *cloudformation.CloudFormation, in *cloudformation.CreateChangeSetInput) *cloudformation.CreateChangeSetInput {
if in == nil || in.ChangeSetType == nil {
return in
}
if *in.ChangeSetType != "GUESS" {
return in
}
_, err := cloudformationClient.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: in.StackName,
})
if err != nil {
// stack does not exist (probably)
in.ChangeSetType = aws.String("CREATE")
} else {
in.ChangeSetType = aws.String("UPDATE")
}
return in
}
func isAlreadyExistsException(err error) bool {
return isAWSError(err, "AlreadyExistsException")
}
func isAWSError(err error, code string) bool {
if err == nil {
return false
}
r := errors.Cause(err)
if ae, ok := r.(awserr.Error); ok {
return ae.Code() == code
}
return strings.Contains(r.Error(), code)
}
func (a *AWSClients) createChangeset(ctx context.Context, cf *cloudformation.CloudFormation, in *cloudformation.CreateChangeSetInput, hasAlreadyDeletedChangeSet bool) (*cloudformation.CreateChangeSetOutput, error) {
res, err := cf.CreateChangeSetWithContext(ctx, in)
if err == nil {
return res, nil
}
if !hasAlreadyDeletedChangeSet && isAlreadyExistsException(err) {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: in.ChangeSetName,
StackName: in.StackName,
})
if err != nil {
return nil, errors.Wrap(err, "deleting changeset failed")
}
return a.createChangeset(ctx, cf, in, true)
}
return nil, errors.Wrap(err, "unable to create changeset")
}
func stringsReplaceAllRepeated(s string, old string, new string) string {
for len(s) > 0 {
prev := len(s)
s = strings.Replace(s, old, new, -1)
if prev == len(s) {
return s
}
}
return s
}
func sanitizeBucketName(s string) string {
// from https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-s3-bucket-naming-requirements.html
s = strings.ToLower(s)
s = strings.Map(func(r rune) rune {
switch {
case r >= 'a' && r <= 'z':
return r
case r >= '0' && r <= '9':
return r
case r == '.' || r == '-':
return r
}
return '-'
}, s)
if len(s) < 3 {
s = "aaa"
}
if s[0] == '-' || s[0] == '.' {
s = "a" + s
}
s = strings.TrimSuffix(s, "-")
s = stringsReplaceAllRepeated(s, "..", ".")
s = stringsReplaceAllRepeated(s, ".-", "-")
s = stringsReplaceAllRepeated(s, "-.", "-")
return s
}
func (a *AWSClients) FixTemplateBody(ctx context.Context, in *cloudformation.CreateChangeSetInput, bucket string, logger *logger.Logger) error {
if in.TemplateBody == nil {
return nil
}
tb := *in.TemplateBody
// Actual number is 51200 but we give ourselves some buffer
if len(tb) < 51100 {
return nil
}
logger.Log(1, "template body too large (%d): setting in s3", len(tb))
if bucket == "" {
bucket = sanitizeBucketName(fmt.Sprintf("cfmanage_%s", *in.StackName))
logger.Log(1, "Making bucket %s because no bucket set", bucket)
clients3 := s3.New(a.session)
out, err := clients3.CreateBucket(&s3.CreateBucketInput{
Bucket: &bucket,
})
if err != nil {
if !isAWSError(err, "BucketAlreadyOwnedByYou") {
return errors.Wrapf(err, "unable to create bucket %s correctly", bucket)
}
logger.Log(1, "bucket already owend by you")
} else {
logger.Log(1, "Bucket created with URL %s", *out.Location)
}
}
uploader := s3manager.NewUploader(a.session)
itemKey := fmt.Sprintf("cfmanage_%s_%s", *in.StackName, time.Now().UTC())
out, err := uploader.UploadWithContext(ctx, &s3manager.UploadInput{
Bucket: &bucket,
Key: &itemKey,
Body: strings.NewReader(tb),
})
if err != nil {
return errors.Wrapf(err, "unable to upload body to bucket %s", bucket)
}
logger.Log(1, "template body uploaded to %s", out.Location)
in.TemplateBody = nil
in.TemplateURL = &out.Location
a.cleanup.Add(func(ctx context.Context) error {
logger.Log(2, "Cleaning up %s/%s", bucket, itemKey)
clients3 := s3.New(a.session)
_, err := clients3.DeleteObject(&s3.DeleteObjectInput{
Bucket: &bucket,
Key: &itemKey,
})
return errors.Wrapf(err, "Unable to delete bucket=%s key=%s", bucket, itemKey)
})
return nil
}
func (a *AWSClients) CreateChangesetWaitForStatus(ctx context.Context, in *cloudformation.CreateChangeSetInput, existingStack *cloudformation.Stack, logger *logger.Logger) (*cloudformation.DescribeChangeSetOutput, error) {
if in.ChangeSetName == nil {
in.ChangeSetName = aws.String("A" + strconv.FormatInt(time.Now().UnixNano(), 16))
}
in.ClientToken = aws.String(a.token())
cf := cloudformation.New(a.session)
in = guessChangesetType(ctx, cf, in)
res, err := a.createChangeset(ctx, cf, in, false)
if err != nil {
return nil, errors.Wrap(err, "creating changeset failed")
}
a.cleanup.Add(func(ctx context.Context) error {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: res.Id,
})
return err
})
if existingStack == nil {
// Clean up the stack created by the changeset
a.cleanup.Add(func(ctx context.Context) error {
finishingStack, err := a.DescribeStack(ctx, *in.StackName)
if err != nil {
return errors.Wrapf(err, "unable to describe stack %s", *in.StackName)
}
if *finishingStack.StackStatus == "REVIEW_IN_PROGRESS" {
_, err := cf.DeleteStack(&cloudformation.DeleteStackInput{
ClientRequestToken: aws.String(a.token()),
StackName: in.StackName,
})
return errors.Wrapf(err, "unable to delete stack %s", *in.StackName)
}
return nil
})
}
return a.waitForChangesetToFinishCreating(ctx, cf, *res.Id, logger, nil)
}
func (a *AWSClients) ExecuteChangeset(ctx context.Context, changesetARN string) error {
cf := cloudformation.New(a.session)
_, err := cf.ExecuteChangeSetWithContext(ctx, &cloudformation.ExecuteChangeSetInput{
ChangeSetName: &changesetARN,
ClientRequestToken: aws.String(a.token()),
})
return errors.Wrapf(err, "unable to execute changeset %s", changesetARN)
}
func (a *AWSClients) CancelStackUpdate(ctx context.Context, stackName string) | {
return *a.session.Config.Region
} | identifier_body |
awscache.go | AWSClients struct {
session *session.Session
cleanup *cleanup.Cleanup
pollInterval time.Duration
accountID oncecache.StringCache
myToken string
mu sync.Mutex
}
func (a *AWSClients) token() string {
a.mu.Lock()
defer a.mu.Unlock()
if a.myToken == "" {
a.myToken = strconv.FormatInt(time.Now().UnixNano(), 16)
}
return a.myToken
}
func (a *AWSClients) Region() string { | }
func (a *AWSClients) AccountID() (string, error) {
return a.accountID.Do(func() (string, error) {
stsClient := sts.New(a.session)
out, err := stsClient.GetCallerIdentity(&sts.GetCallerIdentityInput{})
if err != nil {
return "", errors.Wrap(err, "unable to fetch identity ID")
}
return *out.Account, nil
})
}
func (a *AWSClients) DescribeStack(ctx context.Context, name string) (*cloudformation.Stack, error) {
cf := cloudformation.New(a.session)
res, err := cf.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: &name,
})
if err != nil {
if strings.Contains(err.Error(), "does not exist") {
return nil, nil
}
return nil, errors.Wrapf(err, "unable to describe stack %s", name)
}
if len(res.Stacks) == 0 {
return nil, nil
}
return res.Stacks[0], nil
}
func guessChangesetType(ctx context.Context, cloudformationClient *cloudformation.CloudFormation, in *cloudformation.CreateChangeSetInput) *cloudformation.CreateChangeSetInput {
if in == nil || in.ChangeSetType == nil {
return in
}
if *in.ChangeSetType != "GUESS" {
return in
}
_, err := cloudformationClient.DescribeStacksWithContext(ctx, &cloudformation.DescribeStacksInput{
StackName: in.StackName,
})
if err != nil {
// stack does not exist (probably)
in.ChangeSetType = aws.String("CREATE")
} else {
in.ChangeSetType = aws.String("UPDATE")
}
return in
}
func isAlreadyExistsException(err error) bool {
return isAWSError(err, "AlreadyExistsException")
}
func isAWSError(err error, code string) bool {
if err == nil {
return false
}
r := errors.Cause(err)
if ae, ok := r.(awserr.Error); ok {
return ae.Code() == code
}
return strings.Contains(r.Error(), code)
}
func (a *AWSClients) createChangeset(ctx context.Context, cf *cloudformation.CloudFormation, in *cloudformation.CreateChangeSetInput, hasAlreadyDeletedChangeSet bool) (*cloudformation.CreateChangeSetOutput, error) {
res, err := cf.CreateChangeSetWithContext(ctx, in)
if err == nil {
return res, nil
}
if !hasAlreadyDeletedChangeSet && isAlreadyExistsException(err) {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: in.ChangeSetName,
StackName: in.StackName,
})
if err != nil {
return nil, errors.Wrap(err, "deleting changeset failed")
}
return a.createChangeset(ctx, cf, in, true)
}
return nil, errors.Wrap(err, "unable to create changeset")
}
func stringsReplaceAllRepeated(s string, old string, new string) string {
for len(s) > 0 {
prev := len(s)
s = strings.Replace(s, old, new, -1)
if prev == len(s) {
return s
}
}
return s
}
func sanitizeBucketName(s string) string {
// from https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-s3-bucket-naming-requirements.html
s = strings.ToLower(s)
s = strings.Map(func(r rune) rune {
switch {
case r >= 'a' && r <= 'z':
return r
case r >= '0' && r <= '9':
return r
case r == '.' || r == '-':
return r
}
return '-'
}, s)
if len(s) < 3 {
s = "aaa"
}
if s[0] == '-' || s[0] == '.' {
s = "a" + s
}
s = strings.TrimSuffix(s, "-")
s = stringsReplaceAllRepeated(s, "..", ".")
s = stringsReplaceAllRepeated(s, ".-", "-")
s = stringsReplaceAllRepeated(s, "-.", "-")
return s
}
func (a *AWSClients) FixTemplateBody(ctx context.Context, in *cloudformation.CreateChangeSetInput, bucket string, logger *logger.Logger) error {
if in.TemplateBody == nil {
return nil
}
tb := *in.TemplateBody
// Actual number is 51200 but we give ourselves some buffer
if len(tb) < 51100 {
return nil
}
logger.Log(1, "template body too large (%d): setting in s3", len(tb))
if bucket == "" {
bucket = sanitizeBucketName(fmt.Sprintf("cfmanage_%s", *in.StackName))
logger.Log(1, "Making bucket %s because no bucket set", bucket)
clients3 := s3.New(a.session)
out, err := clients3.CreateBucket(&s3.CreateBucketInput{
Bucket: &bucket,
})
if err != nil {
if !isAWSError(err, "BucketAlreadyOwnedByYou") {
return errors.Wrapf(err, "unable to create bucket %s correctly", bucket)
}
logger.Log(1, "bucket already owend by you")
} else {
logger.Log(1, "Bucket created with URL %s", *out.Location)
}
}
uploader := s3manager.NewUploader(a.session)
itemKey := fmt.Sprintf("cfmanage_%s_%s", *in.StackName, time.Now().UTC())
out, err := uploader.UploadWithContext(ctx, &s3manager.UploadInput{
Bucket: &bucket,
Key: &itemKey,
Body: strings.NewReader(tb),
})
if err != nil {
return errors.Wrapf(err, "unable to upload body to bucket %s", bucket)
}
logger.Log(1, "template body uploaded to %s", out.Location)
in.TemplateBody = nil
in.TemplateURL = &out.Location
a.cleanup.Add(func(ctx context.Context) error {
logger.Log(2, "Cleaning up %s/%s", bucket, itemKey)
clients3 := s3.New(a.session)
_, err := clients3.DeleteObject(&s3.DeleteObjectInput{
Bucket: &bucket,
Key: &itemKey,
})
return errors.Wrapf(err, "Unable to delete bucket=%s key=%s", bucket, itemKey)
})
return nil
}
func (a *AWSClients) CreateChangesetWaitForStatus(ctx context.Context, in *cloudformation.CreateChangeSetInput, existingStack *cloudformation.Stack, logger *logger.Logger) (*cloudformation.DescribeChangeSetOutput, error) {
if in.ChangeSetName == nil {
in.ChangeSetName = aws.String("A" + strconv.FormatInt(time.Now().UnixNano(), 16))
}
in.ClientToken = aws.String(a.token())
cf := cloudformation.New(a.session)
in = guessChangesetType(ctx, cf, in)
res, err := a.createChangeset(ctx, cf, in, false)
if err != nil {
return nil, errors.Wrap(err, "creating changeset failed")
}
a.cleanup.Add(func(ctx context.Context) error {
_, err := cf.DeleteChangeSetWithContext(ctx, &cloudformation.DeleteChangeSetInput{
ChangeSetName: res.Id,
})
return err
})
if existingStack == nil {
// Clean up the stack created by the changeset
a.cleanup.Add(func(ctx context.Context) error {
finishingStack, err := a.DescribeStack(ctx, *in.StackName)
if err != nil {
return errors.Wrapf(err, "unable to describe stack %s", *in.StackName)
}
if *finishingStack.StackStatus == "REVIEW_IN_PROGRESS" {
_, err := cf.DeleteStack(&cloudformation.DeleteStackInput{
ClientRequestToken: aws.String(a.token()),
StackName: in.StackName,
})
return errors.Wrapf(err, "unable to delete stack %s", *in.StackName)
}
return nil
})
}
return a.waitForChangesetToFinishCreating(ctx, cf, *res.Id, logger, nil)
}
func (a *AWSClients) ExecuteChangeset(ctx context.Context, changesetARN string) error {
cf := cloudformation.New(a.session)
_, err := cf.ExecuteChangeSetWithContext(ctx, &cloudformation.ExecuteChangeSetInput{
ChangeSetName: &changesetARN,
ClientRequestToken: aws.String(a.token()),
})
return errors.Wrapf(err, "unable to execute changeset %s", changesetARN)
}
func (a *AWSClients) CancelStackUpdate(ctx context.Context, stackName string) error {
cf | return *a.session.Config.Region | random_line_split |
movie-data-analysis.py | genre and get mean for each genre and each variable, divide by 1 mio for clarity and better visibility
md_genre_mean = md_split_genres.groupby(['genres']).mean()
md_genre_mean ['profit_million'] = md_genre_mean['profit']/1000000
del md_genre_mean['profit']
md_genre_mean['revenue_million'] = md_genre_mean['revenue']/1000000
del md_genre_mean['revenue']
md_genre_mean['budget_million'] =md_genre_mean['budget']/1000000
del md_genre_mean['budget']
# Get distribution of mean of variables grouped by genre
md_genre_mean.hist(color='DarkBlue',figsize= (10,10));
All means of variables per genre are skewed. Mean of runtime across genres is closest to being normally distributed.
# Overall Descriptive statistics
md.describe()
# Get movies with highest budget, profit, popularity
md.nlargest(3, 'budget')
md.nlargest(3, 'profit')
md.nlargest(3, 'popularity')
The Warrior's Way had the highest budget with 425 mio USD. Avatar made the most profit with 2,544 mio USD. The most popular movie was Jurassic World.
# Get movies made per year, create new data frame
md_year = pd.DataFrame(md_split_genres.groupby('release_year').original_title.nunique())
md_year.head()
# Get max of movies made per year
md_year.nlargest(5,'original_title')
# Plot data, line chart for showing development over the years
md_year.plot.line(title = 'Movies made per year',color='DarkBlue',figsize=(10, 8));
In this graph we see that over time more and more movies were made per year, starting at just 32 movies in 1960 and rising to 627 in 2015, with a maximum of 699 movies in 2014.
# Get mean of variables grouped by year (new data frame) in order to see what changed
md_year_mean = md_split_genres.groupby('release_year').mean()
# Check results
md_year_mean.head()
# plot the development of revenue, profit and budget of movies over the years
md_year_mean[['revenue','profit','budget']].plot(title = 'Avg. Revenue, Profit and Budget per Year (US$)',color=('DarkBlue','c','crimson'),linestyle=('-'),figsize=(10, 8));
In the chart above we can observe that revenue and profit developed almost in parallel until the early 1980s. From the 1980s onward the budget increases more sharply, and probably as a consequence a gap between revenue and profit starts to emerge. Producing movies got more expensive while simultaneously more and more movies were made and more and more people started to watch them. Thus, during the 1990s revenues kept increasing while profit was dropping. At the end of the 1990s the budget starts decreasing, probably due to technological progress, and therefore profits start to increase again, helped by the fact that still more and more movies are being made.
md_year_mean[['vote_average', 'vote_count']].plot(title = 'Avg. Vote Average and Vote Count per Year',color=('DarkBlue','c'),figsize=(10, 8),secondary_y=['vote_average']);
In this graph we see that vote average is decreasing over the years while the vote count is rising constantly. So more people vote but in general movies are getting worse?! Or people seem to like movies less...
# Lets turn to genres, reminder of what the split looked like
md_split_genres.head()
# How many different genres do we have?
md_split_genres['genres'].unique()
len(md_split_genres['genres'].unique())
Overall, we have movies from 20 unique genres.
### Research Questions
<a id='q1'></a>
#### Q1. Which genres are the most common (number of movies made)?
# Group movies by genre using title as unique identifier and display all genres.
md_genre = (pd.DataFrame(md_split_genres.groupby('genres').original_title.nunique())).sort_values('original_title', ascending=True)
md_genre.head(20) |
# Display in bar chart
md_genre['original_title'].plot.barh(title = 'Movies per Genre',color='DarkBlue', figsize=(10, 9));
The most common genres are Drama (4672 movies, 17.6%) , Comedy (3750 movies, 14.2%) and Thriller (2841 movies, 10.7%).
<a id='q2'></a>
#### Q2. Which genres have high avg. budget and revenue?
# Check results
md_genre_mean.head()
# Sort data in acending order
md_genre_mean.sort_values('budget_million', ascending=True, inplace = True )
# Create bar chart with revenue and budget
md_genre_mean[['revenue_million', 'budget_million']].plot.barh(stacked=False, title = 'Budget and Revenue by Genre (US$ million)',color=('DarkBlue','c'), figsize=(15, 10));
In the graph above we clearly see that the genre Adventure has both the highest avg. budget and revenue. Fantasy comes second in budget and revenue. Interestingly, Animation has the third highest revenue but only the sixth highest budget, meaning Animation movies are on avg. more profitable. Let's look at the profitability of the genres.
<a id='q3'></a>
#### Q3. Which genres have high avg. profit?
md_genre_mean.sort_values('profit_million', ascending=True, inplace = True )
md_genre_mean['profit_million'].plot.barh(stacked=False, title = 'Profit by Genre (US$ million)',color='DarkBlue', figsize=(10, 9));
The top 5 genres in terms of avg. profit are Adventure, Fantasy, Animation, Family and Science Fiction.
<a id='q4'></a>
#### Q4. Which genres have high vote avg.?
md_genre_mean.sort_values('vote_average', ascending=True, inplace = True)
md_genre_mean[['vote_average']].plot.barh(stacked=True, title = 'Voting Avg by Genre',color='DarkBlue', figsize=(10, 9));
Documentaries, Music and History have the highest voting avg. Then comes Animation.
<a id='q5'></a>
#### Q5. Which genres have high avg. popularity?
md_genre_mean.sort_values('popularity', ascending=True, inplace = True)
md_genre_mean[['popularity']].plot.barh(stacked=True, title = 'Genres by Avg Popularity',color='DarkBlue', figsize=(10, 9));
The most popular genres are Adventure, Science Fiction, Fantasy, Action and again Animation.
<a id='q6'></a>
#### Q6. Which genres have high avg. vote count?
md_genre_mean.sort_values('vote_count', ascending=True, inplace = True)
md_genre_mean[['vote_count']].plot.barh(stacked=True, title = 'Genres by Avg Vote Count',color='DarkBlue',figsize=(10, 9));
However, Documentary, Music and History have a relatively low number of votes compared to Adventure, Science Fiction and Fantasy. Then come Action and again Animation.
<a id='q7'></a>
#### Q7. Which genres have high number of movies with an voting avg. >=8?
md_8 = md_split_genres[md_split_genres['vote_average']>=8]
md_8 = (pd.DataFrame(md_8.groupby('genres').original_title.nunique())).sort_values('original_title', ascending=True )
md_8[['original_title']].plot.barh(stacked=True, title = 'Genres with >= 8 ratings', figsize=(10, 9),color='DarkBlue');
The genre Drama has the most movies with a rating of at least 8.
<a id='analysis'></a>
#### Analysis of development of means of variables per genre over the years
# Reminder of how the data frame looked like, when we splitted for genres
md_split_genres.head()
# Create data frame grouped by genres AND release year, get means of variables of interest
md_year_genre_mean = pd.DataFrame(md_split_genres.groupby(['release_year','genres'])['revenue', 'budget','profit','vote_average','vote_count','popularity'].mean())
md_year_genre_mean.head()
#### Profit per genre per year
# Create data frame for average profit per genre per year
md_year_genre_profit = pd.DataFrame(md_split_genres.groupby(['release_year','genres'])['profit'].mean())
md_year_genre_profit.head()
# pivot data to get the shape that is necessary for a heatmap that displays genres, years and avg. profit per genre per year
md_heat_profit_pivot = pd.pivot_table(md_year_genre_profit, values='profit', index=['genres'], columns=['release_year'])
md_heat_profit_pivot.head()
# display heatmap
sns.set(rc={'figure.figsize':(15,10)})
sns.heatmap(md_heat_profit_pivot, linewidths=.5, cmap='YlGnBu');
This heatmap displays the average profit per genre per year from 1960 to 2015. The darker blue fields show higher profit, the brighter green-to-yellow fields show lower profit. In general, profits are increasing over time, especially for the genres Action, Adventure, Animation, Family, Fantasy and Science Fiction.
Animation movies had a very profitable year in 1961. History in 1991 and Western in 199 |
md_genre['original_title'].plot.pie(title= 'Movies per Genre in %', figsize=(10,10), autopct='%1.1f%%',fontsize=15); | random_line_split |
lib.rs | \n"` and `"world"`. And
//! the rope `"Hello\nworld\n"` has three lines: `"Hello\n"`,
//! `"world\n"`, and `""`.
//!
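//! A minimal sketch of the line counts described above (an illustrative
//! doc-test-style example, assuming the default line-break features):
//!
//! ```
//! # use ropey::Rope;
//! assert_eq!(Rope::from_str("Hello").len_lines(), 1);
//! assert_eq!(Rope::from_str("Hello\nworld").len_lines(), 2);
//! assert_eq!(Rope::from_str("Hello\nworld\n").len_lines(), 3);
//! ```
//!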
//! Ropey can be configured at build time via feature flags to recognize
//! different line breaks. Ropey always recognizes:
//!
//! - `U+000A` — LF (Line Feed)
//! - `U+000D` `U+000A` — CRLF (Carriage Return + Line Feed)
//!
//! With the `cr_lines` feature, the following are also recognized:
//!
//! - `U+000D` — CR (Carriage Return)
//!
//! With the `unicode_lines` feature, in addition to all of the
//! above, the following are also recognized (bringing Ropey into
//! conformance with
//! [Unicode Annex #14](https://www.unicode.org/reports/tr14/#BK)):
//!
//! - `U+000B` — VT (Vertical Tab)
//! - `U+000C` — FF (Form Feed)
//! - `U+0085` — NEL (Next Line)
//! - `U+2028` — Line Separator
//! - `U+2029` — Paragraph Separator
//!
//! (Note: `unicode_lines` is enabled by default, and always implies
//! `cr_lines`.)
//!
//! CRLF pairs are always treated as a single line break, and are never split
//! across chunks. Note, however, that slicing can still split them.
//!
//!
//! # A Note About SIMD Acceleration
//!
//! Ropey has a `simd` feature flag (enabled by default) that enables
//! explicit SIMD on supported platforms to improve performance.
//!
//! There is a bit of a footgun here: if you disable default features to
//! configure line break behavior (as per the section above) then SIMD
//! will also get disabled, and performance will suffer. So be careful
//! to explicitly re-enable the `simd` feature flag (if desired) when
//! doing that.
#![allow(clippy::collapsible_if)]
#![allow(clippy::inline_always)]
#![allow(clippy::needless_return)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::type_complexity)]
extern crate smallvec;
extern crate str_indices;
mod crlf;
mod rope;
mod rope_builder;
mod slice;
mod tree;
pub mod iter;
pub mod str_utils;
use std::ops::Bound;
pub use crate::rope::Rope;
pub use crate::rope_builder::RopeBuilder;
pub use crate::slice::RopeSlice;
//==============================================================
// Error reporting types.
/// Ropey's result type.
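///
/// A hypothetical usage sketch (illustration only; `try_byte_to_char` is one of
/// the fallible `try_*` methods that return this type):
///
/// ```no_run
/// # use ropey::Rope;
/// let rope = Rope::from_str("Hello");
/// match rope.try_byte_to_char(3) {
///     Ok(char_idx) => println!("char index: {}", char_idx),
///     Err(e) => println!("out of bounds: {}", e),
/// }
/// ```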
pub type Result<T> = std::result::Result<T, Error>;
/// Ropey's error type.
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum Error {
/// Indicates that the passed byte index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in bytes, in that order.
ByteIndexOutOfBounds(usize, usize),
/// Indicates that the passed char index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in chars, in that order.
CharIndexOutOfBounds(usize, usize),
/// Indicates that the passed line index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in lines, in that order.
LineIndexOutOfBounds(usize, usize),
/// Indicates that the passed utf16 code-unit index was out of
/// bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in utf16 code units, in that order.
Utf16IndexOutOfBounds(usize, usize),
/// Indicates that the passed byte index was not a char boundary.
///
/// Contains the passed byte index.
ByteIndexNotCharBoundary(usize),
/// Indicates that the passed byte range didn't line up with char
/// boundaries.
///
/// Contains the [start, end) byte indices of the range, in that order.
/// When either the start or end are `None`, that indicates a half-open
/// range.
ByteRangeNotCharBoundary(
Option<usize>, // Start.
Option<usize>, // End.
),
/// Indicates that a reversed byte-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) byte indices of the range, in that order.
ByteRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that a reversed char-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) char indices of the range, in that order.
CharRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that the passed byte-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) byte indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in bytes, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
ByteRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope byte length.
),
/// Indicates that the passed char-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) char indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in chars, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
CharRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope char length.
),
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
// Deprecated in std.
fn description(&self) -> &str {
""
}
// Deprecated in std.
fn cause(&self) -> Option<&dyn std::error::Error> {
None
}
}
impl std::fmt::Debug for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Error::ByteIndexOutOfBounds(index, len) => {
write!(
f,
"Byte index out of bounds: byte index {}, Rope/RopeSlice byte length {}",
index, len
)
}
Error::CharIndexOutOfBounds(index, len) => {
write!(
f,
"Char index out of bounds: char index {}, Rope/RopeSlice char length {}",
index, len
)
}
Error::LineIndexOutOfBounds(index, len) => {
write!(
f,
"Line index out of bounds: line index {}, Rope/RopeSlice line count {}",
index, len
)
}
Error::Utf16IndexOutOfBounds(index, len) => {
write!(f, "Utf16 code-unit index out of bounds: utf16 index {}, Rope/RopeSlice utf16 length {}", index, len)
}
Error::ByteIndexNotCharBoundary(index) => {
write!(
f,
"Byte index is not a valid char boundary: byte index {}",
index
)
}
Error::ByteRangeNotCharBoundary(start_idx_opt, end_idx_opt) => {
write!(f, "Byte range does not align with char boundaries: range ")?;
write_range(f, start_idx_opt, end_idx_opt)
}
Error::ByteRangeInvalid(start_idx, end_idx) => {
write!(
f,
"Invalid byte range {}..{}: start must be <= end",
start_idx, end_idx
)
}
Error::CharRangeInvalid(start_idx, end_idx) => {
write!(
f,
"Invalid char range {}..{}: start must be <= end",
start_idx, end_idx
)
}
Error::ByteRangeOutOfBounds(start_idx_opt, end_idx_opt, len) => {
write!(f, "Byte range out of bounds: byte range ")?;
write_range(f, start_idx_opt, end_idx_opt)?;
write!(f, ", Rope/RopeSlice byte length {}", len)
}
Error::CharRangeOutOfBounds(start_idx_opt, end_idx_opt, len) => {
write!(f, "Char range out of bounds: char range ")?;
write_range(f, start_idx_opt, end_idx_opt)?;
write!(f, ", Rope/RopeSlice char length {}", len)
}
}
}
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Just re-use the debug impl.
std::fmt::Debug::fmt(self, f)
}
}
fn | write_range | identifier_name |
|
lib.rs | _idx));
//!
//! // Write the file back out to disk.
//! text.write_to(
//! BufWriter::new(File::create("my_great_book.txt")?)
//! )?;
//! # Ok(())
//! # }
//! # do_stuff().unwrap();
//! ```
//!
//! More examples can be found in the `examples` directory of the git
//! repository. Many of those examples demonstrate doing non-trivial things
//! with Ropey such as grapheme handling, search-and-replace, and streaming
//! loading of non-utf8 text files.
//!
//!
//! # Low-level APIs
//!
//! Ropey also provides access to some of its low-level APIs, enabling client
//! code to efficiently work with a `Rope`'s data and implement new
//! functionality. The most important of those APIs are:
//!
//! - The [`chunk_at_*()`](Rope::chunk_at_byte)
//! chunk-fetching methods of `Rope` and `RopeSlice`.
//! - The [`Chunks`](iter::Chunks) iterator.
//! - The functions in [`str_utils`] for operating on
//! `&str` slices.
//!
//! Internally, each `Rope` stores text as a segmented collection of utf8
//! strings. The chunk-fetching methods and `Chunks` iterator provide direct
//! access to those strings (or "chunks") as `&str` slices, allowing client
//! code to work directly with the underlying utf8 data.
//!
//! The chunk-fetching methods and `str_utils` functions are the basic
//! building blocks that Ropey itself uses to build much of its functionality.
//! For example, the [`Rope::byte_to_char()`]
//! method can be reimplemented as a free function like this:
//!
//! ```no_run
//! use ropey::{
//! Rope,
//! str_utils::byte_to_char_idx
//! };
//!
//! fn byte_to_char(rope: &Rope, byte_idx: usize) -> usize {
//! let (chunk, b, c, _) = rope.chunk_at_byte(byte_idx);
//! c + byte_to_char_idx(chunk, byte_idx - b)
//! }
//! ```
//!
//! And this will be just as efficient as Ropey's implementation.
//!
//! The chunk-fetching methods in particular are among the fastest functions
//! that Ropey provides, generally operating in the sub-hundred nanosecond
//! range for medium-sized (~200kB) documents on recent-ish computer systems.
//!
//!
//! # A Note About Line Breaks
//!
//! Some of Ropey's APIs use the concept of line breaks or lines of text.
//!
//! Ropey considers the start of the rope and positions immediately
//! _after_ line breaks to be the start of new lines. And it treats
//! line breaks as being a part of the lines they mark the end of.
//!
//! For example, the rope `"Hello"` has a single line: `"Hello"`. The
//! rope `"Hello\nworld"` has two lines: `"Hello\n"` and `"world"`. And
//! the rope `"Hello\nworld\n"` has three lines: `"Hello\n"`,
//! `"world\n"`, and `""`.
//!
//! Ropey can be configured at build time via feature flags to recognize
//! different line breaks. Ropey always recognizes:
//!
//! - `U+000A` — LF (Line Feed)
//! - `U+000D` `U+000A` — CRLF (Carriage Return + Line Feed)
//!
//! With the `cr_lines` feature, the following are also recognized:
//!
//! - `U+000D` — CR (Carriage Return)
//!
//! With the `unicode_lines` feature, in addition to all of the
//! above, the following are also recognized (bringing Ropey into
//! conformance with
//! [Unicode Annex #14](https://www.unicode.org/reports/tr14/#BK)):
//!
//! - `U+000B` — VT (Vertical Tab)
//! - `U+000C` — FF (Form Feed)
//! - `U+0085` — NEL (Next Line)
//! - `U+2028` — Line Separator
//! - `U+2029` — Paragraph Separator
//!
//! (Note: `unicode_lines` is enabled by default, and always implies
//! `cr_lines`.)
//!
//! CRLF pairs are always treated as a single line break, and are never split
//! across chunks. Note, however, that slicing can still split them.
//!
//!
//! # A Note About SIMD Acceleration
//!
//! Ropey has a `simd` feature flag (enabled by default) that enables
//! explicit SIMD on supported platforms to improve performance.
//!
//! There is a bit of a footgun here: if you disable default features to
//! configure line break behavior (as per the section above) then SIMD
//! will also get disabled, and performance will suffer. So be careful
//! to explicitly re-enable the `simd` feature flag (if desired) when
//! doing that.
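//!
//! For example, a `Cargo.toml` dependency declaration along these lines (the
//! version is just a placeholder) keeps SIMD enabled while trimming
//! line-break support down to the `cr_lines` set:
//! `ropey = { version = "*", default-features = false, features = ["simd", "cr_lines"] }`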
#![allow(clippy::collapsible_if)]
#![allow(clippy::inline_always)]
#![allow(clippy::needless_return)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::type_complexity)]
extern crate smallvec;
extern crate str_indices;
mod crlf;
mod rope;
mod rope_builder;
mod slice;
mod tree;
pub mod iter;
pub mod str_utils;
use std::ops::Bound;
pub use crate::rope::Rope;
pub use crate::rope_builder::RopeBuilder;
pub use crate::slice::RopeSlice;
//==============================================================
// Error reporting types.
/// Ropey's result type.
pub type Result<T> = std::result::Result<T, Error>;
/// Ropey's error type.
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum Error {
/// Indicates that the passed byte index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in bytes, in that order.
ByteIndexOutOfBounds(usize, usize),
/// Indicates that the passed char index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in chars, in that order.
CharIndexOutOfBounds(usize, usize),
/// Indicates that the passed line index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in lines, in that order.
LineIndexOutOfBounds(usize, usize),
/// Indicates that the passed utf16 code-unit index was out of
/// bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in utf16 code units, in that order.
Utf16IndexOutOfBounds(usize, usize),
/// Indicates that the passed byte index was not a char boundary.
///
/// Contains the passed byte index.
ByteIndexNotCharBoundary(usize),
/// Indicates that the passed byte range didn't line up with char
/// boundaries.
///
/// Contains the [start, end) byte indices of the range, in that order.
/// When either the start or end are `None`, that indicates a half-open
/// range.
ByteRangeNotCharBoundary(
Option<usize>, // Start.
Option<usize>, // End.
),
/// Indicates that a reversed byte-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) byte indices of the range, in that order.
ByteRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that a reversed char-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) char indices of the range, in that order.
CharRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that the passed byte-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) byte indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in bytes, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
ByteRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope byte length.
),
/// Indicates that the passed char-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) char indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in chars, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
CharRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope char length.
),
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
// Deprecated in std.
fn description(&self) -> &str | {
""
} | identifier_body |
|
lib.rs | //! text.remove(start_idx..end_idx);
//!
//! // ...and replace it with something better.
//! text.insert(start_idx, "The flowers are... so... dunno.\n");
//!
//! // Print the changes, along with the previous few lines for context.
//! let start_idx = text.line_to_char(511);
//! let end_idx = text.line_to_char(516);
//! println!("{}", text.slice(start_idx..end_idx));
//!
//! // Write the file back out to disk.
//! text.write_to(
//! BufWriter::new(File::create("my_great_book.txt")?)
//! )?;
//! # Ok(())
//! # }
//! # do_stuff().unwrap();
//! ```
//!
//! More examples can be found in the `examples` directory of the git
//! repository. Many of those examples demonstrate doing non-trivial things
//! with Ropey such as grapheme handling, search-and-replace, and streaming
//! loading of non-utf8 text files.
//!
//!
//! # Low-level APIs
//!
//! Ropey also provides access to some of its low-level APIs, enabling client
//! code to efficiently work with a `Rope`'s data and implement new
//! functionality. The most important of those APIs are:
//!
//! - The [`chunk_at_*()`](Rope::chunk_at_byte)
//! chunk-fetching methods of `Rope` and `RopeSlice`.
//! - The [`Chunks`](iter::Chunks) iterator.
//! - The functions in [`str_utils`] for operating on
//! `&str` slices.
//!
//! Internally, each `Rope` stores text as a segmented collection of utf8
//! strings. The chunk-fetching methods and `Chunks` iterator provide direct
//! access to those strings (or "chunks") as `&str` slices, allowing client
//! code to work directly with the underlying utf8 data.
//!
//! The chunk-fetching methods and `str_utils` functions are the basic
//! building blocks that Ropey itself uses to build much of its functionality.
//! For example, the [`Rope::byte_to_char()`]
//! method can be reimplemented as a free function like this:
//!
//! ```no_run
//! use ropey::{
//! Rope,
//! str_utils::byte_to_char_idx
//! };
//!
//! fn byte_to_char(rope: &Rope, byte_idx: usize) -> usize {
//! let (chunk, b, c, _) = rope.chunk_at_byte(byte_idx);
//! c + byte_to_char_idx(chunk, byte_idx - b)
//! }
//! ```
//!
//! And this will be just as efficient as Ropey's implementation.
//!
//! The chunk-fetching methods in particular are among the fastest functions
//! that Ropey provides, generally operating in the sub-hundred nanosecond
//! range for medium-sized (~200kB) documents on recent-ish computer systems.
//!
//!
//! # A Note About Line Breaks
//!
//! Some of Ropey's APIs use the concept of line breaks or lines of text.
//!
//! Ropey considers the start of the rope and positions immediately
//! _after_ line breaks to be the start of new lines. And it treats
//! line breaks as being a part of the lines they mark the end of.
//!
//! For example, the rope `"Hello"` has a single line: `"Hello"`. The
//! rope `"Hello\nworld"` has two lines: `"Hello\n"` and `"world"`. And
//! the rope `"Hello\nworld\n"` has three lines: `"Hello\n"`,
//! `"world\n"`, and `""`.
//!
//! Ropey can be configured at build time via feature flags to recognize
//! different line breaks. Ropey always recognizes:
//!
//! - `U+000A` — LF (Line Feed)
//! - `U+000D` `U+000A` — CRLF (Carriage Return + Line Feed)
//!
//! With the `cr_lines` feature, the following are also recognized:
//!
//! - `U+000D` — CR (Carriage Return)
//!
//! With the `unicode_lines` feature, in addition to all of the
//! above, the following are also recognized (bringing Ropey into
//! conformance with
//! [Unicode Annex #14](https://www.unicode.org/reports/tr14/#BK)):
//!
//! - `U+000B` — VT (Vertical Tab)
//! - `U+000C` — FF (Form Feed)
//! - `U+0085` — NEL (Next Line)
//! - `U+2028` — Line Separator
//! - `U+2029` — Paragraph Separator
//!
//! (Note: `unicode_lines` is enabled by default, and always implies
//! `cr_lines`.)
//!
//! CRLF pairs are always treated as a single line break, and are never split
//! across chunks. Note, however, that slicing can still split them.
//!
//!
//! # A Note About SIMD Acceleration
//!
//! Ropey has a `simd` feature flag (enabled by default) that enables
//! explicit SIMD on supported platforms to improve performance.
//!
//! There is a bit of a footgun here: if you disable default features to
//! configure line break behavior (as per the section above) then SIMD
//! will also get disabled, and performance will suffer. So be careful
//! to explicitly re-enable the `simd` feature flag (if desired) when
//! doing that.
#![allow(clippy::collapsible_if)]
#![allow(clippy::inline_always)]
#![allow(clippy::needless_return)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::type_complexity)]
extern crate smallvec;
extern crate str_indices;
mod crlf;
mod rope;
mod rope_builder;
mod slice;
mod tree;
pub mod iter;
pub mod str_utils;
use std::ops::Bound;
pub use crate::rope::Rope;
pub use crate::rope_builder::RopeBuilder;
pub use crate::slice::RopeSlice;
//==============================================================
// Error reporting types.
/// Ropey's result type.
pub type Result<T> = std::result::Result<T, Error>;
/// Ropey's error type.
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum Error {
/// Indicates that the passed byte index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in bytes, in that order.
ByteIndexOutOfBounds(usize, usize), | /// Indicates that the passed char index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in chars, in that order.
CharIndexOutOfBounds(usize, usize),
/// Indicates that the passed line index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in lines, in that order.
LineIndexOutOfBounds(usize, usize),
/// Indicates that the passed utf16 code-unit index was out of
/// bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in utf16 code units, in that order.
Utf16IndexOutOfBounds(usize, usize),
/// Indicates that the passed byte index was not a char boundary.
///
/// Contains the passed byte index.
ByteIndexNotCharBoundary(usize),
/// Indicates that the passed byte range didn't line up with char
/// boundaries.
///
/// Contains the [start, end) byte indices of the range, in that order.
/// When either the start or end are `None`, that indicates a half-open
/// range.
ByteRangeNotCharBoundary(
Option<usize>, // Start.
Option<usize>, // End.
),
/// Indicates that a reversed byte-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) byte indices of the range, in that order.
ByteRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that a reversed char-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) char indices of the range, in that order.
CharRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that the passed byte-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) byte indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in bytes, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
ByteRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope byte length.
),
/// Indicates that the passed char-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) char indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in chars, in that order. When
/// either the start or end are `None`, that indicates a | random_line_split |
|
lib.rs | the following are also recognized:
//!
//! - `U+000D` — CR (Carriage Return)
//!
//! With the `unicode_lines` feature, in addition to all of the
//! above, the following are also recognized (bringing Ropey into
//! conformance with
//! [Unicode Annex #14](https://www.unicode.org/reports/tr14/#BK)):
//!
//! - `U+000B` — VT (Vertical Tab)
//! - `U+000C` — FF (Form Feed)
//! - `U+0085` — NEL (Next Line)
//! - `U+2028` — Line Separator
//! - `U+2029` — Paragraph Separator
//!
//! (Note: `unicode_lines` is enabled by default, and always implies
//! `cr_lines`.)
//!
//! CRLF pairs are always treated as a single line break, and are never split
//! across chunks. Note, however, that slicing can still split them.
//!
//!
//! # A Note About SIMD Acceleration
//!
//! Ropey has a `simd` feature flag (enabled by default) that enables
//! explicit SIMD on supported platforms to improve performance.
//!
//! There is a bit of a footgun here: if you disable default features to
//! configure line break behavior (as per the section above) then SIMD
//! will also get disabled, and performance will suffer. So be careful
//! to explicitly re-enable the `simd` feature flag (if desired) when
//! doing that.
#![allow(clippy::collapsible_if)]
#![allow(clippy::inline_always)]
#![allow(clippy::needless_return)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::type_complexity)]
extern crate smallvec;
extern crate str_indices;
mod crlf;
mod rope;
mod rope_builder;
mod slice;
mod tree;
pub mod iter;
pub mod str_utils;
use std::ops::Bound;
pub use crate::rope::Rope;
pub use crate::rope_builder::RopeBuilder;
pub use crate::slice::RopeSlice;
//==============================================================
// Error reporting types.
/// Ropey's result type.
pub type Result<T> = std::result::Result<T, Error>;
/// Ropey's error type.
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum Error {
/// Indicates that the passed byte index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in bytes, in that order.
ByteIndexOutOfBounds(usize, usize),
/// Indicates that the passed char index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in chars, in that order.
CharIndexOutOfBounds(usize, usize),
/// Indicates that the passed line index was out of bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in lines, in that order.
LineIndexOutOfBounds(usize, usize),
/// Indicates that the passed utf16 code-unit index was out of
/// bounds.
///
/// Contains the index attempted and the actual length of the
/// `Rope`/`RopeSlice` in utf16 code units, in that order.
Utf16IndexOutOfBounds(usize, usize),
/// Indicates that the passed byte index was not a char boundary.
///
/// Contains the passed byte index.
ByteIndexNotCharBoundary(usize),
/// Indicates that the passed byte range didn't line up with char
/// boundaries.
///
/// Contains the [start, end) byte indices of the range, in that order.
/// When either the start or end are `None`, that indicates a half-open
/// range.
ByteRangeNotCharBoundary(
Option<usize>, // Start.
Option<usize>, // End.
),
/// Indicates that a reversed byte-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) byte indices of the range, in that order.
ByteRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that a reversed char-index range (end < start) was
/// encountered.
///
/// Contains the [start, end) char indices of the range, in that order.
CharRangeInvalid(
usize, // Start.
usize, // End.
),
/// Indicates that the passed byte-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) byte indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in bytes, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
ByteRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope byte length.
),
/// Indicates that the passed char-index range was partially or fully
/// out of bounds.
///
/// Contains the [start, end) char indices of the range and the actual
/// length of the `Rope`/`RopeSlice` in chars, in that order. When
/// either the start or end are `None`, that indicates a half-open range.
CharRangeOutOfBounds(
Option<usize>, // Start.
Option<usize>, // End.
usize, // Rope char length.
),
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
// Deprecated in std.
fn description(&self) -> &str {
""
}
// Deprecated in std.
fn cause(&self) -> Option<&dyn std::error::Error> {
None
}
}
impl std::fmt::Debug for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Error::ByteIndexOutOfBounds(index, len) => {
write!(
f,
"Byte index out of bounds: byte index {}, Rope/RopeSlice byte length {}",
index, len
)
}
Error::CharIndexOutOfBounds(index, len) => {
write!(
f,
"Char index out of bounds: char index {}, Rope/RopeSlice char length {}",
index, len
)
}
Error::LineIndexOutOfBounds(index, len) => {
write!(
f,
"Line index out of bounds: line index {}, Rope/RopeSlice line count {}",
index, len
)
}
Error::Utf16IndexOutOfBounds(index, len) => {
write!(f, "Utf16 code-unit index out of bounds: utf16 index {}, Rope/RopeSlice utf16 length {}", index, len)
}
Error::ByteIndexNotCharBoundary(index) => {
write!(
f,
"Byte index is not a valid char boundary: byte index {}",
index
)
}
Error::ByteRangeNotCharBoundary(start_idx_opt, end_idx_opt) => {
write!(f, "Byte range does not align with char boundaries: range ")?;
write_range(f, start_idx_opt, end_idx_opt)
}
Error::ByteRangeInvalid(start_idx, end_idx) => {
write!(
f,
"Invalid byte range {}..{}: start must be <= end",
start_idx, end_idx
)
}
Error::CharRangeInvalid(start_idx, end_idx) => {
write!(
f,
"Invalid char range {}..{}: start must be <= end",
start_idx, end_idx
)
}
Error::ByteRangeOutOfBounds(start_idx_opt, end_idx_opt, len) => {
write!(f, "Byte range out of bounds: byte range ")?;
write_range(f, start_idx_opt, end_idx_opt)?;
write!(f, ", Rope/RopeSlice byte length {}", len)
}
Error::CharRangeOutOfBounds(start_idx_opt, end_idx_opt, len) => {
write!(f, "Char range out of bounds: char range ")?;
write_range(f, start_idx_opt, end_idx_opt)?;
write!(f, ", Rope/RopeSlice char length {}", len)
}
}
}
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Just re-use the debug impl.
std::fmt::Debug::fmt(self, f)
}
}
fn write_range(
f: &mut std::fmt::Formatter<'_>,
start_idx: Option<usize>,
end_idx: Option<usize>,
) -> std::fmt::Result {
match (start_idx, end_idx) {
(None, None) => {
write!(f, "..")
}
(Some(start), None) => {
write!(f, "{}..", start)
}
(None, Some(end)) => {
write!(f, "..{}", end)
}
(Some(start), Some(end)) => | {
write!(f, "{}..{}", start, end)
} | conditional_block |
|
my_functions.py | a_1 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) + np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_2 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) - np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_1 = np.round(a_1, decimals=5)
a_2 = np.round(a_2, decimals=5)
if a_1 > 0 and (p_prime[0, 0] - a_1)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0, 0] - a_2)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0, 0] + p_prime[1, 0])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5*(np.arcsin(p_prime[2, 0]/(p[1] - p[0])))
p_prime_2 = np.linalg.lstsq(X[:, 0:3], term, rcond=-1)
p_prime_2 = p_prime_2[0]
# p_prime_2 = np.dot(np.linalg.pinv(X[:, 0:3]), term)
R_x = 1/p[0]
R_y = 1/p[1]
Q_x = R_x**2*(p_prime_2[0] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime_2[1] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
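# Illustrative usage sketch for biconic_fitting() (added for clarity, not part
# of the original pipeline): sample a synthetic spherical cap of radius R, fit
# it, and expect both recovered radii to come back close to R. Assumes only
# numpy (imported as np above) and the biconic_fitting() function itself.
def _demo_biconic_fitting(R=7.6, half_width=2.0, n=21):
    xs, ys = np.meshgrid(np.linspace(-half_width, half_width, n),
                         np.linspace(-half_width, half_width, n))
    zs = R - np.sqrt(R**2 - xs**2 - ys**2)  # sag (z) of a sphere of radius R
    pts = np.c_[xs.ravel(), ys.ravel(), zs.ravel()]
    R_x, R_y, phi, Q_x, Q_y = biconic_fitting(pts)
    return R_x, R_y, phi, Q_x, Q_y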
def | (init, *data):
"""biconical model; inital guess: init=[a',b',d',u',v',w'], data to fit to: data= [x_i,y_i,z_i]"""
data = data[0]
c = (init[3]*data[0, :]**2 + init[4]*data[1, :]**2 + init[5]*data[0, :]*data[1, :])/(init[0]*data[0, :]**2 + init[1]*data[1, :]**2 + init[2]*data[0, :]*data[1, :])
return np.sum(( init[0]*data[0, :]**2 + init[1]*data[1, :]**2 + init[2]*data[0, :]*data[1, :] + c*(data[2, :])**2 - 2*(data[2, :]) )**2)
def f2_biconic_model(init, *data):
data = data[0]
x = data[:, 0]
y = data[:, 1]
z = data[:, 2]
return np.sum((-z + init[4] + (x**2/init[0] + y**2/init[1])/(1 + np.sqrt(1 - (1+init[2])*x**2/init[0]**2 - (1+init[3])*y**2/init[1]**2)))**2)
def nm_biconic_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
init = np.array([1/7.6, 1/7.6, 0, 0, 0, 0])
res = optimize.minimize(f_biconic_model, init, np.array([x, y, z]), method='Nelder-Mead', options={'xtol': 1e-10})
p_prime = res.x
a_1 = 0.5 * (-(-p_prime[0] - p_prime[1]) + np.sqrt((-p_prime[0] - p_prime[1])**2 - 4*(p_prime[0]*p_prime[1] - p_prime[2]**2/4) + 0j))
a_2 = 0.5 * (-(-p_prime[0] - p_prime[1]) - np.sqrt((-p_prime[0] - p_prime[1])**2 - 4*(p_prime[0]*p_prime[1] - p_prime[2]**2/4) + 0j))
a_1 = np.round(a_1, decimals=5)
a_2 = np.round(a_2, decimals=5)
p = np.zeros([5,1])
if a_1 > 0 and (p_prime[0] - a_1) / (p_prime[0] + p_prime[1] - 2 * a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0] - a_2) / (p_prime[0] + p_prime[1] - 2 * a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0] + p_prime[1])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5 * (np.arcsin(p_prime[2] / (p[1] - p[0])))
R_x = 1 / p[0]
R_y = 1 / p[1]
Q_x = R_x**2*(p_prime[3] - 0.5*p_prime[5] * np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime[4] - 0.5*p_prime[5] * np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
def f_sphere(init, *data):
data = np.array(data[0:3])[:, :, 0]
x = data[0, :]
y = data[1, :]
z = data[2, :]
return (-init[0]**2 + x**2 + y**2 + (z-init[1])**2)**2
def sphere_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
init = np.array([7.6, 0])
res = optimize.least_squares(f_sphere, init, args=np.array([x, y, z]))
return res.x
def f_circ(init, *data):
data = np.array(data[0:2])[:, :, 0]
x = data[0, :]
y = data[1, :]
return (-init[0]**2 + x**2 + (y-init[1])**2)**2
def circ_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
init = np.array([7.6, 0])
res = optimize.least_squares(f_circ, init, args=np.array([x, y]))
return res.x
def keratometry(self, mode='biconic'):
# Coordinates of surface
x = self[:, 0]
y = self[:, 1]
z = self[:, 2]
# Least squares
# Create X matrix based on measurements
x2 = x ** 2
y2 = y ** 2
xy = x * y
z2 = z ** 2
z2_diag = np.diag(z2)
X = np.c_[x2, y2, xy, z2_diag]
# Create target vector
t = 2
z_target = t * z
# Solve | f_biconic_model | identifier_name |
my_functions.py | a_1 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) + np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_2 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) - np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_1 = np.round(a_1, decimals=5)
a_2 = np.round(a_2, decimals=5)
if a_1 > 0 and (p_prime[0, 0] - a_1)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0, 0] - a_2)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0, 0] + p_prime[1, 0])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5*(np.arcsin(p_prime[2, 0]/(p[1] - p[0])))
p_prime_2 = np.linalg.lstsq(X[:, 0:3], term, rcond=-1)
p_prime_2 = p_prime_2[0]
# p_prime_2 = np.dot(np.linalg.pinv(X[:, 0:3]), term)
R_x = 1/p[0]
R_y = 1/p[1]
Q_x = R_x**2*(p_prime_2[0] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime_2[1] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
def f_biconic_model(init, *data):
"""biconical model; inital guess: init=[a',b',d',u',v',w'], data to fit to: data= [x_i,y_i,z_i]"""
data = data[0]
c = (init[3]*data[0, :]**2 + init[4]*data[1, :]**2 + init[5]*data[0, :]*data[1, :])/(init[0]*data[0, :]**2 + init[1]*data[1, :]**2 + init[2]*data[0, :]*data[1, :])
return np.sum(( init[0]*data[0, :]**2 + init[1]*data[1, :]**2 + init[2]*data[0, :]*data[1, :] + c*(data[2, :])**2 - 2*(data[2, :]) )**2)
def f2_biconic_model(init, *data):
data = data[0]
x = data[:, 0]
y = data[:, 1]
z = data[:, 2]
return np.sum((-z + init[4] + (x**2/init[0] + y**2/init[1])/(1 + np.sqrt(1 - (1+init[2])*x**2/init[0]**2 - (1+init[3])*y**2/init[1]**2)))**2)
def nm_biconic_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
init = np.array([1/7.6, 1/7.6, 0, 0, 0, 0])
res = optimize.minimize(f_biconic_model, init, np.array([x, y, z]), method='Nelder-Mead', options={'xtol': 1e-10})
p_prime = res.x
a_1 = 0.5 * (-(-p_prime[0] - p_prime[1]) + np.sqrt((-p_prime[0] - p_prime[1])**2 - 4*(p_prime[0]*p_prime[1] - p_prime[2]**2/4) + 0j))
a_2 = 0.5 * (-(-p_prime[0] - p_prime[1]) - np.sqrt((-p_prime[0] - p_prime[1])**2 - 4*(p_prime[0]*p_prime[1] - p_prime[2]**2/4) + 0j))
a_1 = np.round(a_1, decimals=5)
a_2 = np.round(a_2, decimals=5)
p = np.zeros([5,1])
if a_1 > 0 and (p_prime[0] - a_1) / (p_prime[0] + p_prime[1] - 2 * a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0] - a_2) / (p_prime[0] + p_prime[1] - 2 * a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0] + p_prime[1])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5 * (np.arcsin(p_prime[2] / (p[1] - p[0])))
R_x = 1 / p[0]
R_y = 1 / p[1]
Q_x = R_x**2*(p_prime[3] - 0.5*p_prime[5] * np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime[4] - 0.5*p_prime[5] * np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
def f_sphere(init, *data):
data = np.array(data[0:3])[:, :, 0]
x = data[0, :]
y = data[1, :]
z = data[2, :]
return (-init[0]**2 + x**2 + y**2 + (z-init[1])**2)**2
def sphere_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
init = np.array([7.6, 0])
res = optimize.least_squares(f_sphere, init, args=np.array([x, y, z]))
return res.x
def f_circ(init, *data):
|
def circ_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
init = np.array([7.6, 0])
res = optimize.least_squares(f_circ, init, args=np.array([x, y]))
return res.x
def keratometry(self, mode='biconic'):
# Coordinates of surface
x = self[:, 0]
y = self[:, 1]
z = self[:, 2]
# Least squares
# Create X matrix based on measurements
x2 = x ** 2
y2 = y ** 2
xy = x * y
z2 = z ** 2
z2_diag = np.diag(z2)
X = np.c_[x2, y2, xy, z2_diag]
# Create target vector
t = 2
z_target = t * z
# Solve least | data = np.array(data[0:2])[:, :, 0]
x = data[0, :]
y = data[1, :]
return (-init[0]**2 + x**2 + (y-init[1])**2)**2 | identifier_body |
my_functions.py | a_1 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) + np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_2 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) - np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_1 = np.round(a_1, decimals=5)
a_2 = np.round(a_2, decimals=5)
if a_1 > 0 and (p_prime[0, 0] - a_1)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0, 0] - a_2)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0, 0] + p_prime[1, 0])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5*(np.arcsin(p_prime[2, 0]/(p[1] - p[0])))
p_prime_2 = np.linalg.lstsq(X[:, 0:3], term, rcond=-1)
p_prime_2 = p_prime_2[0]
# p_prime_2 = np.dot(np.linalg.pinv(X[:, 0:3]), term)
R_x = 1/p[0]
R_y = 1/p[1]
Q_x = R_x**2*(p_prime_2[0] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime_2[1] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
def f_biconic_model(init, *data):
"""biconical model; inital guess: init=[a',b',d',u',v',w'], data to fit to: data= [x_i,y_i,z_i]"""
data = data[0]
c = (init[3]*data[0, :]**2 + init[4]*data[1, :]**2 + init[5]*data[0, :]*data[1, :])/(init[0]*data[0, :]**2 + init[1]*data[1, :]**2 + init[2]*data[0, :]*data[1, :])
return np.sum(( init[0]*data[0, :]**2 + init[1]*data[1, :]**2 + init[2]*data[0, :]*data[1, :] + c*(data[2, :])**2 - 2*(data[2, :]) )**2)
def f2_biconic_model(init, *data):
data = data[0]
x = data[:, 0]
y = data[:, 1]
z = data[:, 2]
return np.sum((-z + init[4] + (x**2/init[0] + y**2/init[1])/(1 + np.sqrt(1 - (1+init[2])*x**2/init[0]**2 - (1+init[3])*y**2/init[1]**2)))**2)
def nm_biconic_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
init = np.array([1/7.6, 1/7.6, 0, 0, 0, 0])
res = optimize.minimize(f_biconic_model, init, np.array([x, y, z]), method='Nelder-Mead', options={'xtol': 1e-10})
p_prime = res.x
a_1 = 0.5 * (-(-p_prime[0] - p_prime[1]) + np.sqrt((-p_prime[0] - p_prime[1])**2 - 4*(p_prime[0]*p_prime[1] - p_prime[2]**2/4) + 0j))
a_2 = 0.5 * (-(-p_prime[0] - p_prime[1]) - np.sqrt((-p_prime[0] - p_prime[1])**2 - 4*(p_prime[0]*p_prime[1] - p_prime[2]**2/4) + 0j)) | a_2 = np.round(a_2, decimals=5)
p = np.zeros([5,1])
if a_1 > 0 and (p_prime[0] - a_1) / (p_prime[0] + p_prime[1] - 2 * a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0] - a_2) / (p_prime[0] + p_prime[1] - 2 * a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0] + p_prime[1])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5 * (np.arcsin(p_prime[2] / (p[1] - p[0])))
R_x = 1 / p[0]
R_y = 1 / p[1]
Q_x = R_x**2*(p_prime[3] - 0.5*p_prime[5] * np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime[4] - 0.5*p_prime[5] * np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
def f_sphere(init, *data):
data = np.array(data[0:3])[:, :, 0]
x = data[0, :]
y = data[1, :]
z = data[2, :]
return (-init[0]**2 + x**2 + y**2 + (z-init[1])**2)**2
def sphere_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
init = np.array([7.6, 0])
res = optimize.least_squares(f_sphere, init, args=np.array([x, y, z]))
return res.x
def f_circ(init, *data):
data = np.array(data[0:2])[:, :, 0]
x = data[0, :]
y = data[1, :]
return (-init[0]**2 + x**2 + (y-init[1])**2)**2
def circ_fit(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
init = np.array([7.6, 0])
res = optimize.least_squares(f_circ, init, args=np.array([x, y]))
return res.x
def keratometry(self, mode='biconic'):
# Coordinates of surface
x = self[:, 0]
y = self[:, 1]
z = self[:, 2]
# Least squares
# Create X matrix based on measurements
x2 = x ** 2
y2 = y ** 2
xy = x * y
z2 = z ** 2
z2_diag = np.diag(z2)
X = np.c_[x2, y2, xy, z2_diag]
# Create target vector
t = 2
z_target = t * z
# Solve least | a_1 = np.round(a_1, decimals=5) | random_line_split |
my_functions.py |
f.write("\t</Parameters>\n")
f.write("</febio_spec>")
f.close()
def pre_stretch(ite_max, tol_error, path=''):
if not path == '':
os.chdir(path)
error = np.inf # [mm]
i = 0
# os.system('cp geometry_init.feb geometry_opt.feb')
X_aim = np.asarray(load_feb_file_nodes('geometry_init.feb', '<Nodes name=\"Cornea\">', path=path))
X_subopt = np.asarray(load_feb_file_nodes('geometry_opt.feb', '<Nodes name=\"Cornea\">', path=path))
X_opt = deepcopy(X_subopt)
#X_opt[:, 1:] = 0.875 * X_subopt[:, 1:]
write_febio_geometry_file('geometry_opt.feb', X_opt, path=path)
while (i < ite_max) and (error > tol_error):
os.system('/home/ubelix/artorg/shared/software/FEBio2.8.5/bin/febio2.lnx64 -i pre_stretch.feb')
X_subopt = np.asarray(load_feb_file_nodes('geometry_opt.feb', '<Nodes name=\"Cornea\">', path=path))
t, x = load_output_dat_file('disp_pre_stretch.dat', path=path)
x = np.asarray(x)
X_def = x[np.where(x[:, 0] == 1)[0][-1]:np.where(x[:, 0] == X_aim.shape[0])[0][-1] + 1, :]
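        # Fixed-point update: measure the residual between the target geometry
        # (X_aim) and the deformed result (X_def), then shift the current
        # reference geometry (X_subopt) by that residual so the next FEBio run
        # deforms closer to the target shape.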
X_error = X_aim[:, 1:] - X_def[:, 1:]
error = np.max(np.abs(X_error))
X_opt = deepcopy(X_def)
X_opt[:, 1:] = X_error + X_subopt[:, 1:]
write_febio_geometry_file('geometry_opt.feb', X_opt, path=path)
print(i, error)
i += 1
def write_febio_geometry_file(file_name, x, path=''):
if not path == '':
os.chdir(path)
i = 0
fh = open(file_name, 'r')
with open('temp.feb', 'w+') as temp:
for line in fh:
if not line.find('<node id=\"' + str(int(x[i, 0])) + '\">') == -1:
temp.write('\t\t\t<node id=\"' + str(int(x[i, 0])) + '\"> ' + str(x[i, 1]) + ', ' + str(x[i, 2]) + ', ' + str(x[i, 3]) + '</node>\n')
i += 1
i = int(np.min([i, x.shape[0]-1]))
else:
temp.write(line)
os.system('mv temp.feb ' + file_name)
def load_feb_file_nodes(filename, section, path=''):
if not path == '':
os.chdir(path)
nodes = []
with open(filename) as fh:
line = next(fh)
while line.find(section) == -1:
line = next(fh)
for line in fh:
if not line.find('</Nodes>') == -1:
break
id_1 = line.find("<node id=")
id_2 = line.find("> ")
id_3 = line.find("</node>")
nodes.append([int(line[id_1 + 10:id_2 - 1])] + [float(x) for x in line[id_2+3:id_3].split(',')])
return nodes
def load_feb_file_nodes_id(filename, section, path=''):
if not path == '':
os.chdir(path)
nodes_index = []
with open(filename) as fh:
line = next(fh)
while line.find(section) == -1:
line = next(fh)
for line in fh:
if not line.find('</NodeSet>') == -1:
break
id_1 = line.find("<node id=")
id_2 = line.find("/>")
nodes_index.append(int(line[id_1 + 10:id_2 - 1]))
return nodes_index
def load_output_dat_file(filename, path=''):
if not path == '':
os.chdir(path)
nodes_disp = []
t = []
with open(filename) as fh:
for line in fh:
if line.find('*Step') == 0:
line = next(fh)
id_1 = line.find('=')
t.append(float(line[id_1 + 1:-1]))
line = next(fh)
line = next(fh)
nodes_disp.append([float(x) for x in line.split(',')])
return t, nodes_disp
def biconic_fitting(data):
x = np.reshape(data[:, 0], [len(data[:, 0]), 1])
y = np.reshape(data[:, 1], [len(data[:, 0]), 1])
z = np.reshape(data[:, 2], [len(data[:, 0]), 1])
X = np.zeros([len(x), len(x)+3])
# create Matrix for least square minimization
for i in range(len(x)):
X[i, 0:3] = [x[i, 0]**2, y[i, 0]**2, x[i, 0]*y[i, 0]]
X[i, i+3] = z[i, 0]**2
p_prime = np.linalg.lstsq(X, 2*z, rcond=-1)
p_prime = p_prime[0]
# X_inv = np.linalg.pinv(X)
# p_prime = 2*np.dot(X_inv, z)
term = np.zeros([len(x), 1])
# create Matrix for least square minimization
for i in range(len(x)):
term[i, 0] = p_prime[i+3, 0]*(2*z[i, 0] - p_prime[i+3, 0]*z[i, 0]**2)
p = -np.ones([3, 1])
a_1 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) + np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_2 = 0.5*(-(-p_prime[0, 0]-p_prime[1, 0]) - np.sqrt((-p_prime[0, 0]-p_prime[1, 0])**2 - 4*(p_prime[0, 0]*p_prime[1, 0] - p_prime[2, 0]**2/4) + 0j))
a_1 = np.round(a_1, decimals=5)
a_2 = np.round(a_2, decimals=5)
if a_1 > 0 and (p_prime[0, 0] - a_1)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_1) >= 0:
p[0] = np.real(a_1)
elif a_2 > 0 and (p_prime[0, 0] - a_2)/(p_prime[0, 0]+p_prime[1, 0] - 2*a_2) >= 0:
p[0] = np.real(a_2)
else:
p[0] = np.inf
p[1] = -p[0] + (p_prime[0, 0] + p_prime[1, 0])
if p[0] == p[1]:
p[2] = 0
else:
p[2] = 0.5*(np.arcsin(p_prime[2, 0]/(p[1] - p[0])))
p_prime_2 = np.linalg.lstsq(X[:, 0:3], term, rcond=-1)
p_prime_2 = p_prime_2[0]
# p_prime_2 = np.dot(np.linalg.pinv(X[:, 0:3]), term)
R_x = 1/p[0]
R_y = 1/p[1]
Q_x = R_x**2*(p_prime_2[0] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
Q_y = R_y**2*(p_prime_2[1] - 0.5*p_prime_2[2]*np.tan(p[2])) - 1
phi = p[2]
return R_x, R_y, phi, Q_x, Q_y
def f_biconic_model(init, *data):
"""biconical model; inital guess: init=[a',b',d',u',v',w'], data to fit to: data= [x_i,y_i,z_i]"""
data = data[0]
c = (init[3]*data[0, :]**2 + init[4]*data[1, :]**2 + init[5]*data[0, :]*data[1, : | f.write("\t\t<param name=\"" + parm_name[i] + "\">" + str(param) + "</param>\n")
i += 1 | conditional_block |
|
click_differentiator_test_mode.py | 6]
time_1 = float(self.data_in[idx, 3])
time_2 = float(self.data_in[idx, 7])
audio1, sr = librosa.load(audio_dir_1, mono=False)
# find time of click's peak?
start_1 = 10925 + np.argmax(abs(audio1[1 , 10925 : 11035])) # why dim 1 and not 0?
audio2, sr = librosa.load(audio_dir_2, mono=False)
start_2 = 10925 + np.argmax(abs(audio2[1 , 10925 : 11035]))
audio = np.concatenate((audio1[:, start_2 : start_2 + 300], audio2[:, start_1 : start_1 +300]), axis=1)
if int(label_1) == int(label_2):
label = 1
else: | label = 0
## return audio, label, click_1_file_dir, click_1_time, click_2_file_dir, click_2_time
return (audio, label, audio_dir_1, time_1, audio_dir_2, time_2)
###### Model #################################
class SoundNet(nn.Module):
def __init__(self):
super(SoundNet, self).__init__()
self.conv1 = nn.Conv2d(1, 16, kernel_size=(64, 1), stride=(2, 1),
padding=(32, 0))
self.batchnorm1 = nn.BatchNorm2d(16, eps=1e-5, momentum=0.1)
self.relu1 = nn.ReLU(True)
self.maxpool1 = nn.MaxPool2d((8, 1), stride=(8, 1))
self.conv2 = nn.Conv2d(16, 32, kernel_size=(32, 1), stride=(2, 1),
padding=(16, 0))
self.batchnorm2 = nn.BatchNorm2d(32, eps=1e-5, momentum=0.1)
self.relu2 = nn.ReLU(True)
self.maxpool2 = nn.MaxPool2d((8, 1), stride=(8, 1))
self.conv3 = nn.Conv2d(32, 64, kernel_size=(16, 1), stride=(2, 1),
padding=(8, 0))
self.batchnorm3 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.1)
self.relu3 = nn.ReLU(True)
self.conv4 = nn.Conv2d(64, 128, kernel_size=(8, 1), stride=(2, 1),
padding=(4, 0))
self.batchnorm4 = nn.BatchNorm2d(128, eps=1e-5, momentum=0.1)
self.relu4 = nn.ReLU(True)
self.conv5 = nn.Conv2d(128, 256, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm5 = nn.BatchNorm2d(256, eps=1e-5, momentum=0.1)
self.relu5 = nn.ReLU(True)
self.maxpool5 = nn.MaxPool2d((4, 1), stride=(4, 1))
self.conv6 = nn.Conv2d(256, 512, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm6 = nn.BatchNorm2d(512, eps=1e-5, momentum=0.1)
self.relu6 = nn.ReLU(True)
self.conv7 = nn.Conv2d(512, 1024, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm7 = nn.BatchNorm2d(1024, eps=1e-5, momentum=0.1)
self.relu7 = nn.ReLU(True)
self.conv8_objs = nn.Conv2d(1024, 1000, kernel_size=(8, 1),
stride=(2, 1))
self.conv8_scns = nn.Conv2d(1024, 401, kernel_size=(8, 1),
stride=(2, 1))
def forward(self, waveform):
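        # Note: this forward pass only applies conv1-conv4 (plus the first two
        # max-pools) before flattening; the deeper layers declared in __init__
        # (conv5-conv8 and their norms) are left unused here.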
x = self.conv1(waveform.unsqueeze(1).permute(0,1,3,2))
x = self.batchnorm1(x)
x = self.relu1(x)
x = self.maxpool1(x)
x = self.conv2(x)
x = self.batchnorm2(x)
x = self.relu2(x)
x = self.maxpool2(x)
x = self.conv3(x)
x = self.batchnorm3(x)
x = self.relu3(x)
x = self.conv4(x)
x = self.batchnorm4(x)
x = self.relu4(x)
x = x.reshape(x.shape[0],-1)
return x
class value_net(nn.Module):
def __init__(self, symmetric=True):
super(value_net, self).__init__()
self.linear = nn.Linear(512, 2)
def forward(self, input_audio):
output = self.linear(input_audio)
return output
############################### Main method: click separator in test mode ######################
def run_click_separator_test_mode(audio_rootname, sep_model_version, sep_model_load_dir, exp_name, det_model_version,
start, end):
'''
Run click separator model (in test mode) to get same/diff whale predictions for all pairs of clicks in specified window of audio file 'audio_rootname'
- sep_model_version: click separator version name, to be used in naming directory to save predictions
- sep_model_load_dir: directory from which to load trained click separator model version
- exp_name: experiment name, not important.
- det_model_version: click detector version used earlier in the pipeline
- start (int): start time of window (in sec)
- end (int): end time of window (in sec)
Effect: saves all-pairs predictions in batches (usually only 1 batch) in pickle files in the following directory:
'/data/vision/torralba/scratch/ioannis/clustering/custom_test_pick_preds/'
+ det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
'''
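    # Example invocation (illustrative only; the separator/detector version
    # names, experiment name and checkpoint path are hypothetical placeholders,
    # while the audio name and window match the commented defaults below):
    #
    #   run_click_separator_test_mode(
    #       audio_rootname='sw061b001',
    #       sep_model_version='sep_v1',
    #       sep_model_load_dir='/path/to/separator_checkpoint.pth',
    #       exp_name='demo_exp',
    #       det_model_version='det_v1',
    #       start=0, end=235)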
############ Admin work (directories) ###################################################
if not os.path.exists('./ckpts'):
os.makedirs('./ckpts')
if not os.path.exists(os.path.join('./ckpts', exp_name)):
os.makedirs(os.path.join('./ckpts',exp_name))
###### Dataset Loading and Splitting##########
data_directory = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/correct_data_same_click_diff_click_correct_times.p'
total_data = pickle.load(open(data_directory,"rb"))
data_ordered_dir = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/file_ordered_correct_times.p'
file_ordered = pickle.load(open(data_directory,"rb"))
#######################################################################################################
# audio_rootname = 'sw061b001'
# start = 0
# end = 235
print('------Running click separator on detected clicks------\n')
print('Clicks: ', start, '-', end-1, '\n')
main_dir = '/data/vision/torralba/scratch/ioannis/clustering/'
# test_pick = main_dir + 'custom_test_pick_preds/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
test_pick = main_dir + 'custom_test_pick_preds/' + det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
audio_recordings_test = pickle.load(open(test_pick,"rb"))
# preds_save_dir = main_dir + 'detections_click_sep_preds/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
preds_save_dir = main_dir + 'detections_click_sep_preds/' + det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
if not os.path.exists(preds_save_dir):
os.makedirs(preds_save_dir)
############ End of admin work (directories) ###################################################
np.random.seed(0)
torch.manual_seed(0)
seq = SoundNet()
# seq = clickdetector()
seq.cuda()
# seq = nn.DataParallel(seq)
valnet = value_net()
valnet.cuda()
# valnet = nn.DataParallel(valnet)
# optimizer2 = optim.Adam(valnet.parameters(), lr=args.lr, weight_decay=args.weightdecay)
# optimizer = optim.Adam(seq.parameters(), lr=args.lr, weight | random_line_split |
|
click_differentiator_test_mode.py | , 1), stride=(2, 1),
padding=(8, 0))
self.batchnorm3 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.1)
self.relu3 = nn.ReLU(True)
self.conv4 = nn.Conv2d(64, 128, kernel_size=(8, 1), stride=(2, 1),
padding=(4, 0))
self.batchnorm4 = nn.BatchNorm2d(128, eps=1e-5, momentum=0.1)
self.relu4 = nn.ReLU(True)
self.conv5 = nn.Conv2d(128, 256, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm5 = nn.BatchNorm2d(256, eps=1e-5, momentum=0.1)
self.relu5 = nn.ReLU(True)
self.maxpool5 = nn.MaxPool2d((4, 1), stride=(4, 1))
self.conv6 = nn.Conv2d(256, 512, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm6 = nn.BatchNorm2d(512, eps=1e-5, momentum=0.1)
self.relu6 = nn.ReLU(True)
self.conv7 = nn.Conv2d(512, 1024, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm7 = nn.BatchNorm2d(1024, eps=1e-5, momentum=0.1)
self.relu7 = nn.ReLU(True)
self.conv8_objs = nn.Conv2d(1024, 1000, kernel_size=(8, 1),
stride=(2, 1))
self.conv8_scns = nn.Conv2d(1024, 401, kernel_size=(8, 1),
stride=(2, 1))
def forward(self, waveform):
x = self.conv1(waveform.unsqueeze(1).permute(0,1,3,2))
x = self.batchnorm1(x)
x = self.relu1(x)
x = self.maxpool1(x)
x = self.conv2(x)
x = self.batchnorm2(x)
x = self.relu2(x)
x = self.maxpool2(x)
x = self.conv3(x)
x = self.batchnorm3(x)
x = self.relu3(x)
x = self.conv4(x)
x = self.batchnorm4(x)
x = self.relu4(x)
x = x.reshape(x.shape[0],-1)
return x
class value_net(nn.Module):
def __init__(self, symmetric=True):
super(value_net, self).__init__()
self.linear = nn.Linear(512, 2)
def forward(self, input_audio):
output = self.linear(input_audio)
return output
############################### Main method: click separator in test mode ######################
def run_click_separator_test_mode(audio_rootname, sep_model_version, sep_model_load_dir, exp_name, det_model_version,
start, end):
'''
Run click separator model (in test mode) to get same/diff whale predictions for all pairs of clicks in specified window of audio file 'audio_rootname'
- sep_model_version: click separator version name, to be used in naming directory to save predictions
- sep_model_load_dir: directory from which to load trained click separator model version
- exp_name: experiment name, not important.
- det_model_version: click detector version used earlier in the pipeline
- start (int): start time of window (in sec)
- end (int): end time of window (in sec)
Effect: saves all-pairs predictions in batches (usually only 1 batch) in pickle files in the following directory:
'/data/vision/torralba/scratch/ioannis/clustering/custom_test_pick_preds/'
+ det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
'''
############ Admin work (directories) ###################################################
if not os.path.exists('./ckpts'):
os.makedirs('./ckpts')
if not os.path.exists(os.path.join('./ckpts', exp_name)):
os.makedirs(os.path.join('./ckpts',exp_name))
###### Dataset Loading and Splitting##########
data_directory = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/correct_data_same_click_diff_click_correct_times.p'
total_data = pickle.load(open(data_directory,"rb"))
data_ordered_dir = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/file_ordered_correct_times.p'
file_ordered = pickle.load(open(data_directory,"rb"))
#######################################################################################################
# audio_rootname = 'sw061b001'
# start = 0
# end = 235
print('------Running click separator on detected clicks------\n')
print('Clicks: ', start, '-', end-1, '\n')
main_dir = '/data/vision/torralba/scratch/ioannis/clustering/'
# test_pick = main_dir + 'custom_test_pick_preds/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
test_pick = main_dir + 'custom_test_pick_preds/' + det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
audio_recordings_test = pickle.load(open(test_pick,"rb"))
# preds_save_dir = main_dir + 'detections_click_sep_preds/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
preds_save_dir = main_dir + 'detections_click_sep_preds/' + det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
if not os.path.exists(preds_save_dir):
os.makedirs(preds_save_dir)
############ End of admin work (directories) ###################################################
np.random.seed(0)
torch.manual_seed(0)
seq = SoundNet()
# seq = clickdetector()
seq.cuda()
# seq = nn.DataParallel(seq)
valnet = value_net()
valnet.cuda()
# valnet = nn.DataParallel(valnet)
# optimizer2 = optim.Adam(valnet.parameters(), lr=args.lr, weight_decay=args.weightdecay)
# optimizer = optim.Adam(seq.parameters(), lr=args.lr, weight_decay=args.weightdecay)
# criterion = nn.CrossEntropyLoss()
test_dataset = sample_data(audio_recordings_test, file_ordered)
print('test dataset length: ', len(test_dataset))
test_dataloader = DataLoader(test_dataset, batch_size = len(test_dataset),
shuffle = False, num_workers = 20)
# predictions = []
checkpoint = torch.load(sep_model_load_dir) # NEED TO CHANGE
seq.load_state_dict(checkpoint['state_dict'])
valnet.load_state_dict(checkpoint['state_dict_valnet'])
seq.eval()
valnet.eval()
for i_batch, sample_batched in enumerate(test_dataloader): ### NEEDS CHANGEEEEEEEEE
print(i_batch)
# optimizer.zero_grad()
# optimizer2.zero_grad()
audio = sample_batched[0].type(torch.cuda.FloatTensor)
label = sample_batched[1].type(torch.cuda.FloatTensor)
click_1_file_dir, click_1_time, click_2_file_dir, click_2_time = sample_batched[2:] ## NEW
out = valnet(seq(audio))
## NEW ##
out = out.cpu().data.numpy()
labels_out = np.argmax(out,axis = 1)
label = label.cpu().data.numpy()
preds = np.array([list(click_1_file_dir), list(click_1_time),
list(click_2_file_dir), list(click_2_time),
labels_out, label], dtype=object)
preds = preds.T
print('predictions np array shape: ', preds.shape)
preds_dir = preds_save_dir
pickle.dump(preds, open(preds_dir + 'batch_' + str(i_batch) + '.p', "wb"))
cf_matrix_test = confusion_matrix(label, labels_out)
acc = 0
tp, fp, fn, tn = 0, 0, 0, 0
for i in range(labels_out.shape[0]):
if labels_out[i] == label[i]:
acc += 1
if labels_out[i] == 1 and label[i] == 1:
tp += 1
if labels_out[i] == 0 and label[i] == 0:
tn += 1
if labels_out[i] == 1 and label[i] == 0:
fp += 1
if labels_out[i] == 0 and label[i] == 1:
| fn += 1 | conditional_block |
|
click_differentiator_test_mode.py |
def __getitem__(self, idx):
## only for test mode
audio_dir_1, label_1 = self.data_in[idx, 0], self.data_in[idx, 2]
audio_dir_2, label_2 = self.data_in[idx, 4], self.data_in[idx, 6]
time_1 = float(self.data_in[idx, 3])
time_2 = float(self.data_in[idx, 7])
audio1, sr = librosa.load(audio_dir_1, mono=False)
# find time of click's peak?
start_1 = 10925 + np.argmax(abs(audio1[1 , 10925 : 11035])) # why dim 1 and not 0?
audio2, sr = librosa.load(audio_dir_2, mono=False)
start_2 = 10925 + np.argmax(abs(audio2[1 , 10925 : 11035]))
audio = np.concatenate((audio1[:, start_2 : start_2 + 300], audio2[:, start_1 : start_1 +300]), axis=1)
if int(label_1) == int(label_2):
label = 1
else:
label = 0
## return audio, label, click_1_file_dir, click_1_time, click_2_file_dir, click_2_time
return (audio, label, audio_dir_1, time_1, audio_dir_2, time_2)
###### Model #################################
class SoundNet(nn.Module):
def __init__(self):
super(SoundNet, self).__init__()
self.conv1 = nn.Conv2d(1, 16, kernel_size=(64, 1), stride=(2, 1),
padding=(32, 0))
self.batchnorm1 = nn.BatchNorm2d(16, eps=1e-5, momentum=0.1)
self.relu1 = nn.ReLU(True)
self.maxpool1 = nn.MaxPool2d((8, 1), stride=(8, 1))
self.conv2 = nn.Conv2d(16, 32, kernel_size=(32, 1), stride=(2, 1),
padding=(16, 0))
self.batchnorm2 = nn.BatchNorm2d(32, eps=1e-5, momentum=0.1)
self.relu2 = nn.ReLU(True)
self.maxpool2 = nn.MaxPool2d((8, 1), stride=(8, 1))
self.conv3 = nn.Conv2d(32, 64, kernel_size=(16, 1), stride=(2, 1),
padding=(8, 0))
self.batchnorm3 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.1)
self.relu3 = nn.ReLU(True)
self.conv4 = nn.Conv2d(64, 128, kernel_size=(8, 1), stride=(2, 1),
padding=(4, 0))
self.batchnorm4 = nn.BatchNorm2d(128, eps=1e-5, momentum=0.1)
self.relu4 = nn.ReLU(True)
self.conv5 = nn.Conv2d(128, 256, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm5 = nn.BatchNorm2d(256, eps=1e-5, momentum=0.1)
self.relu5 = nn.ReLU(True)
self.maxpool5 = nn.MaxPool2d((4, 1), stride=(4, 1))
self.conv6 = nn.Conv2d(256, 512, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm6 = nn.BatchNorm2d(512, eps=1e-5, momentum=0.1)
self.relu6 = nn.ReLU(True)
self.conv7 = nn.Conv2d(512, 1024, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm7 = nn.BatchNorm2d(1024, eps=1e-5, momentum=0.1)
self.relu7 = nn.ReLU(True)
self.conv8_objs = nn.Conv2d(1024, 1000, kernel_size=(8, 1),
stride=(2, 1))
self.conv8_scns = nn.Conv2d(1024, 401, kernel_size=(8, 1),
stride=(2, 1))
def forward(self, waveform):
x = self.conv1(waveform.unsqueeze(1).permute(0,1,3,2))
x = self.batchnorm1(x)
x = self.relu1(x)
x = self.maxpool1(x)
x = self.conv2(x)
x = self.batchnorm2(x)
x = self.relu2(x)
x = self.maxpool2(x)
x = self.conv3(x)
x = self.batchnorm3(x)
x = self.relu3(x)
x = self.conv4(x)
x = self.batchnorm4(x)
x = self.relu4(x)
x = x.reshape(x.shape[0],-1)
return x
class value_net(nn.Module):
def __init__(self, symmetric=True):
super(value_net, self).__init__()
self.linear = nn.Linear(512, 2)
def forward(self, input_audio):
output = self.linear(input_audio)
return output
############################### Main method: click separator in test mode ######################
def run_click_separator_test_mode(audio_rootname, sep_model_version, sep_model_load_dir, exp_name, det_model_version,
start, end):
'''
Run click separator model (in test mode) to get same/diff whale predictions for all pairs of clicks in specified window of audio file 'audio_rootname'
- sep_model_version: click separator version name, to be used in naming directory to save predictions
- sep_model_load_dir: directory from which to load trained click separator model version
- exp_name: experiment name, not important.
- det_model_version: click detector version used earlier in the pipeline
- start (int): start time of window (in sec)
- end (int): end time of window (in sec)
Effect: saves all-pairs predictions in batches (usually only 1 batch) in pickle files in the following directory:
'/data/vision/torralba/scratch/ioannis/clustering/custom_test_pick_preds/'
+ det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
'''
############ Admin work (directories) ###################################################
if not os.path.exists('./ckpts'):
os.makedirs('./ckpts')
if not os.path.exists(os.path.join('./ckpts', exp_name)):
os.makedirs(os.path.join('./ckpts',exp_name))
###### Dataset Loading and Splitting##########
data_directory = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/correct_data_same_click_diff_click_correct_times.p'
total_data = pickle.load(open(data_directory,"rb"))
data_ordered_dir = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/file_ordered_correct_times.p'
file_ordered = pickle.load(open(data_directory,"rb"))
#######################################################################################################
# audio_rootname = 'sw061b001'
# start = 0
# end = 235
print('------Running click separator on detected clicks------\n')
print('Clicks: ', start, '-', end-1, '\n')
main_dir = '/data/vision/torralba/scratch/ioannis/clustering/'
# test_pick = main_dir + 'custom_test_pick_preds/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
test_pick = main_dir + 'custom_test_pick_preds/' + det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
audio_recordings_test = pickle.load(open(test_pick,"rb"))
# preds_save_dir = main_dir + 'detections_click_sep_preds/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
preds_save_dir = main_dir + 'detections_click_sep_preds/' + det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
if not os.path.exists(preds_save_dir):
os.makedirs(preds_save_dir)
############ End of admin work (directories) ###################################################
np.random.seed(0)
torch.manual_seed(0)
seq = SoundNet()
# seq = clickdetector()
| return len(self.data_in) | identifier_body |
|
click_differentiator_test_mode.py | 6]
time_1 = float(self.data_in[idx, 3])
time_2 = float(self.data_in[idx, 7])
audio1, sr = librosa.load(audio_dir_1, mono=False)
# find time of click's peak?
start_1 = 10925 + np.argmax(abs(audio1[1 , 10925 : 11035])) # why dim 1 and not 0?
audio2, sr = librosa.load(audio_dir_2, mono=False)
start_2 = 10925 + np.argmax(abs(audio2[1 , 10925 : 11035]))
audio = np.concatenate((audio1[:, start_2 : start_2 + 300], audio2[:, start_1 : start_1 +300]), axis=1)
if int(label_1) == int(label_2):
label = 1
else:
label = 0
## return audio, label, click_1_file_dir, click_1_time, click_2_file_dir, click_2_time
return (audio, label, audio_dir_1, time_1, audio_dir_2, time_2)
###### Model #################################
class SoundNet(nn.Module):
def __init__(self):
super(SoundNet, self).__init__()
self.conv1 = nn.Conv2d(1, 16, kernel_size=(64, 1), stride=(2, 1),
padding=(32, 0))
self.batchnorm1 = nn.BatchNorm2d(16, eps=1e-5, momentum=0.1)
self.relu1 = nn.ReLU(True)
self.maxpool1 = nn.MaxPool2d((8, 1), stride=(8, 1))
self.conv2 = nn.Conv2d(16, 32, kernel_size=(32, 1), stride=(2, 1),
padding=(16, 0))
self.batchnorm2 = nn.BatchNorm2d(32, eps=1e-5, momentum=0.1)
self.relu2 = nn.ReLU(True)
self.maxpool2 = nn.MaxPool2d((8, 1), stride=(8, 1))
self.conv3 = nn.Conv2d(32, 64, kernel_size=(16, 1), stride=(2, 1),
padding=(8, 0))
self.batchnorm3 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.1)
self.relu3 = nn.ReLU(True)
self.conv4 = nn.Conv2d(64, 128, kernel_size=(8, 1), stride=(2, 1),
padding=(4, 0))
self.batchnorm4 = nn.BatchNorm2d(128, eps=1e-5, momentum=0.1)
self.relu4 = nn.ReLU(True)
self.conv5 = nn.Conv2d(128, 256, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm5 = nn.BatchNorm2d(256, eps=1e-5, momentum=0.1)
self.relu5 = nn.ReLU(True)
self.maxpool5 = nn.MaxPool2d((4, 1), stride=(4, 1))
self.conv6 = nn.Conv2d(256, 512, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm6 = nn.BatchNorm2d(512, eps=1e-5, momentum=0.1)
self.relu6 = nn.ReLU(True)
self.conv7 = nn.Conv2d(512, 1024, kernel_size=(4, 1), stride=(2, 1),
padding=(2, 0))
self.batchnorm7 = nn.BatchNorm2d(1024, eps=1e-5, momentum=0.1)
self.relu7 = nn.ReLU(True)
self.conv8_objs = nn.Conv2d(1024, 1000, kernel_size=(8, 1),
stride=(2, 1))
self.conv8_scns = nn.Conv2d(1024, 401, kernel_size=(8, 1),
stride=(2, 1))
def | (self, waveform):
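# waveform is expected as (batch, channels, samples); unsqueeze(1) adds a conv
# channel dimension and the permute gives (batch, 1, samples, channels), so the
# (k, 1) kernels below convolve along the time axis.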
x = self.conv1(waveform.unsqueeze(1).permute(0,1,3,2))
x = self.batchnorm1(x)
x = self.relu1(x)
x = self.maxpool1(x)
x = self.conv2(x)
x = self.batchnorm2(x)
x = self.relu2(x)
x = self.maxpool2(x)
x = self.conv3(x)
x = self.batchnorm3(x)
x = self.relu3(x)
x = self.conv4(x)
x = self.batchnorm4(x)
x = self.relu4(x)
x = x.reshape(x.shape[0],-1)
return x
class value_net(nn.Module):
def __init__(self, symmetric=True):
super(value_net, self).__init__()
self.linear = nn.Linear(512, 2)
def forward(self, input_audio):
output = self.linear(input_audio)
return output
############################### Main method: click separator in test mode ######################
def run_click_separator_test_mode(audio_rootname, sep_model_version, sep_model_load_dir, exp_name, det_model_version,
start, end):
'''
Run click separator model (in test mode) to get same/diff whale predictions for all pairs of clicks in specified window of audio file 'audio_rootname'
- sep_model_version: click separator version name, to be used in naming directory to save predictions
- sep_model_load_dir: directory from which to load trained click separator model version
- exp_name: experiment name; only used to name the local './ckpts' checkpoint directory
- det_model_version: click detector version used earlier in the pipeline
- start (int): start time of window (in sec)
- end (int): end time of window (in sec)
Effect: saves all-pairs predictions in batches (usually only 1 batch) as pickle files under the directory:
'/data/vision/torralba/scratch/ioannis/clustering/detections_click_sep_preds/'
+ det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
'''
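# Illustrative call (the separator/detector version names and load dir below are
# assumptions for this sketch, not values taken from a real run):
#   run_click_separator_test_mode('sw061b001', 'sep_v1', './ckpts/sep_v1',
#                                 'exp0', 'det_v2', 0, 235)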
############ Admin work (directories) ###################################################
if not os.path.exists('./ckpts'):
os.makedirs('./ckpts')
if not os.path.exists(os.path.join('./ckpts', exp_name)):
os.makedirs(os.path.join('./ckpts',exp_name))
###### Dataset Loading and Splitting##########
data_directory = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/correct_data_same_click_diff_click_correct_times.p'
total_data = pickle.load(open(data_directory,"rb"))
data_ordered_dir = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/file_ordered_correct_times.p'
file_ordered = pickle.load(open(data_ordered_dir,"rb"))
#######################################################################################################
# audio_rootname = 'sw061b001'
# start = 0
# end = 235
print('------Running click separator on detected clicks------\n')
print('Clicks: ', start, '-', end-1, '\n')
main_dir = '/data/vision/torralba/scratch/ioannis/clustering/'
# test_pick = main_dir + 'custom_test_pick_preds/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
test_pick = main_dir + 'custom_test_pick_preds/' + det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
audio_recordings_test = pickle.load(open(test_pick,"rb"))
# preds_save_dir = main_dir + 'detections_click_sep_preds/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
preds_save_dir = main_dir + 'detections_click_sep_preds/' + det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
if not os.path.exists(preds_save_dir):
os.makedirs(preds_save_dir)
############ End of admin work (directories) ###################################################
np.random.seed(0)
torch.manual_seed(0)
seq = SoundNet()
# seq = clickdetector()
seq.cuda()
# seq = nn.DataParallel(seq)
valnet = value_net()
valnet.cuda()
# valnet = nn.DataParallel(valnet)
# optimizer2 = optim.Adam(valnet.parameters(), lr=args.lr, weight_decay=args.weightdecay)
# optimizer = optim.Adam(seq.parameters(), lr=args.lr, | forward | identifier_name |
proto_connection.rs | (i: i32, p: i32) -> i32 {
let mut result = 1;
for _ in 0..p {
result *= i;
}
result
}
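// e.g. pow(2, 8) == 256; the benchmarks below use this helper to size queues
// and request counts.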
fn enqueue_packet_test(c: &mut Criterion) {
// take the cartesian product of the following conditions:
// - the packet is an event, a reply, or an error
// - pending_events and pending_replies are empty, have one element, or have
// many elements
enum PacketType {
Event,
Reply,
Error,
}
enum PacketCount {
Empty,
One,
Many,
}
use PacketCount::*;
use PacketType::*;
let mut group = c.benchmark_group("enqueue_packet");
for packet_ty in &[Event, Reply, Error] {
for packet_count in &[Empty, One, Many] {
let packet_ty_desc = match packet_ty {
Event => "event",
Reply => "reply",
Error => "error",
};
let packet_count_desc = match packet_count {
Empty => "no",
One => "one",
Many => "many",
};
let name = format!(
"enqueue_packet {} with {} packets",
packet_ty_desc, packet_count_desc
);
group.bench_function(name, |b| {
// generate a valid packet with the given first byte and sequence number
let mut seqno = 0u16;
let mut packet = move |ind: u8| {
let our_seqno = seqno + 1;
seqno += 1;
let mut v = vec![0; 32];
v[0] = ind;
// copy our_seqno to bytes 3 and 4
v[2..4].copy_from_slice(&our_seqno.to_ne_bytes());
v
};
// we need another one for make_conn
let mut packet2 = packet;
let queue_count = match packet_count {
PacketCount::Empty => 0,
PacketCount::One => 1,
PacketCount::Many => pow(2, 8),
};
// create a connection with the given stats
let mut make_conn = || {
let mut conn = Connection::new();
for _ in 0..queue_count {
// push a new event
conn.enqueue_packet(packet2(2));
}
for _ in 0..queue_count {
// push a new reply
conn.enqueue_packet(packet2(1));
}
conn
};
let mut conn = make_conn();
let packet = packet(match packet_ty {
Event => 2,
Reply => 1,
Error => 0,
});
b.iter(move || {
conn.enqueue_packet(packet.clone());
})
});
}
}
}
fn send_and_receive_request(c: &mut Criterion) {
// permutations:
// - send queue is empty or very full
// - receive queue is empty or very full
enum SendQueue {
SEmpty,
SFull,
}
enum RecvQueue {
REmpty,
RFull,
}
use RecvQueue::*;
use SendQueue::*;
let mut group = c.benchmark_group("send_and_receive_request");
for send_queue in &[SEmpty, SFull] {
for recv_queue in &[REmpty, RFull] {
let name = format!(
"send_and_receive_request (send {}, recv {})",
match send_queue {
SEmpty => "empty",
SFull => "full",
},
match recv_queue {
REmpty => "empty",
RFull => "full",
}
);
group.bench_function(name, |b| {
// create a new connection
let mut conn = Connection::new();
// if the send queue needs to be full, flood it with sent requests
if matches!(send_queue, SFull) {
for _ in 0..pow(2, 14) {
conn.send_request(match recv_queue {
REmpty => ReplyFdKind::NoReply,
RFull => ReplyFdKind::ReplyWithoutFDs,
});
}
}
// if the recv queue needs to be full, flood it with replies
if matches!(recv_queue, RFull) {
for _ in 0..pow(2, 14) {
let mut packet = vec![0; 32];
packet[0] = 1;
conn.enqueue_packet(packet);
}
}
// create a new packet
let mut packet = vec![0u8; 32];
packet[0] = 1;
b.iter(move || {
// send our request
let seq = conn.send_request(ReplyFdKind::ReplyWithoutFDs).unwrap();
// truncate to a u16
let seq_trunc = seq as u16;
// insert the sequence number at positions 2 and 3
packet[2..4].copy_from_slice(&seq_trunc.to_ne_bytes());
// enqueue the packet
conn.enqueue_packet(black_box(replace(&mut packet, vec![0u8; 32])));
// pop the reply
conn.poll_for_reply_or_error(seq)
})
});
}
}
}
fn try_parse_small_struct(c: &mut Criterion) {
// xproto::Rectangle is a pointer wide on 64-bit, use that
c.bench_function("try_parse an xproto::Rectangle", |b| {
let packet = [0x42u8; size_of::<Rectangle>()];
b.iter(|| Rectangle::try_parse(black_box(&packet)))
});
}
fn try_parse_large_struct(c: &mut Criterion) {
// xproto::Screen is a significantly larger structure, use that
const SCREEN_BASE_SIZE: usize = size_of::<Screen>() - size_of::<Vec<Depth>>() + size_of::<u8>();
const NUM_DEPTHS: usize = 3;
const DEPTH_SIZE: usize = 8;
const TOTAL_SIZE: usize = SCREEN_BASE_SIZE + (NUM_DEPTHS * DEPTH_SIZE);
c.bench_function("try_parse an xproto::Screen", |b| {
let mut packet = [0; TOTAL_SIZE];
packet[SCREEN_BASE_SIZE - 1] = NUM_DEPTHS as u8;
b.iter(|| Screen::try_parse(black_box(&packet)))
});
}
fn serialize_struct(c: &mut Criterion) {
// try the following:
// - send it down a TCP socket
// - send it down a Unix socket (if linux)
//
// this should relatively accurately tell us what kind of impact the buffering
// and writing have on the serialization time
//
// note that send() and recv() degenerate into sendmsg() and recvmsg(), at least
// on the Linux kernel end, so not using those functions should have no effect
enum SocketTy {
TryTcp,
TryUnix,
}
enum StructType {
Small,
Large,
}
use SocketTy::*;
use StructType::*;
let mut group = c.benchmark_group("serialize_struct");
for socket_ty in &[TryTcp, TryUnix] {
let mut fd: Box<dyn Write> = match socket_ty {
TryTcp => {
const PORT: u16 = 41234;
let listen = TcpListener::bind(("::1", PORT)).unwrap();
thread::spawn(move || {
let (mut sock, _) = listen.accept().unwrap();
// read until other sock gets dropped
let mut buf = [0u8; 1024];
loop {
if sock.read(&mut buf).is_err() {
break;
}
}
});
let sock = TcpStream::connect(("::1", PORT)).unwrap();
Box::new(sock)
}
TryUnix => {
#[cfg(unix)]
{
let (mut left, right) = UnixStream::pair().unwrap();
thread::spawn(move || {
let mut buf = [0u8; 1024];
loop {
if left.read(&mut buf).is_err() {
break;
}
}
});
Box::new(right)
}
#[cfg(not(unix))]
{
continue;
}
}
};
let try_desc = match socket_ty {
TryTcp => "TCP",
TryUnix => "Unix",
};
for struct_size in &[Small, Large] {
let size_desc = match struct_size {
Small => "small",
Large => "large",
};
let name = format!("serialize_struct {} {}", try_desc, size_desc);
group.bench_function(name, |b| {
b.iter(|| {
let bytes = match struct_size {
Small => {
let rect = Rectangle::default();
black_box(rect.serialize()).to_vec()
}
Large => {
let mut screen = Screen::default();
screen.allowed_depths.resize_with(3, Default::default);
black_box(screen.serialize())
}
};
// write the serialized bytes to the output
fd.write_all(&bytes).unwrap();
})
});
}
}
}
fn discard_reply(c: &mut Criterion) {
// Measure the performance of discard_reply()
fn get_connection_and_seqnos() -> (Connection, Vec<SequenceNumber>) {
let mut conn = Connection::new();
| pow | identifier_name |
|
proto_connection.rs | 1;
for _ in 0..p {
result *= i;
}
result
}
fn enqueue_packet_test(c: &mut Criterion) {
// take the cartesian product of the following conditions:
// - the packet is an event, a reply, or an error
// - pending_events and pending_replies are empty, have one element, or have
// many elements
enum PacketType {
Event,
Reply,
Error,
}
enum PacketCount {
Empty,
One,
Many,
}
use PacketCount::*;
use PacketType::*;
let mut group = c.benchmark_group("enqueue_packet");
for packet_ty in &[Event, Reply, Error] {
for packet_count in &[Empty, One, Many] {
let packet_ty_desc = match packet_ty {
Event => "event",
Reply => "reply",
Error => "error",
};
let packet_count_desc = match packet_count {
Empty => "no",
One => "one",
Many => "many",
};
let name = format!(
"enqueue_packet {} with {} packets",
packet_ty_desc, packet_count_desc
);
group.bench_function(name, |b| {
// generate a valid packet with the given first byte and sequence number
let mut seqno = 0u16;
let mut packet = move |ind: u8| {
let our_seqno = seqno + 1;
seqno += 1;
let mut v = vec![0; 32];
v[0] = ind;
// copy our_seqno to bytes 3 and 4
v[2..4].copy_from_slice(&our_seqno.to_ne_bytes());
v
};
// we need another one for make_conn
let mut packet2 = packet;
let queue_count = match packet_count {
PacketCount::Empty => 0,
PacketCount::One => 1,
PacketCount::Many => pow(2, 8),
};
// create a connection with the given stats
let mut make_conn = || {
let mut conn = Connection::new();
for _ in 0..queue_count {
// push a new event
conn.enqueue_packet(packet2(2));
}
for _ in 0..queue_count {
// push a new reply
conn.enqueue_packet(packet2(1));
}
conn
};
let mut conn = make_conn();
let packet = packet(match packet_ty {
Event => 2,
Reply => 1,
Error => 0,
});
b.iter(move || {
conn.enqueue_packet(packet.clone());
})
});
}
}
}
fn send_and_receive_request(c: &mut Criterion) {
// permutations:
// - send queue is empty or very full
// - receive queue is empty or very full
enum SendQueue {
SEmpty,
SFull,
}
enum RecvQueue {
REmpty,
RFull,
}
use RecvQueue::*;
use SendQueue::*;
let mut group = c.benchmark_group("send_and_receive_request");
for send_queue in &[SEmpty, SFull] {
for recv_queue in &[REmpty, RFull] {
let name = format!(
"send_and_receive_request (send {}, recv {})",
match send_queue {
SEmpty => "empty",
SFull => "full",
},
match recv_queue {
REmpty => "empty",
RFull => "full",
}
);
group.bench_function(name, |b| {
// create a new connection
let mut conn = Connection::new();
// if the send queue needs to be full, flood it with sent requests
if matches!(send_queue, SFull) {
for _ in 0..pow(2, 14) {
conn.send_request(match recv_queue {
REmpty => ReplyFdKind::NoReply,
RFull => ReplyFdKind::ReplyWithoutFDs,
});
}
}
// if the recv queue needs to be full, flood it with replies
if matches!(recv_queue, RFull) {
for _ in 0..pow(2, 14) {
let mut packet = vec![0; 32];
packet[0] = 1;
conn.enqueue_packet(packet);
}
}
// create a new packet
let mut packet = vec![0u8; 32];
packet[0] = 1;
b.iter(move || {
// send our request
let seq = conn.send_request(ReplyFdKind::ReplyWithoutFDs).unwrap();
// truncate to a u16
let seq_trunc = seq as u16;
// insert the sequence number at positions 2 and 3
packet[2..4].copy_from_slice(&seq_trunc.to_ne_bytes());
// enqueue the packet
conn.enqueue_packet(black_box(replace(&mut packet, vec![0u8; 32])));
// pop the reply
conn.poll_for_reply_or_error(seq)
})
});
}
}
}
fn try_parse_small_struct(c: &mut Criterion) {
// xproto::Rectangle is a pointer wide on 64-bit, use that
c.bench_function("try_parse an xproto::Rectangle", |b| {
let packet = [0x42u8; size_of::<Rectangle>()];
b.iter(|| Rectangle::try_parse(black_box(&packet)))
});
}
fn try_parse_large_struct(c: &mut Criterion) {
// xproto::Screen is a significantly larger structure, use that
const SCREEN_BASE_SIZE: usize = size_of::<Screen>() - size_of::<Vec<Depth>>() + size_of::<u8>();
const NUM_DEPTHS: usize = 3;
const DEPTH_SIZE: usize = 8;
const TOTAL_SIZE: usize = SCREEN_BASE_SIZE + (NUM_DEPTHS * DEPTH_SIZE);
c.bench_function("try_parse an xproto::Screen", |b| {
let mut packet = [0; TOTAL_SIZE];
packet[SCREEN_BASE_SIZE - 1] = NUM_DEPTHS as u8;
b.iter(|| Screen::try_parse(black_box(&packet)))
});
}
fn serialize_struct(c: &mut Criterion) {
// try the following:
// - send it down a TCP socket
// - send it down a Unix socket (if linux)
//
// this should relatively accurately tell us what kind of impact the buffering
// and writing have on the serialization time
//
// note that send() and recv() degenerate into sendmsg() and recvmsg(), at least
// on the Linux kernel end, so not using those functions should have no effect
enum SocketTy {
TryTcp,
TryUnix,
}
enum StructType {
Small,
Large,
}
use SocketTy::*;
use StructType::*;
let mut group = c.benchmark_group("serialize_struct");
for socket_ty in &[TryTcp, TryUnix] {
let mut fd: Box<dyn Write> = match socket_ty {
TryTcp => {
const PORT: u16 = 41234;
let listen = TcpListener::bind(("::1", PORT)).unwrap();
thread::spawn(move || {
let (mut sock, _) = listen.accept().unwrap();
// read until other sock gets dropped
let mut buf = [0u8; 1024];
loop {
if sock.read(&mut buf).is_err() {
break;
}
}
});
let sock = TcpStream::connect(("::1", PORT)).unwrap();
Box::new(sock)
}
TryUnix => {
#[cfg(unix)]
{
let (mut left, right) = UnixStream::pair().unwrap();
thread::spawn(move || {
let mut buf = [0u8; 1024];
loop { | if left.read(&mut buf).is_err() {
break;
}
}
});
Box::new(right)
}
#[cfg(not(unix))]
{
continue;
}
}
};
let try_desc = match socket_ty {
TryTcp => "TCP",
TryUnix => "Unix",
};
for struct_size in &[Small, Large] {
let size_desc = match struct_size {
Small => "small",
Large => "large",
};
let name = format!("serialize_struct {} {}", try_desc, size_desc);
group.bench_function(name, |b| {
b.iter(|| {
let bytes = match struct_size {
Small => {
let rect = Rectangle::default();
black_box(rect.serialize()).to_vec()
}
Large => {
let mut screen = Screen::default();
screen.allowed_depths.resize_with(3, Default::default);
black_box(screen.serialize())
}
};
// write the serialized bytes to the output
fd.write_all(&bytes).unwrap();
})
});
}
}
}
fn discard_reply(c: &mut Criterion) {
// Measure the performance of discard_reply()
fn get_connection_and_seqnos() -> (Connection, Vec<SequenceNumber>) {
let mut conn = Connection::new();
let seqnos = (0..pow(2, 13))
.map(|_| conn.send_request | random_line_split |
|
proto_connection.rs | 1;
for _ in 0..p {
result *= i;
}
result
}
fn enqueue_packet_test(c: &mut Criterion) {
// take the cartesian product of the following conditions:
// - the packet is an event, a reply, or an error
// - pending_events and pending_replies are empty, have one element, or have
// many elements
enum PacketType {
Event,
Reply,
Error,
}
enum PacketCount {
Empty,
One,
Many,
}
use PacketCount::*;
use PacketType::*;
let mut group = c.benchmark_group("enqueue_packet");
for packet_ty in &[Event, Reply, Error] {
for packet_count in &[Empty, One, Many] {
let packet_ty_desc = match packet_ty {
Event => "event",
Reply => "reply",
Error => "error",
};
let packet_count_desc = match packet_count {
Empty => "no",
One => "one",
Many => "many",
};
let name = format!(
"enqueue_packet {} with {} packets",
packet_ty_desc, packet_count_desc
);
group.bench_function(name, |b| {
// generate a valid packet with the given first byte and sequence number
let mut seqno = 0u16;
let mut packet = move |ind: u8| {
let our_seqno = seqno + 1;
seqno += 1;
let mut v = vec![0; 32];
v[0] = ind;
// copy our_seqno to bytes 3 and 4
v[2..4].copy_from_slice(&our_seqno.to_ne_bytes());
v
};
// we need another one for make_conn
let mut packet2 = packet;
let queue_count = match packet_count {
PacketCount::Empty => 0,
PacketCount::One => 1,
PacketCount::Many => pow(2, 8),
};
// create a connection with the given stats
let mut make_conn = || {
let mut conn = Connection::new();
for _ in 0..queue_count {
// push a new event
conn.enqueue_packet(packet2(2));
}
for _ in 0..queue_count {
// push a new reply
conn.enqueue_packet(packet2(1));
}
conn
};
let mut conn = make_conn();
let packet = packet(match packet_ty {
Event => 2,
Reply => 1,
Error => 0,
});
b.iter(move || {
conn.enqueue_packet(packet.clone());
})
});
}
}
}
fn send_and_receive_request(c: &mut Criterion) | for recv_queue in &[REmpty, RFull] {
let name = format!(
"send_and_receive_request (send {}, recv {})",
match send_queue {
SEmpty => "empty",
SFull => "full",
},
match recv_queue {
REmpty => "empty",
RFull => "full",
}
);
group.bench_function(name, |b| {
// create a new connection
let mut conn = Connection::new();
// if the send queue needs to be full, flood it with sent requests
if matches!(send_queue, SFull) {
for _ in 0..pow(2, 14) {
conn.send_request(match recv_queue {
REmpty => ReplyFdKind::NoReply,
RFull => ReplyFdKind::ReplyWithoutFDs,
});
}
}
// if the recv queue needs to be full, flood it with replies
if matches!(recv_queue, RFull) {
for _ in 0..pow(2, 14) {
let mut packet = vec![0; 32];
packet[0] = 1;
conn.enqueue_packet(packet);
}
}
// create a new packet
let mut packet = vec![0u8; 32];
packet[0] = 1;
b.iter(move || {
// send our request
let seq = conn.send_request(ReplyFdKind::ReplyWithoutFDs).unwrap();
// truncate to a u16
let seq_trunc = seq as u16;
// insert the sequence number at positions 2 and 3
packet[2..4].copy_from_slice(&seq_trunc.to_ne_bytes());
// enqueue the packet
conn.enqueue_packet(black_box(replace(&mut packet, vec![0u8; 32])));
// pop the reply
conn.poll_for_reply_or_error(seq)
})
});
}
}
}
fn try_parse_small_struct(c: &mut Criterion) {
// xproto::Rectangle is a pointer wide on 64-bit, use that
c.bench_function("try_parse an xproto::Rectangle", |b| {
let packet = [0x42u8; size_of::<Rectangle>()];
b.iter(|| Rectangle::try_parse(black_box(&packet)))
});
}
fn try_parse_large_struct(c: &mut Criterion) {
// xproto::Screen is a significantly larger structure, use that
const SCREEN_BASE_SIZE: usize = size_of::<Screen>() - size_of::<Vec<Depth>>() + size_of::<u8>();
const NUM_DEPTHS: usize = 3;
const DEPTH_SIZE: usize = 8;
const TOTAL_SIZE: usize = SCREEN_BASE_SIZE + (NUM_DEPTHS * DEPTH_SIZE);
c.bench_function("try_parse an xproto::Screen", |b| {
let mut packet = [0; TOTAL_SIZE];
packet[SCREEN_BASE_SIZE - 1] = NUM_DEPTHS as u8;
b.iter(|| Screen::try_parse(black_box(&packet)))
});
}
fn serialize_struct(c: &mut Criterion) {
// try the following:
// - send it down a TCP socket
// - send it down a Unix socket (if linux)
//
// this should relatively accurately tell us what kind of impact the buffering
// and writing have on the serialization time
//
// note that send() and recv() degenerate into sendmsg() and recvmsg(), at least
// on the Linux kernel end, so not using those functions should have no effect
enum SocketTy {
TryTcp,
TryUnix,
}
enum StructType {
Small,
Large,
}
use SocketTy::*;
use StructType::*;
let mut group = c.benchmark_group("serialize_struct");
for socket_ty in &[TryTcp, TryUnix] {
let mut fd: Box<dyn Write> = match socket_ty {
TryTcp => {
const PORT: u16 = 41234;
let listen = TcpListener::bind(("::1", PORT)).unwrap();
thread::spawn(move || {
let (mut sock, _) = listen.accept().unwrap();
// read until other sock gets dropped
let mut buf = [0u8; 1024];
loop {
if sock.read(&mut buf).is_err() {
break;
}
}
});
let sock = TcpStream::connect(("::1", PORT)).unwrap();
Box::new(sock)
}
TryUnix => {
#[cfg(unix)]
{
let (mut left, right) = UnixStream::pair().unwrap();
thread::spawn(move || {
let mut buf = [0u8; 1024];
loop {
if left.read(&mut buf).is_err() {
break;
}
}
});
Box::new(right)
}
#[cfg(not(unix))]
{
continue;
}
}
};
let try_desc = match socket_ty {
TryTcp => "TCP",
TryUnix => "Unix",
};
for struct_size in &[Small, Large] {
let size_desc = match struct_size {
Small => "small",
Large => "large",
};
let name = format!("serialize_struct {} {}", try_desc, size_desc);
group.bench_function(name, |b| {
b.iter(|| {
let bytes = match struct_size {
Small => {
let rect = Rectangle::default();
black_box(rect.serialize()).to_vec()
}
Large => {
let mut screen = Screen::default();
screen.allowed_depths.resize_with(3, Default::default);
black_box(screen.serialize())
}
};
// write the serialized bytes to the output
fd.write_all(&bytes).unwrap();
})
});
}
}
}
fn discard_reply(c: &mut Criterion) {
// Measure the performance of discard_reply()
fn get_connection_and_seqnos() -> (Connection, Vec<SequenceNumber>) {
let mut conn = Connection::new();
let seqnos = (0..pow(2, 13))
.map(|_| conn.send_request | {
// permutations:
// - send queue is empty or very full
// - receive queue is empty or very full
enum SendQueue {
SEmpty,
SFull,
}
enum RecvQueue {
REmpty,
RFull,
}
use RecvQueue::*;
use SendQueue::*;
let mut group = c.benchmark_group("send_and_receive_request");
for send_queue in &[SEmpty, SFull] { | identifier_body |
main.rs | {
// We could do precedence-based printing, but let's always put them in...
let mut first = true;
for x in xs.iter() {
write!(f, "{}", if first {'('} else {'|'})?;
first = false;
x.fmt(f)?;
}
write!(f, ")")?;
}
}
Ok(())
}
}
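// Recursive-descent parser for the route regexp: consumes N/S/E/W literals,
// '(' ... ')' groups and '|' alternatives, and returns the resulting Match
// tree. It stops at ')' or end of input so that nested calls can resume.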
fn parse_regexp(iter: &mut Peekable<Iter<char>>) -> Match {
// Current alternation, made of a sequence of concatenations.
let mut alternatives = Vec::new();
// Current concatenation being built.
let mut curr = Vec::new();
loop {
let c = match iter.peek() {
Some(c) => Some(*c),
None => None,
};
match c {
Some('(') => {
iter.next();
curr.push(parse_regexp(iter));
if iter.next() != Some(&')') {
panic!("Imbalanced brackets");
}
}
Some('|') => {
iter.next();
alternatives.push(Match::Concatenation(curr));
curr = Vec::new();
}
Some(')') => break,
None => break,
Some(c) => {
curr.push(Match::Literal(*c));
iter.next();
}
}
}
alternatives.push(Match::Concatenation(curr));
Match::Alternation(alternatives)
}
////////////////////////////////////////////////////////////////////////
// This is the bit for problem 20a...
//
// This just cleans up the regexp tree, without understanding paths.
fn opt_regexp(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect();
if xs.len() == 1 {
// Take first element, and discard rest.
xs.into_iter().next().unwrap()
} else {
Match::Alternation(xs)
}
}
Match::Concatenation(xs) => {
let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect();
if xs.len() == 1 {
// Take first element, and discard rest.
xs.into_iter().next().unwrap()
} else {
Match::Concatenation(xs)
}
}
Match::Literal(_) => m,
}
}
// This removes obvious, basic back-tracking (back-tracking that
// occurs only within a single concatenation of literals).
fn opt_backtracks(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
Match::Alternation(xs.into_iter().map(opt_backtracks).collect())
}
Match::Literal(_) => m,
Match::Concatenation(xs) => {
let mut xs = xs.into_iter().map(opt_backtracks).collect::<Vec<_>>();
let mut i = 0;
while i + 1 < xs.len() {
if if let (Match::Literal(a), Match::Literal(b)) = (&xs[i], &xs[i+1]) {
match (a, b) {
('N', 'S') => true,
('S', 'N') => true,
('W', 'E') => true,
('E', 'W') => true,
_ => false,
}
} else {
false
} {
xs.drain(i..i+2);
if i > 0 {
i -= 1;
}
} else {
i += 1;
}
}
Match::Concatenation(xs)
}
}
}
// Is this an empty match? Used by opt_empties.
fn is_empty(m: &Match) -> bool {
match m {
Match::Literal(_) => false,
Match::Concatenation(xs) => xs.iter().all(is_empty),
Match::Alternation(xs) => xs.len() > 0 && xs.iter().all(is_empty),
}
}
// And this removes sub-matches that can only match the empty string from concatenations. It's a
// specific optimisation, but seems key to this exercise.
fn opt_empties(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
Match::Alternation(xs.into_iter().map(opt_empties).collect())
}
Match::Literal(_) => m,
Match::Concatenation(xs) => {
Match::Concatenation(xs.into_iter().map(opt_empties).filter(|x| !is_empty(x)).collect())
}
}
}
////////////////////////////////////////////////////////////////////////
// Problem 20b part
//
// Find the route to the turning point for a sequence of literals
fn get_literal_partial(xs: &[Match]) -> Option<Vec<Match>> {
if xs.len() == 0 {
return None;
}
for elem in xs.iter().zip(xs.iter().rev()) {
match elem {
(Match::Literal('N'), Match::Literal('S')) => (),
(Match::Literal('S'), Match::Literal('N')) => (),
(Match::Literal('W'), Match::Literal('E')) => (),
(Match::Literal('E'), Match::Literal('W')) => (),
_ => return None,
}
}
Some(xs.iter().take(xs.len() / 2).map(|x| (*x).clone()).collect())
}
// Given a route that involves back-tracks, generate a list of routes
// up to the turning-around point. e.g. NEWS -> NE.
fn get_partials(m: &Match) -> Vec<Match> | element.push(partial);
res.push(Match::Concatenation(element));
}
}
res
}
}
}
}
}
////////////////////////////////////////////////////////////////////////
// Generate all the possible strings.
//
fn generate_all(m: &Match) -> HashSet<String> {
let mut res: HashSet<String> = HashSet::new();
match m {
Match::Literal(x) => {
res.insert(x.to_string());
()
}
Match::Alternation(xs) => {
for x in xs.iter() {
res.extend(generate_all(x).into_iter());
}
}
Match::Concatenation(xs) => {
// Ugh. Cross products are potentially expensive.
res.insert(String::new());
for x in xs.iter() {
let to_cross = generate_all(x);
add_cross_string(&mut res, &to_cross);
}
}
}
res
}
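// In-place cross product of path sets: append every string in `rhs` to every
// string in `lhs`, e.g. {"N", "S"} x {"E"} becomes {"NE", "SE"}.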
fn add_cross_string(lhs: &mut HashSet<String>, rhs: &HashSet<String>) {
let mut res = HashSet::new();
for s1 in lhs.iter() {
for s2 in rhs.iter() {
let mut s = s1.clone();
s.push_str(&s2);
res.insert(s);
}
}
// This is where I'd like to swap lhs and res.
lhs.clear();
lhs.extend(res.into_iter());
}
// Generate all the incremental paths
fn all_prefixes(strs: &HashSet<String>) -> HashSet<String> {
let mut seen = HashSet::new();
for str in strs.iter() {
for l in 0..str.len() {
seen.insert(str.get(0..l+1).unwrap().to_string());
}
}
seen
}
// Given a path, generate the coordinates of its end point.
fn get_coords(s: &str) -> (i32, i32) {
let y = s.chars().map(|c| match c {
'N' => 1,
'S' => -1,
_ => 0,
}).sum();
let x = s.chars().map(|c| match c {
'E' => 1,
'W' => -1,
_ => 0,
}).sum();
(x, y)
}
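// e.g. get_coords("NNE") == (1, 2): one step east, two steps north of the origin.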
// Build a mapping from coord to shortest route there.
fn build_mapping(strs: &HashSet<String>) -> HashMap<(i32, i32), usize> {
let mut map = HashMap::new();
for s in strs.iter() {
let xy = get_coords(s);
let l = s.len();
let e = map.entry(xy).or_insert(1000000);
if l < *e {
*e = l;
}
}
map
}
// Count the long routes
fn count_long(l: usize, mapping: &HashMap<(i32, i32), usize>) -> usize {
mapping.iter().filter(|(_, l2)| **l2 >= l).count()
}
fn main() {
let mut buffer = String::new();
io::stdin().read_to_string(&mut buffer).expect("Read error");
let chars = buffer.replace('^', "").replace('$', "").trim(). | {
match m {
Match::Alternation(xs) => {
let mut res = Vec::new();
for alternative in xs.iter() {
res.extend(get_partials(alternative).into_iter());
}
res
}
// A single literal will have no backtrackable parts.
Match::Literal(_) => Vec::new(),
Match::Concatenation(xs) => {
match get_literal_partial(xs) {
Some(x) => vec![Match::Concatenation(x)],
None => {
let mut res = Vec::new();
for i in 0..xs.len() {
let partials = get_partials(&xs[i]);
for partial in partials.into_iter() {
let mut element = xs.iter().take(i).map(|x| (*x).clone()).collect::<Vec<Match>>(); | identifier_body |
main.rs | => {
// We could do precedence-based printing, but let's always put them in...
let mut first = true;
for x in xs.iter() {
write!(f, "{}", if first {'('} else {'|'})?;
first = false;
x.fmt(f)?;
}
write!(f, ")")?;
}
}
Ok(())
}
}
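// Recursive-descent parser for the route regexp: consumes N/S/E/W literals,
// '(' ... ')' groups and '|' alternatives, and returns the resulting Match
// tree. It stops at ')' or end of input so that nested calls can resume.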
fn parse_regexp(iter: &mut Peekable<Iter<char>>) -> Match {
// Current alternation, made of a sequence of concatenations.
let mut alternatives = Vec::new();
// Current concatenation being built.
let mut curr = Vec::new();
loop {
let c = match iter.peek() {
Some(c) => Some(*c),
None => None,
};
match c {
Some('(') => {
iter.next();
curr.push(parse_regexp(iter));
if iter.next() != Some(&')') {
panic!("Imbalanced brackets");
}
}
Some('|') => {
iter.next();
alternatives.push(Match::Concatenation(curr));
curr = Vec::new();
}
Some(')') => break,
None => break,
Some(c) => {
curr.push(Match::Literal(*c));
iter.next();
}
}
}
alternatives.push(Match::Concatenation(curr));
Match::Alternation(alternatives)
}
////////////////////////////////////////////////////////////////////////
// This is the bit for problem 20a...
//
// This just cleans up the regexp tree, without understanding paths.
fn opt_regexp(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect();
if xs.len() == 1 {
// Take first element, and discard rest.
xs.into_iter().next().unwrap()
} else {
Match::Alternation(xs)
}
}
Match::Concatenation(xs) => {
let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect();
if xs.len() == 1 {
// Take first element, and discard rest.
xs.into_iter().next().unwrap()
} else {
Match::Concatenation(xs)
}
}
Match::Literal(_) => m,
}
}
// This removes obvious, basic back-tracking (back-tracking that
// occurs only within a single concatenation of literals).
fn opt_backtracks(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
Match::Alternation(xs.into_iter().map(opt_backtracks).collect())
}
Match::Literal(_) => m,
Match::Concatenation(xs) => {
let mut xs = xs.into_iter().map(opt_backtracks).collect::<Vec<_>>();
let mut i = 0;
while i + 1 < xs.len() {
if if let (Match::Literal(a), Match::Literal(b)) = (&xs[i], &xs[i+1]) {
match (a, b) {
('N', 'S') => true,
('S', 'N') => true,
('W', 'E') => true,
('E', 'W') => true,
_ => false,
}
} else {
false
} {
xs.drain(i..i+2);
if i > 0 {
i -= 1;
}
} else {
i += 1;
}
}
Match::Concatenation(xs) | }
}
}
// Is this an empty match? Used by opt_empties.
fn is_empty(m: &Match) -> bool {
match m {
Match::Literal(_) => false,
Match::Concatenation(xs) => xs.iter().all(is_empty),
Match::Alternation(xs) => xs.len() > 0 && xs.iter().all(is_empty),
}
}
// And this removes sub-matches that can only match the empty string from concatenations. It's a
// specific optimisation, but seems key to this exercise.
fn opt_empties(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
Match::Alternation(xs.into_iter().map(opt_empties).collect())
}
Match::Literal(_) => m,
Match::Concatenation(xs) => {
Match::Concatenation(xs.into_iter().map(opt_empties).filter(|x| !is_empty(x)).collect())
}
}
}
////////////////////////////////////////////////////////////////////////
// Problem 20b part
//
// Find the route to the turning point for a sequence of literals
fn get_literal_partial(xs: &[Match]) -> Option<Vec<Match>> {
if xs.len() == 0 {
return None;
}
for elem in xs.iter().zip(xs.iter().rev()) {
match elem {
(Match::Literal('N'), Match::Literal('S')) => (),
(Match::Literal('S'), Match::Literal('N')) => (),
(Match::Literal('W'), Match::Literal('E')) => (),
(Match::Literal('E'), Match::Literal('W')) => (),
_ => return None,
}
}
Some(xs.iter().take(xs.len() / 2).map(|x| (*x).clone()).collect())
}
// Given a route that involves back-tracks, generate a list of routes
// up to the turning-around point. e.g. NEWS -> NE.
fn get_partials(m: &Match) -> Vec<Match> {
match m {
Match::Alternation(xs) => {
let mut res = Vec::new();
for alternative in xs.iter() {
res.extend(get_partials(alternative).into_iter());
}
res
}
// A single literal will have no backtrackable parts.
Match::Literal(_) => Vec::new(),
Match::Concatenation(xs) => {
match get_literal_partial(xs) {
Some(x) => vec![Match::Concatenation(x)],
None => {
let mut res = Vec::new();
for i in 0..xs.len() {
let partials = get_partials(&xs[i]);
for partial in partials.into_iter() {
let mut element = xs.iter().take(i).map(|x| (*x).clone()).collect::<Vec<Match>>();
element.push(partial);
res.push(Match::Concatenation(element));
}
}
res
}
}
}
}
}
////////////////////////////////////////////////////////////////////////
// Generate all the possible strings.
//
fn generate_all(m: &Match) -> HashSet<String> {
let mut res: HashSet<String> = HashSet::new();
match m {
Match::Literal(x) => {
res.insert(x.to_string());
()
}
Match::Alternation(xs) => {
for x in xs.iter() {
res.extend(generate_all(x).into_iter());
}
}
Match::Concatenation(xs) => {
// Ugh. Cross products are potentially expensive.
res.insert(String::new());
for x in xs.iter() {
let to_cross = generate_all(x);
add_cross_string(&mut res, &to_cross);
}
}
}
res
}
fn add_cross_string(lhs: &mut HashSet<String>, rhs: &HashSet<String>) {
let mut res = HashSet::new();
for s1 in lhs.iter() {
for s2 in rhs.iter() {
let mut s = s1.clone();
s.push_str(&s2);
res.insert(s);
}
}
// This is where I'd like to swap lhs and res.
lhs.clear();
lhs.extend(res.into_iter());
}
// Generate all the incremental paths
fn all_prefixes(strs: &HashSet<String>) -> HashSet<String> {
let mut seen = HashSet::new();
for str in strs.iter() {
for l in 0..str.len() {
seen.insert(str.get(0..l+1).unwrap().to_string());
}
}
seen
}
// Given a path, generate the coordinates of its end point.
fn get_coords(s: &str) -> (i32, i32) {
let y = s.chars().map(|c| match c {
'N' => 1,
'S' => -1,
_ => 0,
}).sum();
let x = s.chars().map(|c| match c {
'E' => 1,
'W' => -1,
_ => 0,
}).sum();
(x, y)
}
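// e.g. get_coords("NNE") == (1, 2): one step east, two steps north of the origin.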
// Build a mapping from coord to shortest route there.
fn build_mapping(strs: &HashSet<String>) -> HashMap<(i32, i32), usize> {
let mut map = HashMap::new();
for s in strs.iter() {
let xy = get_coords(s);
let l = s.len();
let e = map.entry(xy).or_insert(1000000);
if l < *e {
*e = l;
}
}
map
}
// Count the long routes
fn count_long(l: usize, mapping: &HashMap<(i32, i32), usize>) -> usize {
mapping.iter().filter(|(_, l2)| **l2 >= l).count()
}
fn main() {
let mut buffer = String::new();
io::stdin().read_to_string(&mut buffer).expect("Read error");
let chars = buffer.replace('^', "").replace('$', "").trim().chars | random_line_split |
|
main.rs | => {
// We could do precedence-based printing, but let's always put them in...
let mut first = true;
for x in xs.iter() {
write!(f, "{}", if first {'('} else {'|'})?;
first = false;
x.fmt(f)?;
}
write!(f, ")")?;
}
}
Ok(())
}
}
fn parse_regexp(iter: &mut Peekable<Iter<char>>) -> Match {
// Current alternation, made of a sequence of concatenations.
let mut alternatives = Vec::new();
// Current concatenation being built.
let mut curr = Vec::new();
loop {
let c = match iter.peek() {
Some(c) => Some(*c),
None => None,
};
match c {
Some('(') => {
iter.next();
curr.push(parse_regexp(iter));
if iter.next() != Some(&')') {
panic!("Imbalanced brackets");
}
}
Some('|') => {
iter.next();
alternatives.push(Match::Concatenation(curr));
curr = Vec::new();
}
Some(')') => break,
None => break,
Some(c) => {
curr.push(Match::Literal(*c));
iter.next();
}
}
}
alternatives.push(Match::Concatenation(curr));
Match::Alternation(alternatives)
}
////////////////////////////////////////////////////////////////////////
// This is the bit for problem 20a...
//
// This just cleans up the regexp tree, without understanding paths.
fn opt_regexp(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect();
if xs.len() == 1 {
// Take first element, and discard rest.
xs.into_iter().next().unwrap()
} else {
Match::Alternation(xs)
}
}
Match::Concatenation(xs) => {
let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect();
if xs.len() == 1 {
// Take first element, and discard rest.
xs.into_iter().next().unwrap()
} else {
Match::Concatenation(xs)
}
}
Match::Literal(_) => m,
}
}
// This removes obvious, basic back-tracking (back-tracking that
// occurs only within a single concatenation of literals).
fn opt_backtracks(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
Match::Alternation(xs.into_iter().map(opt_backtracks).collect())
}
Match::Literal(_) => m,
Match::Concatenation(xs) => {
let mut xs = xs.into_iter().map(opt_backtracks).collect::<Vec<_>>();
let mut i = 0;
while i + 1 < xs.len() {
if if let (Match::Literal(a), Match::Literal(b)) = (&xs[i], &xs[i+1]) {
match (a, b) {
('N', 'S') => true,
('S', 'N') => true,
('W', 'E') => true,
('E', 'W') => true,
_ => false,
}
} else {
false
} {
xs.drain(i..i+2);
if i > 0 {
i -= 1;
}
} else {
i += 1;
}
}
Match::Concatenation(xs)
}
}
}
// Is this an empty match? Used by opt_empties.
fn is_empty(m: &Match) -> bool {
match m {
Match::Literal(_) => false,
Match::Concatenation(xs) => xs.iter().all(is_empty),
Match::Alternation(xs) => xs.len() > 0 && xs.iter().all(is_empty),
}
}
// And this removes sub-matches that can only match the empty string from concatenations. It's a
// specific optimisation, but seems key to this exercise.
fn opt_empties(m: Match) -> Match {
match m {
Match::Alternation(xs) => {
Match::Alternation(xs.into_iter().map(opt_empties).collect())
}
Match::Literal(_) => m,
Match::Concatenation(xs) => {
Match::Concatenation(xs.into_iter().map(opt_empties).filter(|x| !is_empty(x)).collect())
}
}
}
////////////////////////////////////////////////////////////////////////
// Problem 20b part
//
// Find the route to the turning point for a sequence of literals
fn get_literal_partial(xs: &[Match]) -> Option<Vec<Match>> {
if xs.len() == 0 {
return None;
}
for elem in xs.iter().zip(xs.iter().rev()) {
match elem {
(Match::Literal('N'), Match::Literal('S')) => (),
(Match::Literal('S'), Match::Literal('N')) => (),
(Match::Literal('W'), Match::Literal('E')) => (),
(Match::Literal('E'), Match::Literal('W')) => (),
_ => return None,
}
}
Some(xs.iter().take(xs.len() / 2).map(|x| (*x).clone()).collect())
}
// Given a route that involves back-tracks, generate a list of routes
// up to the turning-around point. e.g. NEWS -> NE.
fn get_partials(m: &Match) -> Vec<Match> {
match m {
Match::Alternation(xs) => {
let mut res = Vec::new();
for alternative in xs.iter() {
res.extend(get_partials(alternative).into_iter());
}
res
}
// A single literal will have no backtrackable parts.
Match::Literal(_) => Vec::new(),
Match::Concatenation(xs) => {
match get_literal_partial(xs) {
Some(x) => vec![Match::Concatenation(x)],
None => {
let mut res = Vec::new();
for i in 0..xs.len() {
let partials = get_partials(&xs[i]);
for partial in partials.into_iter() {
let mut element = xs.iter().take(i).map(|x| (*x).clone()).collect::<Vec<Match>>();
element.push(partial);
res.push(Match::Concatenation(element));
}
}
res
}
}
}
}
}
////////////////////////////////////////////////////////////////////////
// Generate all the possible strings.
//
fn generate_all(m: &Match) -> HashSet<String> {
let mut res: HashSet<String> = HashSet::new();
match m {
Match::Literal(x) => {
res.insert(x.to_string());
()
}
Match::Alternation(xs) => {
for x in xs.iter() {
res.extend(generate_all(x).into_iter());
}
}
Match::Concatenation(xs) => {
// Ugh. Cross products are potentially expensive.
res.insert(String::new());
for x in xs.iter() {
let to_cross = generate_all(x);
add_cross_string(&mut res, &to_cross);
}
}
}
res
}
fn add_cross_string(lhs: &mut HashSet<String>, rhs: &HashSet<String>) {
let mut res = HashSet::new();
for s1 in lhs.iter() {
for s2 in rhs.iter() {
let mut s = s1.clone();
s.push_str(&s2);
res.insert(s);
}
}
// This is where I'd like to swap lhs and res.
lhs.clear();
lhs.extend(res.into_iter());
}
// Generate all the incremental paths
fn all_prefixes(strs: &HashSet<String>) -> HashSet<String> {
let mut seen = HashSet::new();
for str in strs.iter() {
for l in 0..str.len() {
seen.insert(str.get(0..l+1).unwrap().to_string());
}
}
seen
}
// Given a path, generate the coordinates of its end point.
fn get_coords(s: &str) -> (i32, i32) {
let y = s.chars().map(|c| match c {
'N' => 1,
'S' => -1,
_ => 0,
}).sum();
let x = s.chars().map(|c| match c {
'E' => 1,
'W' => -1,
_ => 0,
}).sum();
(x, y)
}
// Build a mapping from coord to shortest route there.
fn build_mapping(strs: &HashSet<String>) -> HashMap<(i32, i32), usize> {
let mut map = HashMap::new();
for s in strs.iter() {
let xy = get_coords(s);
let l = s.len();
let e = map.entry(xy).or_insert(1000000);
if l < *e {
*e = l;
}
}
map
}
// Count the long routes
fn | (l: usize, mapping: &HashMap<(i32, i32), usize>) -> usize {
mapping.iter().filter(|(_, l2)| **l2 >= l).count()
}
fn main() {
let mut buffer = String::new();
io::stdin().read_to_string(&mut buffer).expect("Read error");
let chars = buffer.replace('^', "").replace('$', "").trim(). | count_long | identifier_name |
precheck.go | requirements.`,
Example: ` # Verify that Istio can be installed or upgraded
istioctl x precheck
# Check only a single namespace
istioctl x precheck --namespace default`,
RunE: func(cmd *cobra.Command, args []string) (err error) {
cli, err := ctx.CLIClientWithRevision(opts.Revision)
if err != nil {
return err
}
msgs := diag.Messages{}
if !skipControlPlane {
msgs, err = checkControlPlane(ctx)
if err != nil {
return err
}
}
nsmsgs, err := checkDataPlane(cli, ctx.Namespace())
if err != nil {
return err
}
msgs.Add(nsmsgs...)
// Print all the messages to stdout in the specified format
msgs = msgs.SortedDedupedCopy()
output, err := formatting.Print(msgs, formatting.LogFormat, false)
if err != nil {
return err
}
if len(msgs) == 0 {
fmt.Fprintf(cmd.ErrOrStderr(), color.New(color.FgGreen).Sprint("✔")+" No issues found when checking the cluster. Istio is safe to install or upgrade!\n"+
" To get started, check out https://istio.io/latest/docs/setup/getting-started/\n")
} else {
fmt.Fprintln(cmd.OutOrStdout(), output)
}
for _, m := range msgs {
if m.Type.Level().IsWorseThanOrEqualTo(diag.Warning) {
e := fmt.Sprintf(`Issues found when checking the cluster. Istio may not be safe to install or upgrade.
See %s for more information about causes and resolutions.`, url.ConfigAnalysis)
return errors.New(e)
}
}
return nil
},
}
cmd.PersistentFlags().BoolVar(&skipControlPlane, "skip-controlplane", false, "skip checking the control plane")
opts.AttachControlPlaneFlags(cmd)
return cmd
}
func checkControlPlane(ctx cli.Context) (diag.Messages, error) {
cli, err := ctx.CLIClient()
if err != nil {
return nil, err
}
msgs := diag.Messages{}
m, err := checkServerVersion(cli)
if err != nil {
return nil, err
}
msgs = append(msgs, m...)
msgs = append(msgs, checkInstallPermissions(cli, ctx.IstioNamespace())...)
gwMsg, err := checkGatewayAPIs(cli)
if err != nil {
return nil, err
} |
// TODO: add more checks
sa := local.NewSourceAnalyzer(
analysis.Combine("upgrade precheck", &maturity.AlphaAnalyzer{}),
resource.Namespace(ctx.Namespace()),
resource.Namespace(ctx.IstioNamespace()),
nil,
)
if err != nil {
return nil, err
}
sa.AddRunningKubeSource(cli)
cancel := make(chan struct{})
result, err := sa.Analyze(cancel)
if err != nil {
return nil, err
}
if result.Messages != nil {
msgs = append(msgs, result.Messages...)
}
return msgs, nil
}
// Checks that if the user has gateway APIs, they are the minimum version.
// It is ok to not have them, but they must be at least v1beta1 if they do.
func checkGatewayAPIs(cli kube.CLIClient) (diag.Messages, error) {
msgs := diag.Messages{}
res, err := cli.Ext().ApiextensionsV1().CustomResourceDefinitions().List(context.Background(), metav1.ListOptions{})
if err != nil {
return nil, err
}
betaKinds := sets.New(gvk.KubernetesGateway.Kind, gvk.GatewayClass.Kind, gvk.HTTPRoute.Kind, gvk.ReferenceGrant.Kind)
for _, r := range res.Items {
if r.Spec.Group != gvk.KubernetesGateway.Group {
continue
}
if !betaKinds.Contains(r.Spec.Names.Kind) {
continue
}
versions := extractCRDVersions(&r)
has := "none"
if len(versions) > 0 {
has = strings.Join(sets.SortedList(versions), ",")
}
if !versions.Contains(gvk.KubernetesGateway.Version) {
origin := kube3.Origin{
Type: gvk.CustomResourceDefinition,
FullName: resource.FullName{
Namespace: resource.Namespace(r.Namespace),
Name: resource.LocalName(r.Name),
},
ResourceVersion: resource.Version(r.ResourceVersion),
}
r := &resource.Instance{
Origin: &origin,
}
msgs.Add(msg.NewUnsupportedGatewayAPIVersion(r, has, gvk.KubernetesGateway.Version))
}
}
return msgs, nil
}
func extractCRDVersions(r *crd.CustomResourceDefinition) sets.String {
res := sets.New[string]()
for _, v := range r.Spec.Versions {
if v.Served {
res.Insert(v.Name)
}
}
return res
}
func checkInstallPermissions(cli kube.CLIClient, istioNamespace string) diag.Messages {
Resources := []struct {
namespace string
group string
version string
name string
}{
{
version: "v1",
name: "Namespace",
},
{
namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "ClusterRole",
},
{
namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "ClusterRoleBinding",
},
{
namespace: istioNamespace,
group: "apiextensions.k8s.io",
version: "v1",
name: "CustomResourceDefinition",
},
{
namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "Role",
},
{
namespace: istioNamespace,
version: "v1",
name: "ServiceAccount",
},
{
namespace: istioNamespace,
version: "v1",
name: "Service",
},
{
namespace: istioNamespace,
group: "apps",
version: "v1",
name: "Deployments",
},
{
namespace: istioNamespace,
version: "v1",
name: "ConfigMap",
},
{
group: "admissionregistration.k8s.io",
version: "v1",
name: "MutatingWebhookConfiguration",
},
{
group: "admissionregistration.k8s.io",
version: "v1",
name: "ValidatingWebhookConfiguration",
},
}
msgs := diag.Messages{}
for _, r := range Resources {
err := checkCanCreateResources(cli, r.namespace, r.group, r.version, r.name)
if err != nil {
msgs.Add(msg.NewInsufficientPermissions(&resource.Instance{Origin: clusterOrigin{}}, r.name, err.Error()))
}
}
return msgs
}
func checkCanCreateResources(c kube.CLIClient, namespace, group, version, name string) error {
s := &authorizationapi.SelfSubjectAccessReview{
Spec: authorizationapi.SelfSubjectAccessReviewSpec{
ResourceAttributes: &authorizationapi.ResourceAttributes{
Namespace: namespace,
Verb: "create",
Group: group,
Version: version,
Resource: name,
},
},
}
response, err := c.Kube().AuthorizationV1().SelfSubjectAccessReviews().Create(context.Background(), s, metav1.CreateOptions{})
if err != nil {
return err
}
if !response.Status.Allowed {
if len(response.Status.Reason) > 0 {
return errors.New(response.Status.Reason)
}
return errors.New("permission denied")
}
return nil
}
func checkServerVersion(cli kube.CLIClient) (diag.Messages, error) {
v, err := cli.GetKubernetesVersion()
if err != nil {
return nil, fmt.Errorf("failed to get the Kubernetes version: %v", err)
}
compatible, err := k8sversion.CheckKubernetesVersion(v)
if err != nil {
return nil, err
}
if !compatible {
return []diag.Message{
msg.NewUnsupportedKubernetesVersion(&resource.Instance{Origin: clusterOrigin{}}, v.String(), fmt.Sprintf("1.%d", k8sversion.MinK8SVersion)),
}, nil
}
return nil, nil
}
func checkDataPlane(cli kube.CLIClient, namespace string) (diag.Messages, error) {
msgs := diag.Messages{}
m, err := checkListeners(cli, namespace)
if err != nil {
return nil, err
}
msgs = append(msgs, m...)
// TODO: add more checks
return msgs, nil
}
var networkingChanges, _ = goversion.NewSemver("1.10.0")
func fromLegacyNetworkingVersion(pod v1.Pod) bool {
for _, c := range pod.Spec.Containers {
if c.Name != "istio-proxy" {
continue
}
_, tag, | msgs = append(msgs, gwMsg...) | random_line_split |
precheck.go | name: "ValidatingWebhookConfiguration",
},
}
msgs := diag.Messages{}
for _, r := range Resources {
err := checkCanCreateResources(cli, r.namespace, r.group, r.version, r.name)
if err != nil {
msgs.Add(msg.NewInsufficientPermissions(&resource.Instance{Origin: clusterOrigin{}}, r.name, err.Error()))
}
}
return msgs
}
func checkCanCreateResources(c kube.CLIClient, namespace, group, version, name string) error {
s := &authorizationapi.SelfSubjectAccessReview{
Spec: authorizationapi.SelfSubjectAccessReviewSpec{
ResourceAttributes: &authorizationapi.ResourceAttributes{
Namespace: namespace,
Verb: "create",
Group: group,
Version: version,
Resource: name,
},
},
}
response, err := c.Kube().AuthorizationV1().SelfSubjectAccessReviews().Create(context.Background(), s, metav1.CreateOptions{})
if err != nil {
return err
}
if !response.Status.Allowed {
if len(response.Status.Reason) > 0 {
return errors.New(response.Status.Reason)
}
return errors.New("permission denied")
}
return nil
}
func checkServerVersion(cli kube.CLIClient) (diag.Messages, error) {
v, err := cli.GetKubernetesVersion()
if err != nil {
return nil, fmt.Errorf("failed to get the Kubernetes version: %v", err)
}
compatible, err := k8sversion.CheckKubernetesVersion(v)
if err != nil {
return nil, err
}
if !compatible {
return []diag.Message{
msg.NewUnsupportedKubernetesVersion(&resource.Instance{Origin: clusterOrigin{}}, v.String(), fmt.Sprintf("1.%d", k8sversion.MinK8SVersion)),
}, nil
}
return nil, nil
}
func checkDataPlane(cli kube.CLIClient, namespace string) (diag.Messages, error) {
msgs := diag.Messages{}
m, err := checkListeners(cli, namespace)
if err != nil {
return nil, err
}
msgs = append(msgs, m...)
// TODO: add more checks
return msgs, nil
}
var networkingChanges, _ = goversion.NewSemver("1.10.0")
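// fromLegacyNetworkingVersion reports whether the pod's istio-proxy image tag
// predates the 1.10 networking changes; if the tag cannot be parsed it returns
// true so the extra listener checks below still run.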
func fromLegacyNetworkingVersion(pod v1.Pod) bool {
for _, c := range pod.Spec.Containers {
if c.Name != "istio-proxy" {
continue
}
_, tag, _ := strings.Cut(c.Image, ":")
ver, err := pkgversion.TagToVersionString(tag)
if err != nil {
return true // If we aren't sure, default to doing more checks than needed
}
sv, err := goversion.NewSemver(ver)
if err != nil {
return true // If we aren't sure, default to doing more checks than needed
}
return sv.LessThan(networkingChanges)
}
return false
}
// checkListeners checks for workloads that would be broken by https://istio.io/latest/blog/2021/upcoming-networking-changes/
func checkListeners(cli kube.CLIClient, namespace string) (diag.Messages, error) {
pods, err := cli.Kube().CoreV1().Pods(namespace).List(context.Background(), metav1.ListOptions{
// Find all running pods
FieldSelector: "status.phase=Running",
// Find all injected pods. We don't care about non-injected pods, because the new behavior
// mirrors Kubernetes; this is only a breaking change for existing Istio users.
LabelSelector: "security.istio.io/tlsMode=istio",
})
if err != nil {
return nil, err
}
var messages diag.Messages = make([]diag.Message, 0)
g := errgroup.Group{}
sem := semaphore.NewWeighted(25)
for _, pod := range pods.Items {
pod := pod
if !fromLegacyNetworkingVersion(pod) {
// Skip check. This pod is already on a version where the change has been made; if they were going
// to break they would already be broken.
continue
}
g.Go(func() error {
_ = sem.Acquire(context.Background(), 1)
defer sem.Release(1)
// Fetch list of all clusters to get which ports we care about
resp, err := cli.EnvoyDo(context.Background(), pod.Name, pod.Namespace, "GET", "config_dump?resource=dynamic_active_clusters&mask=cluster.name")
if err != nil {
fmt.Println("failed to get config dump: ", err)
return nil
}
ports, err := extractInboundPorts(resp)
if err != nil {
fmt.Println("failed to get ports: ", err)
return nil
}
// Next, look at what ports the pod is actually listening on
// This requires parsing the output from ss; the version we use doesn't support JSON
out, _, err := cli.PodExec(pod.Name, pod.Namespace, "istio-proxy", "ss -ltnH")
if err != nil {
if strings.Contains(err.Error(), "executable file not found") {
// Likely distroless or other custom build without ss. Nothing we can do here...
return nil
}
fmt.Println("failed to get listener state: ", err)
return nil
}
for _, ss := range strings.Split(out, "\n") {
if len(ss) == 0 {
continue
}
bind, port, err := net.SplitHostPort(getColumn(ss, 3))
if err != nil {
fmt.Println("failed to get parse state: ", err)
continue
}
ip, _ := netip.ParseAddr(bind)
portn, _ := strconv.Atoi(port)
if _, f := ports[portn]; f {
c := ports[portn]
if bind == "" {
continue
} else if bind == "*" || ip.IsUnspecified() {
c.Wildcard = true
} else if ip.IsLoopback() {
c.Lo = true
} else {
c.Explicit = true
}
ports[portn] = c
}
}
origin := &kube3.Origin{
Type: gvk.Pod,
FullName: resource.FullName{
Namespace: resource.Namespace(pod.Namespace),
Name: resource.LocalName(pod.Name),
},
ResourceVersion: resource.Version(pod.ResourceVersion),
}
for port, status := range ports {
// Binding to localhost no longer works out of the box on Istio 1.10+, give them a warning.
if status.Lo {
messages.Add(msg.NewLocalhostListener(&resource.Instance{Origin: origin}, fmt.Sprint(port)))
}
}
return nil
})
}
if err := g.Wait(); err != nil {
return nil, err
}
return messages, nil
}
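// getColumn returns the col-th (0-indexed) whitespace-separated field of line;
// for an `ss -ltnH` row, column 3 is the local address:port the socket is bound to.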
func getColumn(line string, col int) string {
res := []byte{}
prevSpace := false
for _, c := range line {
if col < 0 {
return string(res)
}
if c == ' ' {
if !prevSpace {
col--
}
prevSpace = true
continue
}
prevSpace = false
if col == 0 {
res = append(res, byte(c))
}
}
return string(res)
}
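// extractInboundPorts parses an Envoy config dump of dynamic active clusters and
// returns the inbound ports found there, each mapped to an empty bindStatus that
// checkListeners then fills in from the pod's actual listeners.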
func extractInboundPorts(configdump []byte) (map[int]bindStatus, error) {
ports := map[int]bindStatus{}
cd := &admin.ConfigDump{}
if err := protomarshal.Unmarshal(configdump, cd); err != nil {
return nil, err
}
for _, cdump := range cd.Configs {
clw := &admin.ClustersConfigDump_DynamicCluster{}
if err := cdump.UnmarshalTo(clw); err != nil {
return nil, err
}
cl := &cluster.Cluster{}
if err := clw.Cluster.UnmarshalTo(cl); err != nil {
return nil, err
}
dir, _, _, port := model.ParseSubsetKey(cl.Name)
if dir == model.TrafficDirectionInbound {
ports[port] = bindStatus{}
}
}
return ports, nil
}
type bindStatus struct {
Lo bool
Wildcard bool
Explicit bool
}
func (b bindStatus) Any() bool {
return b.Lo || b.Wildcard || b.Explicit
}
func (b bindStatus) String() string {
res := []string{}
if b.Lo {
res = append(res, "Localhost")
}
if b.Wildcard {
res = append(res, "Wildcard")
}
if b.Explicit {
res = append(res, "Explicit")
}
if len(res) == 0 {
return "Unknown"
}
return strings.Join(res, ", ")
}
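// --- Illustrative example (standalone file, not part of precheck.go) ---
// A minimal, self-contained sketch of how a listener bind address taken from
// `ss -ltnH` output is classified into the bindStatus fields above, mirroring
// the branch in checkListeners. The sample addresses below are hypothetical.
package main

import (
	"fmt"
	"net"
	"net/netip"
)

func classify(addrPort string) string {
	bind, _, err := net.SplitHostPort(addrPort)
	if err != nil {
		return "unparseable"
	}
	ip, _ := netip.ParseAddr(bind)
	switch {
	case bind == "":
		return "skipped"
	case bind == "*" || ip.IsUnspecified():
		return "Wildcard"
	case ip.IsLoopback():
		return "Localhost" // the case that triggers the LocalhostListener message
	default:
		return "Explicit"
	}
}

func main() {
	for _, a := range []string{"0.0.0.0:8080", "127.0.0.1:15000", "10.0.0.5:9090"} {
		fmt.Println(a, "=>", classify(a))
	}
}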
// clusterOrigin defines an Origin that refers to the cluster
type clusterOrigin struct{}
func (o clusterOrigin) String() string {
return ""
}
func (o clusterOrigin) FriendlyName() string {
return "Cluster"
}
func (o clusterOrigin) Comparator() string {
return o.FriendlyName()
}
func (o clusterOrigin) Namespace() resource.Namespace {
return ""
}
func (o clusterOrigin) Ref | erence() | identifier_name |
|
precheck.go | namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "Role",
},
{
namespace: istioNamespace,
version: "v1",
name: "ServiceAccount",
},
{
namespace: istioNamespace,
version: "v1",
name: "Service",
},
{
namespace: istioNamespace,
group: "apps",
version: "v1",
name: "Deployments",
},
{
namespace: istioNamespace,
version: "v1",
name: "ConfigMap",
},
{
group: "admissionregistration.k8s.io",
version: "v1",
name: "MutatingWebhookConfiguration",
},
{
group: "admissionregistration.k8s.io",
version: "v1",
name: "ValidatingWebhookConfiguration",
},
}
msgs := diag.Messages{}
for _, r := range Resources {
err := checkCanCreateResources(cli, r.namespace, r.group, r.version, r.name)
if err != nil {
msgs.Add(msg.NewInsufficientPermissions(&resource.Instance{Origin: clusterOrigin{}}, r.name, err.Error()))
}
}
return msgs
}
func checkCanCreateResources(c kube.CLIClient, namespace, group, version, name string) error {
s := &authorizationapi.SelfSubjectAccessReview{
Spec: authorizationapi.SelfSubjectAccessReviewSpec{
ResourceAttributes: &authorizationapi.ResourceAttributes{
Namespace: namespace,
Verb: "create",
Group: group,
Version: version,
Resource: name,
},
},
}
response, err := c.Kube().AuthorizationV1().SelfSubjectAccessReviews().Create(context.Background(), s, metav1.CreateOptions{})
if err != nil {
return err
}
if !response.Status.Allowed {
if len(response.Status.Reason) > 0 {
return errors.New(response.Status.Reason)
}
return errors.New("permission denied")
}
return nil
}
func checkServerVersion(cli kube.CLIClient) (diag.Messages, error) {
v, err := cli.GetKubernetesVersion()
if err != nil {
return nil, fmt.Errorf("failed to get the Kubernetes version: %v", err)
}
compatible, err := k8sversion.CheckKubernetesVersion(v)
if err != nil {
return nil, err
}
if !compatible {
return []diag.Message{
msg.NewUnsupportedKubernetesVersion(&resource.Instance{Origin: clusterOrigin{}}, v.String(), fmt.Sprintf("1.%d", k8sversion.MinK8SVersion)),
}, nil
}
return nil, nil
}
func checkDataPlane(cli kube.CLIClient, namespace string) (diag.Messages, error) {
msgs := diag.Messages{}
m, err := checkListeners(cli, namespace)
if err != nil {
return nil, err
}
msgs = append(msgs, m...)
// TODO: add more checks
return msgs, nil
}
var networkingChanges, _ = goversion.NewSemver("1.10.0")
func fromLegacyNetworkingVersion(pod v1.Pod) bool {
for _, c := range pod.Spec.Containers {
if c.Name != "istio-proxy" {
continue
}
_, tag, _ := strings.Cut(c.Image, ":")
ver, err := pkgversion.TagToVersionString(tag)
if err != nil {
return true // If we aren't sure, default to doing more checks than needed
}
sv, err := goversion.NewSemver(ver)
if err != nil {
return true // If we aren't sure, default to doing more checks than needed
}
return sv.LessThan(networkingChanges)
}
return false
}
// checkListeners checks for workloads that would be broken by https://istio.io/latest/blog/2021/upcoming-networking-changes/
func checkListeners(cli kube.CLIClient, namespace string) (diag.Messages, error) {
pods, err := cli.Kube().CoreV1().Pods(namespace).List(context.Background(), metav1.ListOptions{
// Find all running pods
FieldSelector: "status.phase=Running",
// Find all injected pods. We don't care about non-injected pods, because the new behavior
// mirrors Kubernetes; this is only a breaking change for existing Istio users.
LabelSelector: "security.istio.io/tlsMode=istio",
})
if err != nil {
return nil, err
}
var messages diag.Messages = make([]diag.Message, 0)
g := errgroup.Group{}
sem := semaphore.NewWeighted(25)
for _, pod := range pods.Items {
pod := pod
if !fromLegacyNetworkingVersion(pod) {
// Skip check. This pod is already on a version where the change has been made; if they were going
// to break they would already be broken.
continue
}
g.Go(func() error {
_ = sem.Acquire(context.Background(), 1)
defer sem.Release(1)
// Fetch list of all clusters to get which ports we care about
resp, err := cli.EnvoyDo(context.Background(), pod.Name, pod.Namespace, "GET", "config_dump?resource=dynamic_active_clusters&mask=cluster.name")
if err != nil {
fmt.Println("failed to get config dump: ", err)
return nil
}
ports, err := extractInboundPorts(resp)
if err != nil {
fmt.Println("failed to get ports: ", err)
return nil
}
// Next, look at what ports the pod is actually listening on
// This requires parsing the output from ss; the version we use doesn't support JSON
out, _, err := cli.PodExec(pod.Name, pod.Namespace, "istio-proxy", "ss -ltnH")
if err != nil {
if strings.Contains(err.Error(), "executable file not found") {
// Likely distroless or other custom build without ss. Nothing we can do here...
return nil
}
fmt.Println("failed to get listener state: ", err)
return nil
}
for _, ss := range strings.Split(out, "\n") {
if len(ss) == 0 {
continue
}
bind, port, err := net.SplitHostPort(getColumn(ss, 3))
if err != nil {
fmt.Println("failed to get parse state: ", err)
continue
}
ip, _ := netip.ParseAddr(bind)
portn, _ := strconv.Atoi(port)
if _, f := ports[portn]; f {
c := ports[portn]
if bind == "" {
continue
} else if bind == "*" || ip.IsUnspecified() {
c.Wildcard = true
} else if ip.IsLoopback() {
c.Lo = true
} else {
c.Explicit = true
}
ports[portn] = c
}
}
origin := &kube3.Origin{
Type: gvk.Pod,
FullName: resource.FullName{
Namespace: resource.Namespace(pod.Namespace),
Name: resource.LocalName(pod.Name),
},
ResourceVersion: resource.Version(pod.ResourceVersion),
}
for port, status := range ports {
// Binding to localhost no longer works out of the box on Istio 1.10+, give them a warning.
if status.Lo {
messages.Add(msg.NewLocalhostListener(&resource.Instance{Origin: origin}, fmt.Sprint(port)))
}
}
return nil
})
}
if err := g.Wait(); err != nil {
return nil, err
}
return messages, nil
}
func getColumn(line string, col int) string {
res := []byte{}
prevSpace := false
for _, c := range line {
if col < 0 {
return string(res)
}
if c == ' ' {
if !prevSpace {
col--
}
prevSpace = true
continue
}
prevSpace = false
if col == 0 {
res = append(res, byte(c))
}
}
return string(res)
}
func extractInboundPorts(configdump []byte) (map[int]bindStatus, error) {
| ports := map[int]bindStatus{}
cd := &admin.ConfigDump{}
if err := protomarshal.Unmarshal(configdump, cd); err != nil {
return nil, err
}
for _, cdump := range cd.Configs {
clw := &admin.ClustersConfigDump_DynamicCluster{}
if err := cdump.UnmarshalTo(clw); err != nil {
return nil, err
}
cl := &cluster.Cluster{}
if err := clw.Cluster.UnmarshalTo(cl); err != nil {
return nil, err
}
dir, _, _, port := model.ParseSubsetKey(cl.Name)
if dir == model.TrafficDirectionInbound {
ports[port] = bindStatus{}
}
}
return ports, nil | identifier_body |
|
precheck.go | nil, err
}
sa.AddRunningKubeSource(cli)
cancel := make(chan struct{})
result, err := sa.Analyze(cancel)
if err != nil {
return nil, err
}
if result.Messages != nil {
msgs = append(msgs, result.Messages...)
}
return msgs, nil
}
// Checks that if the user has gateway APIs, they are the minimum version.
// It is ok to not have them, but they must be at least v1beta1 if they do.
func checkGatewayAPIs(cli kube.CLIClient) (diag.Messages, error) {
msgs := diag.Messages{}
res, err := cli.Ext().ApiextensionsV1().CustomResourceDefinitions().List(context.Background(), metav1.ListOptions{})
if err != nil {
return nil, err
}
betaKinds := sets.New(gvk.KubernetesGateway.Kind, gvk.GatewayClass.Kind, gvk.HTTPRoute.Kind, gvk.ReferenceGrant.Kind)
for _, r := range res.Items {
if r.Spec.Group != gvk.KubernetesGateway.Group {
continue
}
if !betaKinds.Contains(r.Spec.Names.Kind) {
continue
}
versions := extractCRDVersions(&r)
has := "none"
if len(versions) > 0 {
has = strings.Join(sets.SortedList(versions), ",")
}
if !versions.Contains(gvk.KubernetesGateway.Version) {
origin := kube3.Origin{
Type: gvk.CustomResourceDefinition,
FullName: resource.FullName{
Namespace: resource.Namespace(r.Namespace),
Name: resource.LocalName(r.Name),
},
ResourceVersion: resource.Version(r.ResourceVersion),
}
r := &resource.Instance{
Origin: &origin,
}
msgs.Add(msg.NewUnsupportedGatewayAPIVersion(r, has, gvk.KubernetesGateway.Version))
}
}
return msgs, nil
}
func extractCRDVersions(r *crd.CustomResourceDefinition) sets.String {
res := sets.New[string]()
for _, v := range r.Spec.Versions {
if v.Served {
res.Insert(v.Name)
}
}
return res
}
func checkInstallPermissions(cli kube.CLIClient, istioNamespace string) diag.Messages {
Resources := []struct {
namespace string
group string
version string
name string
}{
{
version: "v1",
name: "Namespace",
},
{
namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "ClusterRole",
},
{
namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "ClusterRoleBinding",
},
{
namespace: istioNamespace,
group: "apiextensions.k8s.io",
version: "v1",
name: "CustomResourceDefinition",
},
{
namespace: istioNamespace,
group: "rbac.authorization.k8s.io",
version: "v1",
name: "Role",
},
{
namespace: istioNamespace,
version: "v1",
name: "ServiceAccount",
},
{
namespace: istioNamespace,
version: "v1",
name: "Service",
},
{
namespace: istioNamespace,
group: "apps",
version: "v1",
name: "Deployments",
},
{
namespace: istioNamespace,
version: "v1",
name: "ConfigMap",
},
{
group: "admissionregistration.k8s.io",
version: "v1",
name: "MutatingWebhookConfiguration",
},
{
group: "admissionregistration.k8s.io",
version: "v1",
name: "ValidatingWebhookConfiguration",
},
}
msgs := diag.Messages{}
for _, r := range Resources {
err := checkCanCreateResources(cli, r.namespace, r.group, r.version, r.name)
if err != nil {
msgs.Add(msg.NewInsufficientPermissions(&resource.Instance{Origin: clusterOrigin{}}, r.name, err.Error()))
}
}
return msgs
}
func checkCanCreateResources(c kube.CLIClient, namespace, group, version, name string) error {
s := &authorizationapi.SelfSubjectAccessReview{
Spec: authorizationapi.SelfSubjectAccessReviewSpec{
ResourceAttributes: &authorizationapi.ResourceAttributes{
Namespace: namespace,
Verb: "create",
Group: group,
Version: version,
Resource: name,
},
},
}
response, err := c.Kube().AuthorizationV1().SelfSubjectAccessReviews().Create(context.Background(), s, metav1.CreateOptions{})
if err != nil {
return err
}
if !response.Status.Allowed {
if len(response.Status.Reason) > 0 {
return errors.New(response.Status.Reason)
}
return errors.New("permission denied")
}
return nil
}
func checkServerVersion(cli kube.CLIClient) (diag.Messages, error) {
v, err := cli.GetKubernetesVersion()
if err != nil {
return nil, fmt.Errorf("failed to get the Kubernetes version: %v", err)
}
compatible, err := k8sversion.CheckKubernetesVersion(v)
if err != nil {
return nil, err
}
if !compatible {
return []diag.Message{
msg.NewUnsupportedKubernetesVersion(&resource.Instance{Origin: clusterOrigin{}}, v.String(), fmt.Sprintf("1.%d", k8sversion.MinK8SVersion)),
}, nil
}
return nil, nil
}
func checkDataPlane(cli kube.CLIClient, namespace string) (diag.Messages, error) {
msgs := diag.Messages{}
m, err := checkListeners(cli, namespace)
if err != nil {
return nil, err
}
msgs = append(msgs, m...)
// TODO: add more checks
return msgs, nil
}
var networkingChanges, _ = goversion.NewSemver("1.10.0")
func fromLegacyNetworkingVersion(pod v1.Pod) bool {
for _, c := range pod.Spec.Containers {
if c.Name != "istio-proxy" {
continue
}
_, tag, _ := strings.Cut(c.Image, ":")
ver, err := pkgversion.TagToVersionString(tag)
if err != nil {
return true // If we aren't sure, default to doing more checks than needed
}
sv, err := goversion.NewSemver(ver)
if err != nil {
return true // If we aren't sure, default to doing more checks than needed
}
return sv.LessThan(networkingChanges)
}
return false
}
// checkListeners checks for workloads that would be broken by https://istio.io/latest/blog/2021/upcoming-networking-changes/
func checkListeners(cli kube.CLIClient, namespace string) (diag.Messages, error) {
pods, err := cli.Kube().CoreV1().Pods(namespace).List(context.Background(), metav1.ListOptions{
// Find all running pods
FieldSelector: "status.phase=Running",
// Find all injected pods. We don't care about non-injected pods, because the new behavior
// mirrors Kubernetes; this is only a breaking change for existing Istio users.
LabelSelector: "security.istio.io/tlsMode=istio",
})
if err != nil {
return nil, err
}
var messages diag.Messages = make([]diag.Message, 0)
g := errgroup.Group{}
sem := semaphore.NewWeighted(25)
for _, pod := range pods.Items {
pod := pod
if !fromLegacyNetworkingVersion(pod) {
// Skip check. This pod is already on a version where the change has been made; if they were going
// to break they would already be broken.
continue
}
g.Go(func() error {
_ = sem.Acquire(context.Background(), 1)
defer sem.Release(1)
// Fetch list of all clusters to get which ports we care about
resp, err := cli.EnvoyDo(context.Background(), pod.Name, pod.Namespace, "GET", "config_dump?resource=dynamic_active_clusters&mask=cluster.name")
if err != nil {
fmt.Println("failed to get config dump: ", err)
return nil
}
ports, err := extractInboundPorts(resp)
if err != nil {
fmt.Println("failed to get ports: ", err)
return nil
}
// Next, look at what ports the pod is actually listening on
// This requires parsing the output from ss; the version we use doesn't support JSON
out, _, err := cli.PodExec(pod.Name, pod.Namespace, "istio-proxy", "ss -ltnH")
if err != nil {
if strings.Contains(err.Error(), "executable file not found") {
| // Likely distroless or other custom build without ss. Nothing we can do here...
return nil
}
| conditional_block |
|
river.rs | InverseLock,
YoungSense,
Switch,
YoungSwitch,
Narrows,
AppendUp,
YoungRangeSense,
Net,
ForceDown,
ForceUp,
Spawn,
PowerInvert,
Current,
Bridge,
Split,
RangeSwitch,
YoungRangeSwitch,
}
impl NodeType {
pub fn from_name(name: &str) -> NodeType {
// unimplemented!();
use self::NodeType::*;
match &name.to_lowercase()[..] {
"hatchery" => Hatchery,
"hydro. power" => HydroPower,
"snowmelt" => Snowmelt,
"shallows" => Shallows(2),
"rapids" => Rapids(2),
"append. down" => AppendDown,
"bear" => Bear,
"force. field" => ForceField,
"sense" => Sense,
"clone" => Clone,
"young bear" => YoungBear,
"bird" => Bird,
"upstream. killing. device" => UpstreamKillingDevice,
"waterfall" => Waterfall,
"universe" => Universe,
"powers" => Powers,
"marshy" => Marshy,
"insulated" => Insulted,
"upstream. sense" => UpstreamSense,
"downstream. sense" => DownstreamSense,
"evaporates" => Evaporates,
"youth. fountain" => YouthFountain,
"oblivion" => Oblivion,
"pump" => Pump,
"range. sense" => RangeSense,
"fear" => Fear,
"reverse. up" => ReverseUp,
"reverse. down" => ReverseDown,
"time" => Time,
"lock" => Lock,
"inverse. lock" => InverseLock,
"young. sense" => YoungSense,
"switch" => Switch,
"young. switch" => YoungSwitch,
"narrows" => Narrows,
"append. up" => AppendUp,
"young. range. sense" => YoungRangeSense,
"net" => Net,
"force. down" => ForceDown,
"force. up" => ForceUp,
"spawn" => Spawn,
"power. invert" => PowerInvert,
"current" => Current,
"bridge" => Bridge,
"split" => Split,
"range. switch" => RangeSwitch,
"young. range. switch" => YoungRangeSwitch,
_ => Other(name.to_owned()),
}
}
}
#[derive(Debug)]
pub struct Node<'a, 'b> {
pub name: &'b str,
pub node_type: NodeType,
pub parent: Weak<RefCell<Node<'a, 'b>>>,
pub children: Vec<Rc<RefCell<Node<'a, 'b>>>>,
pub salmon: Vec<Salmon<'a>>,
pub block_salmon: bool,
pub very_block_salmon: bool,
pub powered: bool,
pub block_power: bool,
pub watered: bool,
pub block_water: bool,
pub snowy: bool,
pub block_snow: bool,
pub destroyed: bool,
}
impl<'a, 'b> Node<'a, 'b> {
pub fn new(name: &'b str) -> Node<'a, 'b> {
let node = Node {
name,
node_type: NodeType::from_name(name),
parent: Weak::new(),
children: vec![],
salmon: vec![],
block_salmon: false,
very_block_salmon: false,
powered: false,
block_power: false,
watered: false,
block_water: false,
snowy: false,
block_snow: false,
destroyed: false,
};
node.init()
}
fn init(mut self) -> Node<'a, 'b> {
use self::NodeType::*;
match &self.node_type {
&Snowmelt => self.snowy = true,
&Powers => self.powered = true,
_ => (),
}
self
}
pub fn borrow_child(&self, n: usize) -> Ref<Node<'a, 'b>> {
self.children[n].borrow()
}
pub fn borrow_mut_child(&self, n: usize) -> RefMut<Node<'a, 'b>> {
self.children[n].borrow_mut()
}
pub fn add_child(&mut self, child: Rc<RefCell<Node<'a, 'b>>>) {
self.children.push(child);
}
pub fn add_salmon(&mut self, salmon: Salmon<'a>) {
self.salmon.push(salmon);
}
// Returns the index of the child that would lead to the node
// with a name of `name`.
pub fn find_node_path(&self, name: &str) -> Option<usize> {
(0..self.children.len()).position(|i|
self.borrow_child(i).find_node(name)
)
}
// This is supposed to use an in-order search, but that doesn't
// really make sense for an n-ary tree...
// This will at least be in-order for any nodes with <= 2 children.
fn find_node(&self, name: &str) -> bool {
let len = self.children.len();
if len > 0 {
match self.borrow_child(0).find_node(name) {
true => return true,
false => (),
}
}
if self.name == name { return true; }
if len > 1 {
for i in 1..len {
match self.borrow_child(i).find_node(name) {
true => return true,
false => (),
}
}
}
false
}
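// --- Illustrative example (standalone, separate from river.rs) ---
// A minimal sketch of the traversal order find_node uses: first child's subtree,
// then the node itself, then the remaining children. For nodes with more than
// two children this is no longer a true in-order walk, as noted above.
struct T {
    name: &'static str,
    children: Vec<T>,
}

fn visit(t: &T, out: &mut Vec<&'static str>) {
    if let Some(first) = t.children.first() {
        visit(first, out);
    }
    out.push(t.name);
    for c in t.children.iter().skip(1) {
        visit(c, out);
    }
}

fn main() {
    let tree = T {
        name: "root",
        children: vec![
            T { name: "a", children: vec![] },
            T { name: "b", children: vec![] },
            T { name: "c", children: vec![] },
        ],
    };
    let mut order = Vec::new();
    visit(&tree, &mut order);
    assert_eq!(order, vec!["a", "root", "b", "c"]);
    println!("{:?}", order);
}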
// Move salmon in the given direction. Shallows and rapids delay all movement while
// their counter is non-zero; downstream fish flow to the parent (or are printed and
// removed at the river mouth); upstream fish are routed into a child, preferring the
// path to their namesake node and skipping children that very-block salmon.
pub fn move_salmon(&mut self, direction: Direction) {
match &mut self.node_type {
&mut NodeType::Shallows(ref mut i) =>
if *i > 0 {
*i -= 1;
return
},
&mut NodeType::Rapids(ref mut i) =>
if *i > 0 {
*i -= 1;
return
},
_ => (),
}
match direction {
Direction::Downstream => {
match self.parent.upgrade() {
Some(p) => {
// Use `Vec::drain_filter` once it stabilizes: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter
let mut p = p.borrow_mut();
let mut i = 0;
while i != self.salmon.len() {
if self.salmon[i].direction == Direction::Downstream {
let s = self.salmon.remove(i);
p.salmon.push(s);
} else {
i += 1;
}
}
},
None => {
for s in &self.salmon {
if s.direction == Direction::Downstream {
print!("{}", s.name);
}
}
self.salmon.retain(|s| s.direction != Direction::Downstream);
},
}
},
Direction::Upstream => {
if self.block_salmon { return }
// `Vec::drain_filter` could probably be used here too
let mut i = 0;
while i != self.salmon.len() {
if self.salmon[i].direction == Direction::Upstream {
let idx = match self.find_node_path(self.salmon[i].name) {
Some(idx) if !self.borrow_child(idx).very_block_salmon
=> Some(idx),
_ => self.children.iter().position(|c| !c.borrow().very_block_salmon),
};
match idx {
Some(idx) => {
let s = self.salmon.remove(i);
self.borrow_mut_child(idx).salmon.push(s);
},
None => i += 1,
}
} else {
i += 1;
}
}
},
}
}
pub fn tick(&mut self, tick: Tick) {
use tick::PropagationOrder::*;
match tick.propagation_order() {
PostOrder => {
for i in 0..self.children.len() {
self.borrow_mut_child(i).tick(tick);
}
self.run_tick(tick);
},
PreOrder => {
self.run_tick(tick);
for i in 0..self.children.len() {
self.borrow_mut_child(i).tick(tick);
}
},
_ => unimplemented!(),
}
}
// TODO: rewrite this, it's crap
// I don't like this inside of Node... (or do I...?)
fn run_tick(&mut self, tick: Tick) {
use self::NodeType::*;
use tick::Tick::*;
match (tick, &self.node_type) {
(Snow, _) => {
for i in 0..self.children.len() {
if self.borrow_child(i).snowy {
self.become_snowy();
break;
}
}
},
(Water, _) => {
for i in 0..self.children.len() {
if self.borrow_child(i).watered {
self.become_watered();
break;
}
}
},
(Power, &HydroPower) => self.powered = self.watered,
(FishDown, _) => self.move_salmon(Direction:: | random_line_split |
||
river.rs | allows(2),
"rapids" => Rapids(2),
"append. down" => AppendDown,
"bear" => Bear,
"force. field" => ForceField,
"sense" => Sense,
"clone" => Clone,
"young bear" => YoungBear,
"bird" => Bird,
"upstream. killing. device" => UpstreamKillingDevice,
"waterfall" => Waterfall,
"universe" => Universe,
"powers" => Powers,
"marshy" => Marshy,
"insulated" => Insulted,
"upstream. sense" => UpstreamSense,
"downstream. sense" => DownstreamSense,
"evaporates" => Evaporates,
"youth. fountain" => YouthFountain,
"oblivion" => Oblivion,
"pump" => Pump,
"range. sense" => RangeSense,
"fear" => Fear,
"reverse. up" => ReverseUp,
"reverse. down" => ReverseDown,
"time" => Time,
"lock" => Lock,
"inverse. lock" => InverseLock,
"young. sense" => YoungSense,
"switch" => Switch,
"young. switch" => YoungSwitch,
"narrows" => Narrows,
"append. up" => AppendUp,
"young. range. sense" => YoungRangeSense,
"net" => Net,
"force. down" => ForceDown,
"force. up" => ForceUp,
"spawn" => Spawn,
"power. invert" => PowerInvert,
"current" => Current,
"bridge" => Bridge,
"split" => Split,
"range. switch" => RangeSwitch,
"young. range. switch" => YoungRangeSwitch,
_ => Other(name.to_owned()),
}
}
}
#[derive(Debug)]
pub struct Node<'a, 'b> {
pub name: &'b str,
pub node_type: NodeType,
pub parent: Weak<RefCell<Node<'a, 'b>>>,
pub children: Vec<Rc<RefCell<Node<'a, 'b>>>>,
pub salmon: Vec<Salmon<'a>>,
pub block_salmon: bool,
pub very_block_salmon: bool,
pub powered: bool,
pub block_power: bool,
pub watered: bool,
pub block_water: bool,
pub snowy: bool,
pub block_snow: bool,
pub destroyed: bool,
}
impl<'a, 'b> Node<'a, 'b> {
pub fn new(name: &'b str) -> Node<'a, 'b> {
let node = Node {
name,
node_type: NodeType::from_name(name),
parent: Weak::new(),
children: vec![],
salmon: vec![],
block_salmon: false,
very_block_salmon: false,
powered: false,
block_power: false,
watered: false,
block_water: false,
snowy: false,
block_snow: false,
destroyed: false,
};
node.init()
}
fn init(mut self) -> Node<'a, 'b> {
use self::NodeType::*;
match &self.node_type {
&Snowmelt => self.snowy = true,
&Powers => self.powered = true,
_ => (),
}
self
}
pub fn borrow_child(&self, n: usize) -> Ref<Node<'a, 'b>> {
self.children[n].borrow()
}
pub fn borrow_mut_child(&self, n: usize) -> RefMut<Node<'a, 'b>> {
self.children[n].borrow_mut()
}
pub fn add_child(&mut self, child: Rc<RefCell<Node<'a, 'b>>>) {
self.children.push(child);
}
pub fn add_salmon(&mut self, salmon: Salmon<'a>) {
self.salmon.push(salmon);
}
// Returns the index of the child that would lead to the node
// with a name of `name`.
pub fn find_node_path(&self, name: &str) -> Option<usize> {
(0..self.children.len()).position(|i|
self.borrow_child(i).find_node(name)
)
}
// This is supposed to use an in-order search, but that doesn't
// really make sense for an n-ary tree...
// This will at least be in-order for any nodes with <= 2 children.
fn find_node(&self, name: &str) -> bool {
let len = self.children.len();
if len > 0 {
match self.borrow_child(0).find_node(name) {
true => return true,
false => (),
}
}
if self.name == name { return true; }
if len > 1 {
for i in 1..len {
match self.borrow_child(i).find_node(name) {
true => return true,
false => (),
}
}
}
false
}
// Move salmon in the given direction. Shallows and rapids delay all movement while
// their counter is non-zero; downstream fish flow to the parent (or are printed and
// removed at the river mouth); upstream fish are routed into a child, preferring the
// path to their namesake node and skipping children that very-block salmon.
pub fn move_salmon(&mut self, direction: Direction) {
match &mut self.node_type {
&mut NodeType::Shallows(ref mut i) =>
if *i > 0 {
*i -= 1;
return
},
&mut NodeType::Rapids(ref mut i) =>
if *i > 0 {
*i -= 1;
return
},
_ => (),
}
match direction {
Direction::Downstream => {
match self.parent.upgrade() {
Some(p) => {
// Use `Vec::drain_filter` once it stabilizes: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter
let mut p = p.borrow_mut();
let mut i = 0;
while i != self.salmon.len() {
if self.salmon[i].direction == Direction::Downstream {
let s = self.salmon.remove(i);
p.salmon.push(s);
} else {
i += 1;
}
}
},
None => {
for s in &self.salmon {
if s.direction == Direction::Downstream {
print!("{}", s.name);
}
}
self.salmon.retain(|s| s.direction != Direction::Downstream);
},
}
},
Direction::Upstream => {
if self.block_salmon { return }
// `Vec::drain_filter` could probably be used here too
let mut i = 0;
while i != self.salmon.len() {
if self.salmon[i].direction == Direction::Upstream {
let idx = match self.find_node_path(self.salmon[i].name) {
Some(idx) if !self.borrow_child(idx).very_block_salmon
=> Some(idx),
_ => self.children.iter().position(|c| !c.borrow().very_block_salmon),
};
match idx {
Some(idx) => {
let s = self.salmon.remove(i);
self.borrow_mut_child(idx).salmon.push(s);
},
None => i += 1,
}
} else {
i += 1;
}
}
},
}
}
pub fn tick(&mut self, tick: Tick) {
use tick::PropagationOrder::*;
match tick.propagation_order() {
PostOrder => {
for i in 0..self.children.len() {
self.borrow_mut_child(i).tick(tick);
}
self.run_tick(tick);
},
PreOrder => {
self.run_tick(tick);
for i in 0..self.children.len() {
self.borrow_mut_child(i).tick(tick);
}
},
_ => unimplemented!(),
}
}
// TODO: rewrite this, it's crap
// I don't like this inside of Node... (or do I...?)
fn run_tick(&mut self, tick: Tick) {
use self::NodeType::*;
use tick::Tick::*;
match (tick, &self.node_type) {
(Snow, _) => {
for i in 0..self.children.len() {
if self.borrow_child(i).snowy {
self.become_snowy();
break;
}
}
},
(Water, _) => {
for i in 0..self.children.len() {
if self.borrow_child(i).watered {
self.become_watered();
break;
}
}
},
(Power, &HydroPower) => self.powered = self.watered,
(FishDown, _) => self.move_salmon(Direction::Downstream),
(FishUp, _) => self.move_salmon(Direction::Upstream),
(FishHatch, &Hatchery) => if self.is_powered() {
self.add_salmon(Salmon {
age: Age::Mature,
direction: Direction::Upstream,
name: "homeless"
});
},
_ => (),
}
}
// TODO: I don't like this...
pub fn become_snowy(&mut self) | {
use self::NodeType::*;
self.snowy = true;
match self.node_type {
HydroPower => self.destroyed = true,
_ => (),
}
} | identifier_body |
|
river.rs | ,
ForceDown,
ForceUp,
Spawn,
PowerInvert,
Current,
Bridge,
Split,
RangeSwitch,
YoungRangeSwitch,
}
impl NodeType {
pub fn from_name(name: &str) -> NodeType {
// unimplemented!();
use self::NodeType::*;
match &name.to_lowercase()[..] {
"hatchery" => Hatchery,
"hydro. power" => HydroPower,
"snowmelt" => Snowmelt,
"shallows" => Shallows(2),
"rapids" => Rapids(2),
"append. down" => AppendDown,
"bear" => Bear,
"force. field" => ForceField,
"sense" => Sense,
"clone" => Clone,
"young bear" => YoungBear,
"bird" => Bird,
"upstream. killing. device" => UpstreamKillingDevice,
"waterfall" => Waterfall,
"universe" => Universe,
"powers" => Powers,
"marshy" => Marshy,
"insulated" => Insulted,
"upstream. sense" => UpstreamSense,
"downstream. sense" => DownstreamSense,
"evaporates" => Evaporates,
"youth. fountain" => YouthFountain,
"oblivion" => Oblivion,
"pump" => Pump,
"range. sense" => RangeSense,
"fear" => Fear,
"reverse. up" => ReverseUp,
"reverse. down" => ReverseDown,
"time" => Time,
"lock" => Lock,
"inverse. lock" => InverseLock,
"young. sense" => YoungSense,
"switch" => Switch,
"young. switch" => YoungSwitch,
"narrows" => Narrows,
"append. up" => AppendUp,
"young. range. sense" => YoungRangeSense,
"net" => Net,
"force. down" => ForceDown,
"force. up" => ForceUp,
"spawn" => Spawn,
"power. invert" => PowerInvert,
"current" => Current,
"bridge" => Bridge,
"split" => Split,
"range. switch" => RangeSwitch,
"young. range. switch" => YoungRangeSwitch,
_ => Other(name.to_owned()),
}
}
}
#[derive(Debug)]
pub struct Node<'a, 'b> {
pub name: &'b str,
pub node_type: NodeType,
pub parent: Weak<RefCell<Node<'a, 'b>>>,
pub children: Vec<Rc<RefCell<Node<'a, 'b>>>>,
pub salmon: Vec<Salmon<'a>>,
pub block_salmon: bool,
pub very_block_salmon: bool,
pub powered: bool,
pub block_power: bool,
pub watered: bool,
pub block_water: bool,
pub snowy: bool,
pub block_snow: bool,
pub destroyed: bool,
}
impl<'a, 'b> Node<'a, 'b> {
pub fn new(name: &'b str) -> Node<'a, 'b> {
let node = Node {
name,
node_type: NodeType::from_name(name),
parent: Weak::new(),
children: vec![],
salmon: vec![],
block_salmon: false,
very_block_salmon: false,
powered: false,
block_power: false,
watered: false,
block_water: false,
snowy: false,
block_snow: false,
destroyed: false,
};
node.init()
}
fn init(mut self) -> Node<'a, 'b> {
use self::NodeType::*;
match &self.node_type {
&Snowmelt => self.snowy = true,
&Powers => self.powered = true,
_ => (),
}
self
}
pub fn borrow_child(&self, n: usize) -> Ref<Node<'a, 'b>> {
self.children[n].borrow()
}
pub fn borrow_mut_child(&self, n: usize) -> RefMut<Node<'a, 'b>> {
self.children[n].borrow_mut()
}
pub fn add_child(&mut self, child: Rc<RefCell<Node<'a, 'b>>>) {
self.children.push(child);
}
pub fn add_salmon(&mut self, salmon: Salmon<'a>) {
self.salmon.push(salmon);
}
// Returns the index of the child that would lead to the node
// with a name of `name`.
pub fn find_node_path(&self, name: &str) -> Option<usize> {
(0..self.children.len()).position(|i|
self.borrow_child(i).find_node(name)
)
}
// This is supposed to use an in-order search, but that doesn't
// really make sense for an n-ary tree...
// This will at least be in-order for any nodes with <= 2 children.
fn | (&self, name: &str) -> bool {
let len = self.children.len();
if len > 0 {
match self.borrow_child(0).find_node(name) {
true => return true,
false => (),
}
}
if self.name == name { return true; }
if len > 1 {
for i in 1..len {
match self.borrow_child(i).find_node(name) {
true => return true,
false => (),
}
}
}
false
}
// Move salmon in the given direction. Shallows and rapids delay all movement while
// their counter is non-zero; downstream fish flow to the parent (or are printed and
// removed at the river mouth); upstream fish are routed into a child, preferring the
// path to their namesake node and skipping children that very-block salmon.
pub fn move_salmon(&mut self, direction: Direction) {
match &mut self.node_type {
&mut NodeType::Shallows(ref mut i) =>
if *i > 0 {
*i -= 1;
return
},
&mut NodeType::Rapids(ref mut i) =>
if *i > 0 {
*i -= 1;
return
},
_ => (),
}
match direction {
Direction::Downstream => {
match self.parent.upgrade() {
Some(p) => {
// Use `Vec::drain_filter` once it stabilizes: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter
let mut p = p.borrow_mut();
let mut i = 0;
while i != self.salmon.len() {
if self.salmon[i].direction == Direction::Downstream {
let s = self.salmon.remove(i);
p.salmon.push(s);
} else {
i += 1;
}
}
},
None => {
for s in &self.salmon {
if s.direction == Direction::Downstream {
print!("{}", s.name);
}
}
self.salmon.retain(|s| s.direction != Direction::Downstream);
},
}
},
Direction::Upstream => {
if self.block_salmon { return }
// `Vec::drain_filter` could probably be used here too
let mut i = 0;
while i != self.salmon.len() {
if self.salmon[i].direction == Direction::Upstream {
let idx = match self.find_node_path(self.salmon[i].name) {
Some(idx) if !self.borrow_child(idx).very_block_salmon
=> Some(idx),
_ => self.children.iter().position(|c| !c.borrow().very_block_salmon),
};
match idx {
Some(idx) => {
let s = self.salmon.remove(i);
self.borrow_mut_child(idx).salmon.push(s);
},
None => i += 1,
}
} else {
i += 1;
}
}
},
}
}
pub fn tick(&mut self, tick: Tick) {
use tick::PropagationOrder::*;
match tick.propagation_order() {
PostOrder => {
for i in 0..self.children.len() {
self.borrow_mut_child(i).tick(tick);
}
self.run_tick(tick);
},
PreOrder => {
self.run_tick(tick);
for i in 0..self.children.len() {
self.borrow_mut_child(i).tick(tick);
}
},
_ => unimplemented!(),
}
}
// TODO: rewrite this, it's crap
// I don't like this inside of Node... (or do I...?)
fn run_tick(&mut self, tick: Tick) {
use self::NodeType::*;
use tick::Tick::*;
match (tick, &self.node_type) {
(Snow, _) => {
for i in 0..self.children.len() {
if self.borrow_child(i).snowy {
self.become_snowy();
break;
}
}
},
(Water, _) => {
for i in 0..self.children.len() {
if self.borrow_child(i).watered {
self.become_watered();
break;
}
}
},
(Power, &HydroPower) => self.powered = self.watered,
(FishDown, _) => self.move_salmon(Direction::Downstream),
(FishUp, _) => self.move_salmon(Direction::Upstream),
(FishHatch, &Hatchery) | find_node | identifier_name |