file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes) values
---|---|---|---|---|
scr.py | some threshold.
Ex: scr.load_pfile(pfile='ff_xp93s1sp0001.dat', filetype=['pfile' or 'matfile'])
Ex: Parse the text data into numbers.
scr.pfile.parse_pfile(case=[1,100])
Ex: Sync and set the run to have a timestep of 0.01 sec.
scr.pfile.sync(case=[1,100], tstep=0.01)
Ex: Sync and set the run to have an auto time step, defaults to 0.01 sec.
scr.pfile.sync(case=76, auto='yes')
Ex: Sync and set the run to have an auto time step, using 0.02 sec at the times where force exists.
scr.pfile.sync(case=76, auto='yes', tstep=0.02)
"""
pfile = kwargs['pfile']
filetype = kwargs['filetype']
# Loads the pfile and finds the indices, still need to sync and parse.
self.pfile = PFILE(pfile, filetype=filetype)
# self.pfile.sync(tstep='auto')
def load_zeta(self, **kwargs):
"""Method to load the damping file.
Ex: scr.load_zeta(damp='xp93s1/DAMPINGFILE')
"""
dampfile = kwargs['damp']
with open(dampfile) as f:
for line in f:
if line[0] != '$' and line[0] != 'i':
row = line.split()
row = list(map(float, row))
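# Store zeta keyed by a zero-based mode index; the file lists 1-based mode numbers and damping in percent, hence the -1 shift and 0.01 scaling below.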
self.zeta[int(row[0] - 1)] = 0.01 * row[1]
def save2mat(self, outfile):
"""Method to save the scr object to a Matlab mat file.
Ex: scr.save2mat('xp93zz/sc_xp93zzsp0001.mat')
"""
from matlab.mat_utilities import save2mat
from matlab.mat_utilities import tuple2list as t2l
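# Convert the LTM (acron, dof) tuples to lists so they can be written into the .mat file.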
doflist = {'acron_dofs': t2l(self.ltm.acron_dofs)}
outlist = [self.eta, self.u, self.time, doflist]
keylist = ['eta', 'u', 'time', 'ltm']
save2mat(key=keylist, olist=outlist, ofile=outfile)
def plot_u(self, **kwargs):
"""Method to plot the response in the time domain.
Ex: Plot this dof for case 1 and 2, and label the window "u test"
scr.plot_u(items=[(1, 'N1PN3', 'TOR'), (2, 'N1PN3', 'TOR')], desc='u test')
"""
# Get the kwargs.
items = kwargs['items']
if type(items) is not list:
items = [items]
if 'desc' in kwargs.keys():
desc = kwargs['desc']
else:
desc = ''
# Loop and plot each requested dof.
fig = figure()
ax = subplot(111)
for item in items:
if len(item) != 3:
raise Exception('!!! You must supply (case, acron, dof) to plot !!!')
c = item[0]
if c not in self.u.keys():
raise Exception('!!! Case {0} has not been run or does not exist !!!'.format(c))
dof = (item[1], item[2])
# Find the dof tuple in the acron_dof list or the dof list from the ltm object.
if dof in self.ltm.acron_dofs:
i_dof = self.ltm.acron_dofs.index(dof)
elif dof in self.ltm.dofs:
i_dof = self.ltm.dofs.index(dof)
else:
raise Exception("!!! DOF " + dof.__str__() + " not in LTM " + self.ltm.name)
# Plot the requested time history.
label = '({0}, {1}) case: {2}'.format(dof[0], dof[1], c)
ax.plot(self.time[c], self.u[c][i_dof, :], label=label)
ax.legend()
title('Response of FF: %s' % (self.pfile.name))
xlabel('Time (s)')
fig.canvas.set_window_title('{0} {1}'.format(self.name, desc))
show()
def plot_eta(self, **kwargs):
"""Method to plot the modal displacements.
Ex: Plot mode 7 for case 1 and case 100, and label the window "eta sp0001".
scr.plot_eta(items=[(1, 7), (100, 7)], desc='eta sp0001')
"""
# Get the kwargs.
items = kwargs['items']
if type(items) is not list:
items = [items]
if 'desc' in kwargs.keys():
desc = kwargs['desc']
else:
desc = ''
fig = plt.figure()
ax = plt.subplot(111)
for item in items:
c = item[0]
mode = item[1]
if mode > self.phi.num_modes:
raise Exception("!!! Only %s modes in analysis !!!" % self.phi.num_modes.__str__())
# Plot the requested modal displacement.
label = 'Mode {0} case: {1}'.format(mode, c)
ax.plot(self.time[c], self.eta[c][mode - 1, :], label=label)
ax.legend()
plt.title('Modal Response of FF: %s' % self.pfile.name)
plt.xlabel('Time (s)')
fig.canvas.set_window_title('{0} {1}'.format(self.name, desc))
plt.show()
def amx(self, **kwargs):
"""Method to find the max/mins for one or all output DOF.\n
Ex: Find the max/mins and times for this DOF in case 1.
scr.amx(item=(1, 'N2LAB', 'TOR'))
"""
# Determine the keyword arguments.
if 'item' in kwargs.keys():
item = kwargs['item']
if not type(item) is tuple:
raise Exception('Requested dof {0} is not a tuple (case, "acron", "dof").'.format(item))
dof = (item[1], item[2])
case = item[0]
else:
raise Exception('You must request a dof: scr.amx(item=(case, "acron", "dof")).')
# Determine the location of the requested dof.
loc = [x for x, y in enumerate(self.ltm.acron_dofs) if y == dof][0]
# Determine the max/min and the time at which they occurred.
dof_res = self.u[case][loc, :]
max_val = np.max(dof_res)
min_val = np.min(dof_res)
max_loc = np.argmax(dof_res)
min_loc = np.argmin(dof_res)
max_time = self.time[case][max_loc]
min_time = self.time[case][min_loc]
# Print to the screen.
print('Case {0}- \t{1}\tMax: {2:.4f} (@ {3:.4f} sec)\tMin: {4:.4f} (@ {5:.4f} sec)\n'.format(
case, dof, max_val, max_time, min_val, min_time
))
def | (self, **kwargs):
"""Method to perform fft on a signal.
Ex: Plot fft of several responses.
scr.fft(u_out=[(1, 'SSSIEA', 'FX'), (1, 'SSSIEA', 'FY')])
Ex: Plot fft of several applied forces.
scr.fft(f_in=[(1, 100012, 1), (1, 100012, 2), (1, 100012, 3)])
"""
from PyLnD.loads.freq_domain import FFT
u_out = []
f_in = []
# Obtain the keyword arguments.
if 'u_out' in kwargs.keys():
u_out = kwargs['u_out']
if type(u_out) is not list:
u_out = [u_out]
if 'f_in' in kwargs.keys():
f_in = kwargs['f_in']
if type(f_in) is not list:
f_in = [f_in]
if 'desc' in kwargs.keys():
desc = kwargs['desc']
else:
desc = ''
# Loop, perform fft, and plot each requested response.
if u_out:
for resp in u_out:
if len(resp) != 3:
raise Exception('!!! You must supply (case, acron, dof) to plot !!!')
c = resp[0]
if c not in self.u.keys():
raise Exception('!!! Case {0} has not been run or does not exist !!!'.format(c))
dof = (resp[1], resp[2])
# Find the dof tuple in the acron_dof list or the dof list from the ltm object.
| fft | identifier_name |
unauthorized-view.component.ts | Login:String;
public server = environment.server;
public allAccounts=0;
public orgId:String="";
public orgName:String="";
public totalLogins:Number=0;
public allUsers=[];
public lastLogins=[];
public barChartOptions:any = {
scaleShowVerticalLines: false,
responsive: true,
scales: {
yAxes: [{
type: "linear",
display: true,
position: "left",
id: "y-axis-1",
gridLines: {
display: false
},
scaleLabel: {
display: true,
labelString: 'Number of Logins'
},
labels: {
show: true
},
ticks: {
beginAtZero: true,
userCallback: function(label, index, labels) {
if (Math.floor(label) === label) {
return label;
}
},
}
}],
xAxes: [{
scaleLabel: {
display: true,
labelString: 'Users'
}
}]
}
};
public barChartOptions1:any = {
scaleShowVerticalLines: false,
responsive: true,
scales: {
yAxes: [{
type: "linear",
display: true,
position: "left",
id: "y-axis-1",
gridLines: {
display: false
},
scaleLabel: {
display: true,
labelString: 'Number of Users'
},
labels: {
show: true
},
ticks: {
beginAtZero: true,
userCallback: function(label, index, labels) {
if (Math.floor(label) === label) {
return label;
}
},
}
}],
xAxes: [{
scaleLabel: {
display: true,
labelString: 'Last Activity'
}
}]
}
};
public org_options=[];
public barChartLabels:string[] = [];
public barChartType:string = 'bar';
public barChartLegend:boolean = true;
public barChartData = [];
public barChartLabels1:string[] = [];
public barChartType1:string = 'bar';
public barChartLegend1:boolean = true;
public barChartData1:any[] = [
{data: [], label: ''}
];
public doughnutChartLabels:string[] = ['', ''];
public doughnutChartData:number[] = [];
public doughnutChartType:string = 'doughnut';
public doughnutChartColors: any[] = [{ backgroundColor: ["#88d753", "#ff5656","#ff8605", "#7aad02"] }];
public chartClicked(e:any):void |
public chartHovered(e:any):void {
console.log(e);
}
public populateData():void {
let that=this;
that.org_options=[];
var url='http://'+that.server+'/'+environment.rbacRoot+'/AccountUsers.php';
var obj={};
if (this.showAccounts!=true)
obj["orgId"]=this.uploadedService.getOrgId();
this.orgId = this.uploadedService.getOrgId();
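// Fetch user counts per organization (restricted to the caller's org unless the user is a super user) to populate the bar chart.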
that.http.post(url, JSON.stringify(obj), {
responseType: 'json'
}).map(response => {
var data=[];
for (var each in response){
if (response[each]["organization"]=="Ruckus Wireless"){
this.orgName = response[each]["organization"];
continue;
}
that.barChartLabels.push(response[each]["organization"]);
that.org_options.push([response[each]["id"],response[each]["organization"]]);
that.barChartData.push(parseInt(response[each]["users"]));
}
}).subscribe(re => {
console.log(re);
});
if (this.showAccounts==true){
that.http.post('http://'+that.server+'/'+environment.rbacRoot+'/listOrganizations.php', JSON.stringify({}), {
responseType: 'json'
}).map(response => {
var obj={};
var allIds={};
for (var k in response){
allIds[response[k]["id"]]=1;
if (response[k]["feature"]=="ADMIN")
continue;
else
{
if (obj.hasOwnProperty(response[k]["feature"])){
if (obj[response[k]["feature"]].indexOf(response[k]["id"])<0){
obj[response[k]["feature"]].push(response[k]["id"]);
}
}
else{
obj[response[k]["feature"]]=[response[k]["id"]];
}
}
}
if (Object.keys(allIds).length>0)
that.allAccounts=Object.keys(allIds).length-1;
else
that.allAccounts=0;
let clone = JSON.parse(JSON.stringify(this.doughnutChartData));
var ind=0;
for (var k in obj){
if (k=="SUPER USER")
continue;
that.doughnutChartLabels[ind]=k;
clone.push(obj[k].length-1);
ind++;
}
that.doughnutChartData=clone;
}).subscribe(res => {
console.log(res);
});
}
}
constructor(private uploadedService :UploadedFloorPlanService,private logger :LoggerService,private http: HttpClient,private authService: AuthService, private spinner: NgxSpinnerService, private router: Router) { }
ngOnInit() {
this.username = sessionStorage.getItem('username');
if (!this.uploadedService.getLoggedIn()) {
this.router.navigate(['/login']);
}
if (this.uploadedService.getLastLogin())
this.lastLogin = this.uploadedService.getLastLogin().toString();
this.totalLogins = this.uploadedService.getTotalLogins();
if (this.totalLogins==0) {
this.router.navigate(['/confirm']);
}
var that=this;
that.uploadedService.setShowAdmin(false);
that.uploadedService.setShowLBS(false);
that.uploadedService.setShowST(false);
that.uploadedService.setShowAccounts(false);
that.uploadedService.setAllowConf(false);
this.spinner.show();
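// Load this user's permissions and toggle the corresponding feature flags (ADMIN, LBS, SUBSCRIBER TRACING, SUPER USER) before building the dashboard.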
that.http.post('http://'+that.server+'/'+environment.rbacRoot+'/listUserPermissions.php?user_id='+that.uploadedService.getUser(), JSON.stringify({}), {
responseType: 'json'
}).map(response => {
this.spinner.hide();
for (var each in response){
for (var k in response[each]){
if (k=="ADMIN"){
that.uploadedService.setShowAdmin(true);
that.showAdmin=true;
}
else if (k=="LBS"){
that.uploadedService.setShowLBS(true);
that.showLBS=true;
}
else if (k=="SUBSCRIBER TRACING"){
that.uploadedService.setShowST(true);
that.showST=true;
if (response[each][k]=="READ ONLY")
that.allowConf=false;
else if (response[each][k]=="READ/WRITE")
that.allowConf=true;
that.uploadedService.setAllowConf(that.allowConf);
}
else if (k=="SUPER USER"){
that.uploadedService.setShowAccounts(true);
that.showAccounts=true;
}
}
}
this.logger.log("LOGIN","", new Date().toUTCString(),"","SUCCESS",this.showAccounts,this.username as string,that.uploadedService.getRoleName() as string,"DASHBOARD",this.uploadedService.getOrgName() as string);
that.populateData();
let jQueryInstance=this;
$AB(document).ready(function(){
$AB(".hamburger").off('click').on('click',function(e){
e.preventDefault();
if ($AB(".panel-body").css("padding-left")=="100px"){
$AB(".sideMenu").slideToggle(function(){
$AB(".panel-body").css("padding-left","10px");
});
}
else{
$AB(".panel-body").css("padding-left","100px");
$AB(".sideMenu").slideToggle();
}
});
$AB(document).click(function(event) {
if (!$(event.target).hasClass('logout')) {
$(".logout").hide();
}
if (!$(event.target).hasClass('slide-menu')) {
$AB(".slide-menu").css('width','0px');
$AB('.dropdown-submenu a.test').css('color','#888888');
}
});
$AB('.first-level > a.test').on("click", function(e){
$AB('.first-level > .dropdown-menu').hide();
});
$AB("#logoutBtn a").off('mouseover').on('mouseover',function(){
$AB(".logout").show();
});
that.fillGraphData(that.uploadedService.getOrgId());
$AB("#loginAttempts").css("display","block");
$AB("#lastActivityChart").css("display","block");
$AB("#listOrganizations").off('change').on('change',function(){
that.spinner.show();
that.fillGraphData($AB(this).find("option:selected").data('row'));
});
$AB('.dropdown-submenu a.test').on("click", function(e){
$AB("a.test").css("color","#888888");
$AB(".slide-menu").css('width','0px');
$AB(this).css("color","#fff");
$AB(this). | {
console.log(e);
console.log(event);
} | identifier_body |
unauthorized-view.component.ts | ins=[];
public barChartOptions:any = {
scaleShowVerticalLines: false,
responsive: true,
scales: {
yAxes: [{
type: "linear",
display: true,
position: "left",
id: "y-axis-1",
gridLines: {
display: false
},
scaleLabel: {
display: true,
labelString: 'Number of Logins'
},
labels: {
show: true
},
ticks: {
beginAtZero: true,
userCallback: function(label, index, labels) {
if (Math.floor(label) === label) {
return label;
}
},
}
}],
xAxes: [{
scaleLabel: {
display: true,
labelString: 'Users'
}
}]
}
};
public barChartOptions1:any = {
scaleShowVerticalLines: false,
responsive: true,
scales: {
yAxes: [{
type: "linear",
display: true,
position: "left",
id: "y-axis-1",
gridLines: {
display: false
},
scaleLabel: {
display: true,
labelString: 'Number of Users'
},
labels: {
show: true
},
ticks: {
beginAtZero: true,
userCallback: function(label, index, labels) {
if (Math.floor(label) === label) {
return label;
}
},
}
}],
xAxes: [{
scaleLabel: {
display: true,
labelString: 'Last Activity'
}
}]
}
};
public org_options=[];
public barChartLabels:string[] = [];
public barChartType:string = 'bar';
public barChartLegend:boolean = true;
public barChartData = [];
public barChartLabels1:string[] = [];
public barChartType1:string = 'bar';
public barChartLegend1:boolean = true;
public barChartData1:any[] = [
{data: [], label: ''}
];
public doughnutChartLabels:string[] = ['', ''];
public doughnutChartData:number[] = [];
public doughnutChartType:string = 'doughnut';
public doughnutChartColors: any[] = [{ backgroundColor: ["#88d753", "#ff5656","#ff8605", "#7aad02"] }];
public chartClicked(e:any):void {
console.log(e);
console.log(event);
}
public chartHovered(e:any):void {
console.log(e);
}
public populateData():void {
let that=this;
that.org_options=[];
var url='http://'+that.server+'/'+environment.rbacRoot+'/AccountUsers.php';
var obj={};
if (this.showAccounts!=true)
obj["orgId"]=this.uploadedService.getOrgId();
this.orgId = this.uploadedService.getOrgId();
that.http.post(url, JSON.stringify(obj), {
responseType: 'json'
}).map(response => {
var data=[];
for (var each in response){
if (response[each]["organization"]=="Ruckus Wireless"){
this.orgName = response[each]["organization"];
continue;
}
that.barChartLabels.push(response[each]["organization"]);
that.org_options.push([response[each]["id"],response[each]["organization"]]);
that.barChartData.push(parseInt(response[each]["users"]));
}
}).subscribe(re => {
console.log(re);
});
if (this.showAccounts==true){
that.http.post('http://'+that.server+'/'+environment.rbacRoot+'/listOrganizations.php', JSON.stringify({}), {
responseType: 'json'
}).map(response => {
var obj={};
var allIds={};
for (var k in response){
allIds[response[k]["id"]]=1;
if (response[k]["feature"]=="ADMIN")
continue;
else
{
if (obj.hasOwnProperty(response[k]["feature"])){
if (obj[response[k]["feature"]].indexOf(response[k]["id"])<0){
obj[response[k]["feature"]].push(response[k]["id"]);
}
}
else{
obj[response[k]["feature"]]=[response[k]["id"]];
}
}
}
if (Object.keys(allIds).length>0)
that.allAccounts=Object.keys(allIds).length-1;
else
that.allAccounts=0;
let clone = JSON.parse(JSON.stringify(this.doughnutChartData));
var ind=0;
for (var k in obj){
if (k=="SUPER USER")
continue;
that.doughnutChartLabels[ind]=k;
clone.push(obj[k].length-1);
ind++;
}
that.doughnutChartData=clone;
}).subscribe(res => {
console.log(res);
});
}
}
constructor(private uploadedService :UploadedFloorPlanService,private logger :LoggerService,private http: HttpClient,private authService: AuthService, private spinner: NgxSpinnerService, private router: Router) { }
ngOnInit() {
this.username = sessionStorage.getItem('username');
if (!this.uploadedService.getLoggedIn()) {
this.router.navigate(['/login']);
}
if (this.uploadedService.getLastLogin())
this.lastLogin = this.uploadedService.getLastLogin().toString();
this.totalLogins = this.uploadedService.getTotalLogins();
if (this.totalLogins==0) {
this.router.navigate(['/confirm']);
}
var that=this;
that.uploadedService.setShowAdmin(false);
that.uploadedService.setShowLBS(false);
that.uploadedService.setShowST(false);
that.uploadedService.setShowAccounts(false);
that.uploadedService.setAllowConf(false);
this.spinner.show();
that.http.post('http://'+that.server+'/'+environment.rbacRoot+'/listUserPermissions.php?user_id='+that.uploadedService.getUser(), JSON.stringify({}), {
responseType: 'json'
}).map(response => {
this.spinner.hide();
for (var each in response){
for (var k in response[each]){
if (k=="ADMIN"){
that.uploadedService.setShowAdmin(true);
that.showAdmin=true;
}
else if (k=="LBS"){
that.uploadedService.setShowLBS(true);
that.showLBS=true;
}
else if (k=="SUBSCRIBER TRACING"){
that.uploadedService.setShowST(true);
that.showST=true;
if (response[each][k]=="READ ONLY")
that.allowConf=false;
else if (response[each][k]=="READ/WRITE")
that.allowConf=true;
that.uploadedService.setAllowConf(that.allowConf);
}
else if (k=="SUPER USER"){
that.uploadedService.setShowAccounts(true);
that.showAccounts=true;
}
}
}
this.logger.log("LOGIN","", new Date().toUTCString(),"","SUCCESS",this.showAccounts,this.username as string,that.uploadedService.getRoleName() as string,"DASHBOARD",this.uploadedService.getOrgName() as string);
that.populateData();
let jQueryInstance=this;
$AB(document).ready(function(){
$AB(".hamburger").off('click').on('click',function(e){
e.preventDefault();
if ($AB(".panel-body").css("padding-left")=="100px"){
$AB(".sideMenu").slideToggle(function(){
$AB(".panel-body").css("padding-left","10px");
});
}
else{
$AB(".panel-body").css("padding-left","100px");
$AB(".sideMenu").slideToggle();
}
});
$AB(document).click(function(event) {
if (!$(event.target).hasClass('logout')) {
$(".logout").hide();
}
if (!$(event.target).hasClass('slide-menu')) {
$AB(".slide-menu").css('width','0px');
$AB('.dropdown-submenu a.test').css('color','#888888');
}
});
$AB('.first-level > a.test').on("click", function(e){
$AB('.first-level > .dropdown-menu').hide();
});
$AB("#logoutBtn a").off('mouseover').on('mouseover',function(){
$AB(".logout").show();
});
that.fillGraphData(that.uploadedService.getOrgId());
$AB("#loginAttempts").css("display","block");
$AB("#lastActivityChart").css("display","block");
$AB("#listOrganizations").off('change').on('change',function(){
that.spinner.show();
that.fillGraphData($AB(this).find("option:selected").data('row'));
});
$AB('.dropdown-submenu a.test').on("click", function(e){
$AB("a.test").css("color","#888888");
$AB(".slide-menu").css('width','0px');
$AB(this).css("color","#fff");
$AB(this).next('ul').css('width','150px');
e.stopPropagation();
e.preventDefault();
});
});
}).subscribe(response => {
console.log(response);
});
}
| fillGraphData | identifier_name |
|
unauthorized-view.component.ts | Login:String;
public server = environment.server;
public allAccounts=0;
public orgId:String="";
public orgName:String="";
public totalLogins:Number=0;
public allUsers=[];
public lastLogins=[];
public barChartOptions:any = {
scaleShowVerticalLines: false,
responsive: true,
scales: {
yAxes: [{
type: "linear",
display: true,
position: "left",
id: "y-axis-1",
gridLines: {
display: false
},
scaleLabel: {
display: true,
labelString: 'Number of Logins'
},
labels: {
show: true
},
ticks: {
beginAtZero: true,
userCallback: function(label, index, labels) {
if (Math.floor(label) === label) {
return label;
}
},
}
}],
xAxes: [{
scaleLabel: {
display: true,
labelString: 'Users'
}
}]
}
};
public barChartOptions1:any = {
scaleShowVerticalLines: false,
responsive: true,
scales: {
yAxes: [{
type: "linear",
display: true,
position: "left",
id: "y-axis-1",
gridLines: {
display: false
},
scaleLabel: {
display: true,
labelString: 'Number of Users'
},
labels: {
show: true
},
ticks: {
beginAtZero: true,
userCallback: function(label, index, labels) {
if (Math.floor(label) === label) {
return label;
}
},
}
}],
xAxes: [{
scaleLabel: {
display: true,
labelString: 'Last Activity'
}
}]
}
};
public org_options=[];
public barChartLabels:string[] = [];
public barChartType:string = 'bar';
public barChartLegend:boolean = true;
public barChartData = [];
public barChartLabels1:string[] = [];
public barChartType1:string = 'bar';
public barChartLegend1:boolean = true;
public barChartData1:any[] = [
{data: [], label: ''}
];
public doughnutChartLabels:string[] = ['', ''];
public doughnutChartData:number[] = [];
public doughnutChartType:string = 'doughnut';
public doughnutChartColors: any[] = [{ backgroundColor: ["#88d753", "#ff5656","#ff8605", "#7aad02"] }];
public chartClicked(e:any):void {
console.log(e);
console.log(event);
}
public chartHovered(e:any):void {
console.log(e);
}
public populateData():void {
let that=this;
that.org_options=[];
var url='http://'+that.server+'/'+environment.rbacRoot+'/AccountUsers.php';
var obj={};
if (this.showAccounts!=true)
obj["orgId"]=this.uploadedService.getOrgId();
this.orgId = this.uploadedService.getOrgId();
that.http.post(url, JSON.stringify(obj), {
responseType: 'json'
}).map(response => {
var data=[];
for (var each in response){
if (response[each]["organization"]=="Ruckus Wireless"){
this.orgName = response[each]["organization"];
continue;
}
that.barChartLabels.push(response[each]["organization"]);
that.org_options.push([response[each]["id"],response[each]["organization"]]);
that.barChartData.push(parseInt(response[each]["users"]));
}
}).subscribe(re => {
console.log(re);
});
if (this.showAccounts==true){
that.http.post('http://'+that.server+'/'+environment.rbacRoot+'/listOrganizations.php', JSON.stringify({}), {
responseType: 'json'
}).map(response => {
var obj={};
var allIds={};
for (var k in response){
allIds[response[k]["id"]]=1;
if (response[k]["feature"]=="ADMIN")
continue;
else
{
if (obj.hasOwnProperty(response[k]["feature"])){
if (obj[response[k]["feature"]].indexOf(response[k]["id"])<0){
obj[response[k]["feature"]].push(response[k]["id"]);
}
}
else{
obj[response[k]["feature"]]=[response[k]["id"]];
}
}
}
if (Object.keys(allIds).length>0)
that.allAccounts=Object.keys(allIds).length-1;
else
that.allAccounts=0;
let clone = JSON.parse(JSON.stringify(this.doughnutChartData));
var ind=0;
for (var k in obj){
if (k=="SUPER USER")
continue;
that.doughnutChartLabels[ind]=k;
clone.push(obj[k].length-1);
ind++;
}
that.doughnutChartData=clone;
}).subscribe(res => {
console.log(res);
});
}
}
constructor(private uploadedService :UploadedFloorPlanService,private logger :LoggerService,private http: HttpClient,private authService: AuthService, private spinner: NgxSpinnerService, private router: Router) { }
ngOnInit() {
this.username = sessionStorage.getItem('username');
if (!this.uploadedService.getLoggedIn()) {
this.router.navigate(['/login']);
}
if (this.uploadedService.getLastLogin())
this.lastLogin = this.uploadedService.getLastLogin().toString();
this.totalLogins = this.uploadedService.getTotalLogins();
if (this.totalLogins==0) {
this.router.navigate(['/confirm']);
}
var that=this;
that.uploadedService.setShowAdmin(false);
that.uploadedService.setShowLBS(false);
that.uploadedService.setShowST(false);
that.uploadedService.setShowAccounts(false);
that.uploadedService.setAllowConf(false);
this.spinner.show();
that.http.post('http://'+that.server+'/'+environment.rbacRoot+'/listUserPermissions.php?user_id='+that.uploadedService.getUser(), JSON.stringify({}), {
responseType: 'json'
}).map(response => {
this.spinner.hide();
for (var each in response){
for (var k in response[each]){
if (k=="ADMIN"){
that.uploadedService.setShowAdmin(true);
that.showAdmin=true;
}
else if (k=="LBS"){
that.uploadedService.setShowLBS(true);
that.showLBS=true;
}
else if (k=="SUBSCRIBER TRACING"){
that.uploadedService.setShowST(true);
that.showST=true;
if (response[each][k]=="READ ONLY")
that.allowConf=false;
else if (response[each][k]=="READ/WRITE")
that.allowConf=true;
that.uploadedService.setAllowConf(that.allowConf);
}
else if (k=="SUPER USER"){
that.uploadedService.setShowAccounts(true);
that.showAccounts=true;
}
} | that.populateData();
let jQueryInstance=this;
$AB(document).ready(function(){
$AB(".hamburger").off('click').on('click',function(e){
e.preventDefault();
if ($AB(".panel-body").css("padding-left")=="100px"){
$AB(".sideMenu").slideToggle(function(){
$AB(".panel-body").css("padding-left","10px");
});
}
else{
$AB(".panel-body").css("padding-left","100px");
$AB(".sideMenu").slideToggle();
}
});
$AB(document).click(function(event) {
if (!$(event.target).hasClass('logout')) {
$(".logout").hide();
}
if (!$(event.target).hasClass('slide-menu')) {
$AB(".slide-menu").css('width','0px');
$AB('.dropdown-submenu a.test').css('color','#888888');
}
});
$AB('.first-level > a.test').on("click", function(e){
$AB('.first-level > .dropdown-menu').hide();
});
$AB("#logoutBtn a").off('mouseover').on('mouseover',function(){
$AB(".logout").show();
});
that.fillGraphData(that.uploadedService.getOrgId());
$AB("#loginAttempts").css("display","block");
$AB("#lastActivityChart").css("display","block");
$AB("#listOrganizations").off('change').on('change',function(){
that.spinner.show();
that.fillGraphData($AB(this).find("option:selected").data('row'));
});
$AB('.dropdown-submenu a.test').on("click", function(e){
$AB("a.test").css("color","#888888");
$AB(".slide-menu").css('width','0px');
$AB(this).css("color","#fff");
$AB(this).next | }
this.logger.log("LOGIN","", new Date().toUTCString(),"","SUCCESS",this.showAccounts,this.username as string,that.uploadedService.getRoleName() as string,"DASHBOARD",this.uploadedService.getOrgName() as string);
| random_line_split |
TestGyp.py | (self, gyp=None, *args, **kw):
self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
self.extra_args = sys.argv[1:]
if not gyp:
gyp = os.environ.get('TESTGYP_GYP')
if not gyp:
if sys.platform == 'win32':
gyp = 'gyn-run.bat'
else:
gyp = 'gyn-run'
self.gyp = os.path.abspath(gyp)
self.no_parallel = False
self.formats = [self.format]
self.initialize_build_tool()
kw.setdefault('match', TestCommon.match_exact)
# Put test output in out/testworkarea by default.
# Use temporary names so there are no collisions.
workdir = os.path.join('out', kw.get('workdir', 'testworkarea'))
# Create work area if it doesn't already exist.
if not os.path.isdir(workdir):
os.makedirs(workdir)
kw['workdir'] = tempfile.mktemp(prefix='testgyp.', dir=workdir)
formats = kw.pop('formats', [])
super(TestGypBase, self).__init__(*args, **kw)
real_format = self.format.split('-')[-1]
excluded_formats = set([f for f in formats if f[0] == '!'])
included_formats = set(formats) - excluded_formats
if ('!'+real_format in excluded_formats or
included_formats and real_format not in included_formats):
msg = 'Invalid test for %r format; skipping test.\n'
self.skip_test(msg % self.format)
self.copy_test_configuration(self.origin_cwd, self.workdir)
self.set_configuration(None)
# Set $HOME so that gyp doesn't read the user's actual
# ~/.gyp/include.gypi file, which may contain variables
# and other settings that would change the output.
os.environ['HOME'] = self.workpath()
# Clear $GYP_DEFINES for the same reason.
if 'GYP_DEFINES' in os.environ:
del os.environ['GYP_DEFINES']
# Override the user's language settings, which could
# otherwise make the output vary from what is expected.
os.environ['LC_ALL'] = 'C'
def built_file_must_exist(self, name, type=None, **kw):
"""
Fails the test if the specified built file name does not exist.
"""
return self.must_exist(self.built_file_path(name, type, **kw))
def built_file_must_not_exist(self, name, type=None, **kw):
"""
Fails the test if the specified built file name exists.
"""
return self.must_not_exist(self.built_file_path(name, type, **kw))
def built_file_must_match(self, name, contents, **kw):
"""
Fails the test if the contents of the specified built file name
do not match the specified contents.
"""
return self.must_match(self.built_file_path(name, **kw), contents)
def built_file_must_not_match(self, name, contents, **kw):
"""
Fails the test if the contents of the specified built file name
match the specified contents.
"""
return self.must_not_match(self.built_file_path(name, **kw), contents)
def built_file_must_not_contain(self, name, contents, **kw):
"""
Fails the test if the specified built file name contains the specified
contents.
"""
return self.must_not_contain(self.built_file_path(name, **kw), contents)
def copy_test_configuration(self, source_dir, dest_dir):
"""
Copies the test configuration from the specified source_dir
(the directory in which the test script lives) to the
specified dest_dir (a temporary working directory).
This ignores all files and directories that begin with
the string 'gyptest', and all '.svn' subdirectories.
"""
for root, dirs, files in os.walk(source_dir):
if '.svn' in dirs:
dirs.remove('.svn')
dirs = [ d for d in dirs if not d.startswith('gyptest') ]
files = [ f for f in files if not f.startswith('gyptest') ]
for dirname in dirs:
source = os.path.join(root, dirname)
destination = source.replace(source_dir, dest_dir)
os.mkdir(destination)
if sys.platform != 'win32':
shutil.copystat(source, destination)
for filename in files:
source = os.path.join(root, filename)
destination = source.replace(source_dir, dest_dir)
shutil.copy2(source, destination)
def initialize_build_tool(self):
"""
Initializes the .build_tool attribute.
Searches the .build_tool_list for an executable name on the user's
$PATH. The first tool on the list is used as-is if nothing is found
on the current $PATH.
"""
for build_tool in self.build_tool_list:
if not build_tool:
continue
if os.path.isabs(build_tool):
self.build_tool = build_tool
return
build_tool = self.where_is(build_tool)
if build_tool:
self.build_tool = build_tool
return
if self.build_tool_list:
self.build_tool = self.build_tool_list[0]
def relocate(self, source, destination):
"""
Renames (relocates) the specified source (usually a directory)
to the specified destination, creating the destination directory
first if necessary.
Note: Don't use this as a generic "rename" operation. In the
future, "relocating" parts of a GYP tree may affect the state of
the test to modify the behavior of later method calls.
"""
destination_dir = os.path.dirname(destination)
if not os.path.exists(destination_dir):
self.subdir(destination_dir)
os.rename(source, destination)
def report_not_up_to_date(self):
"""
Reports that a build is not up-to-date.
This provides common reporting for formats that have complicated
conditions for checking whether a build is up-to-date. Formats
that expect exact output from the command (make) can
just set stdout= when they call the run_build() method.
"""
print "Build is not up-to-date:"
print self.banner('STDOUT ')
print self.stdout()
stderr = self.stderr()
if stderr:
print self.banner('STDERR ')
print stderr
def run_gyp(self, gyp_file, *args, **kw):
"""
Runs gyp against the specified gyp_file with the specified args.
"""
# When running gyp, and comparing its output we use a comparitor
# that ignores the line numbers that gyp logs in its debug output.
if kw.pop('ignore_line_numbers', False):
kw.setdefault('match', match_modulo_line_numbers)
# TODO: --depth=. works around Chromium-specific tree climbing.
depth = kw.pop('depth', '.')
run_args = ['--depth='+depth]
run_args.append(gyp_file)
if self.no_parallel:
run_args += ['--no-parallel']
# TODO: if extra_args contains a '--build' flag
# we really want that to only apply to the last format (self.format).
run_args.extend(self.extra_args)
# Default xcode_ninja_target_pattern to ^.*$ to fix xcode-ninja tests
xcode_ninja_target_pattern = kw.pop('xcode_ninja_target_pattern', '.*')
run_args.extend(
['-G', 'xcode_ninja_target_pattern=%s' % xcode_ninja_target_pattern])
run_args.extend(args)
return self.run(program=self.gyp, arguments=run_args, **kw)
def run(self, *args, **kw):
"""
Executes a program by calling the superclass .run() method.
This exists to provide a common place to filter out keyword
arguments implemented in this layer, without having to update
the tool-specific subclasses or clutter the tests themselves
with platform-specific code.
"""
if kw.has_key('SYMROOT'):
del kw['SYMROOT']
super(TestGypBase, self).run(*args, **kw)
def set_configuration(self, configuration):
"""
Sets the configuration, to be used for invoking the build
tool and testing potential built output.
"""
self.configuration = configuration
def configuration_dirname(self):
if self.configuration:
return self.configuration.split('|')[0]
else:
return 'Default'
def configuration_buildname(self):
if self.configuration:
return self.configuration
else:
return 'Default'
#
# Abstract methods to be defined by format-specific subclasses.
#
def build(self, gyp_file, target=None, **kw):
"""
Runs a build of the specified target against the configuration
generated from the specified gyp_file.
A 'target' argument of None or the special value TestGyp.DEFAULT
specifies the default argument for the underlying build tool.
A 'target' argument of TestGyp.ALL specifies the 'all' target
(if any) of the underlying build tool.
"""
raise NotImplementedError
def built_file_path(self, name, type=None, **kw):
"""
Returns a path to the specified file name, of the specified type.
"""
raise NotImplementedError
def built_file_basename(self, name, type | __init__ | identifier_name |
|
TestGyp.py |
import TestCommon
from TestCommon import __all__
__all__.extend([
'TestGyp',
])
def remove_debug_line_numbers(contents):
"""Function to remove the line numbers from the debug output
of gyp and thus reduce the extreme fragility of the stdout
comparison tests.
"""
lines = contents.splitlines()
# split each line on ":"
lines = [l.split(":", 3) for l in lines]
# join each line back together while ignoring the
# 3rd column which is the line number
lines = [len(l) > 3 and ":".join(l[3:]) or l for l in lines]
return "\n".join(lines)
def match_modulo_line_numbers(contents_a, contents_b):
"""File contents matcher that ignores line numbers."""
contents_a = remove_debug_line_numbers(contents_a)
contents_b = remove_debug_line_numbers(contents_b)
return TestCommon.match_exact(contents_a, contents_b)
@contextmanager
def LocalEnv(local_env):
"""Context manager to provide a local OS environment."""
old_env = os.environ.copy()
os.environ.update(local_env)
try:
yield
finally:
os.environ.clear()
os.environ.update(old_env)
class TestGypBase(TestCommon.TestCommon):
"""
Class for controlling end-to-end tests of gyp generators.
Instantiating this class will create a temporary directory and
arrange for its destruction (via the TestCmd superclass) and
copy all of the non-gyptest files in the directory hierarchy of the
executing script.
The default behavior is to test the 'gyp' or 'gyp.bat' file in the
current directory. An alternative may be specified explicitly on
instantiation, or by setting the TESTGYP_GYP environment variable.
This class should be subclassed for each supported gyp generator
(format). Various abstract methods below define calling signatures
used by the test scripts to invoke builds on the generated build
configuration and to run executables generated by those builds.
"""
formats = []
build_tool = None
build_tool_list = []
_exe = TestCommon.exe_suffix
_obj = TestCommon.obj_suffix
shobj_ = TestCommon.shobj_prefix
_shobj = TestCommon.shobj_suffix
lib_ = TestCommon.lib_prefix
_lib = TestCommon.lib_suffix
dll_ = TestCommon.dll_prefix
_dll = TestCommon.dll_suffix
# Constants to represent different targets.
ALL = '__all__'
DEFAULT = '__default__'
# Constants for different target types.
EXECUTABLE = '__executable__'
STATIC_LIB = '__static_lib__'
SHARED_LIB = '__shared_lib__'
def __init__(self, gyp=None, *args, **kw):
self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
self.extra_args = sys.argv[1:]
if not gyp:
gyp = os.environ.get('TESTGYP_GYP')
if not gyp:
if sys.platform == 'win32':
gyp = 'gyn-run.bat'
else:
gyp = 'gyn-run'
self.gyp = os.path.abspath(gyp)
self.no_parallel = False
self.formats = [self.format]
self.initialize_build_tool()
kw.setdefault('match', TestCommon.match_exact)
# Put test output in out/testworkarea by default.
# Use temporary names so there are no collisions.
workdir = os.path.join('out', kw.get('workdir', 'testworkarea'))
# Create work area if it doesn't already exist.
if not os.path.isdir(workdir):
os.makedirs(workdir)
kw['workdir'] = tempfile.mktemp(prefix='testgyp.', dir=workdir)
formats = kw.pop('formats', [])
super(TestGypBase, self).__init__(*args, **kw)
real_format = self.format.split('-')[-1]
excluded_formats = set([f for f in formats if f[0] == '!'])
included_formats = set(formats) - excluded_formats
if ('!'+real_format in excluded_formats or
included_formats and real_format not in included_formats):
msg = 'Invalid test for %r format; skipping test.\n'
self.skip_test(msg % self.format)
self.copy_test_configuration(self.origin_cwd, self.workdir)
self.set_configuration(None)
# Set $HOME so that gyp doesn't read the user's actual
# ~/.gyp/include.gypi file, which may contain variables
# and other settings that would change the output.
os.environ['HOME'] = self.workpath()
# Clear $GYP_DEFINES for the same reason.
if 'GYP_DEFINES' in os.environ:
del os.environ['GYP_DEFINES']
# Override the user's language settings, which could
# otherwise make the output vary from what is expected.
os.environ['LC_ALL'] = 'C'
def built_file_must_exist(self, name, type=None, **kw):
"""
Fails the test if the specified built file name does not exist.
"""
return self.must_exist(self.built_file_path(name, type, **kw))
def built_file_must_not_exist(self, name, type=None, **kw):
"""
Fails the test if the specified built file name exists.
"""
return self.must_not_exist(self.built_file_path(name, type, **kw))
def built_file_must_match(self, name, contents, **kw):
"""
Fails the test if the contents of the specified built file name
do not match the specified contents.
"""
return self.must_match(self.built_file_path(name, **kw), contents)
def built_file_must_not_match(self, name, contents, **kw):
"""
Fails the test if the contents of the specified built file name
match the specified contents.
"""
return self.must_not_match(self.built_file_path(name, **kw), contents)
def built_file_must_not_contain(self, name, contents, **kw):
"""
Fails the test if the specified built file name contains the specified
contents.
"""
return self.must_not_contain(self.built_file_path(name, **kw), contents)
def copy_test_configuration(self, source_dir, dest_dir):
"""
Copies the test configuration from the specified source_dir
(the directory in which the test script lives) to the
specified dest_dir (a temporary working directory).
This ignores all files and directories that begin with
the string 'gyptest', and all '.svn' subdirectories.
"""
for root, dirs, files in os.walk(source_dir):
if '.svn' in dirs:
dirs.remove('.svn')
dirs = [ d for d in dirs if not d.startswith('gyptest') ]
files = [ f for f in files if not f.startswith('gyptest') ]
for dirname in dirs:
source = os.path.join(root, dirname)
destination = source.replace(source_dir, dest_dir)
os.mkdir(destination)
if sys.platform != 'win32':
shutil.copystat(source, destination)
for filename in files:
source = os.path.join(root, filename)
destination = source.replace(source_dir, dest_dir)
shutil.copy2(source, destination)
def initialize_build_tool(self):
"""
Initializes the .build_tool attribute.
Searches the .build_tool_list for an executable name on the user's
$PATH. The first tool on the list is used as-is if nothing is found
on the current $PATH.
"""
for build_tool in self.build_tool_list:
if not build_tool:
continue
if os.path.isabs(build_tool):
self.build_tool = build_tool
return
build_tool = self.where_is(build_tool)
if build_tool:
self.build_tool = build_tool
return
if self.build_tool_list:
self.build_tool = self.build_tool_list[0]
def relocate(self, source, destination):
"""
Renames (relocates) the specified source (usually a directory)
to the specified destination, creating the destination directory
first if necessary.
Note: Don't use this as a generic "rename" operation. In the
future, "relocating" parts of a GYP tree may affect the state of
the test to modify the behavior of later method calls.
"""
destination_dir = os.path.dirname(destination)
if not os.path.exists(destination_dir):
self.subdir(destination_dir)
os.rename(source, destination)
def report_not_up_to_date(self):
"""
Reports that a build is not up-to-date.
This provides common reporting for formats that have complicated
conditions for checking whether a build is up-to-date. Formats
that expect exact output from the command (make) can
just set stdout= when they call the run_build() method.
"""
print "Build is not up-to-date:"
print self.banner('STDOUT ')
print self.stdout()
stderr = self.stderr()
if stderr:
print self.banner('STDERR ')
print stderr
def run_gyp(self, gyp_file, *args, **kw):
"""
Runs gyp against the specified gyp_file with the specified args.
"""
# When running gyp, and comparing its output we | random_line_split |
||
TestGyp.py | _format = self.format.split('-')[-1]
excluded_formats = set([f for f in formats if f[0] == '!'])
included_formats = set(formats) - excluded_formats
if ('!'+real_format in excluded_formats or
included_formats and real_format not in included_formats):
msg = 'Invalid test for %r format; skipping test.\n'
self.skip_test(msg % self.format)
self.copy_test_configuration(self.origin_cwd, self.workdir)
self.set_configuration(None)
# Set $HOME so that gyp doesn't read the user's actual
# ~/.gyp/include.gypi file, which may contain variables
# and other settings that would change the output.
os.environ['HOME'] = self.workpath()
# Clear $GYP_DEFINES for the same reason.
if 'GYP_DEFINES' in os.environ:
del os.environ['GYP_DEFINES']
# Override the user's language settings, which could
# otherwise make the output vary from what is expected.
os.environ['LC_ALL'] = 'C'
def built_file_must_exist(self, name, type=None, **kw):
"""
Fails the test if the specified built file name does not exist.
"""
return self.must_exist(self.built_file_path(name, type, **kw))
def built_file_must_not_exist(self, name, type=None, **kw):
"""
Fails the test if the specified built file name exists.
"""
return self.must_not_exist(self.built_file_path(name, type, **kw))
def built_file_must_match(self, name, contents, **kw):
"""
Fails the test if the contents of the specified built file name
do not match the specified contents.
"""
return self.must_match(self.built_file_path(name, **kw), contents)
def built_file_must_not_match(self, name, contents, **kw):
"""
Fails the test if the contents of the specified built file name
match the specified contents.
"""
return self.must_not_match(self.built_file_path(name, **kw), contents)
def built_file_must_not_contain(self, name, contents, **kw):
|
def copy_test_configuration(self, source_dir, dest_dir):
"""
Copies the test configuration from the specified source_dir
(the directory in which the test script lives) to the
specified dest_dir (a temporary working directory).
This ignores all files and directories that begin with
the string 'gyptest', and all '.svn' subdirectories.
"""
for root, dirs, files in os.walk(source_dir):
if '.svn' in dirs:
dirs.remove('.svn')
dirs = [ d for d in dirs if not d.startswith('gyptest') ]
files = [ f for f in files if not f.startswith('gyptest') ]
for dirname in dirs:
source = os.path.join(root, dirname)
destination = source.replace(source_dir, dest_dir)
os.mkdir(destination)
if sys.platform != 'win32':
shutil.copystat(source, destination)
for filename in files:
source = os.path.join(root, filename)
destination = source.replace(source_dir, dest_dir)
shutil.copy2(source, destination)
def initialize_build_tool(self):
"""
Initializes the .build_tool attribute.
Searches the .build_tool_list for an executable name on the user's
$PATH. The first tool on the list is used as-is if nothing is found
on the current $PATH.
"""
for build_tool in self.build_tool_list:
if not build_tool:
continue
if os.path.isabs(build_tool):
self.build_tool = build_tool
return
build_tool = self.where_is(build_tool)
if build_tool:
self.build_tool = build_tool
return
if self.build_tool_list:
self.build_tool = self.build_tool_list[0]
def relocate(self, source, destination):
"""
Renames (relocates) the specified source (usually a directory)
to the specified destination, creating the destination directory
first if necessary.
Note: Don't use this as a generic "rename" operation. In the
future, "relocating" parts of a GYP tree may affect the state of
the test to modify the behavior of later method calls.
"""
destination_dir = os.path.dirname(destination)
if not os.path.exists(destination_dir):
self.subdir(destination_dir)
os.rename(source, destination)
def report_not_up_to_date(self):
"""
Reports that a build is not up-to-date.
This provides common reporting for formats that have complicated
conditions for checking whether a build is up-to-date. Formats
that expect exact output from the command (make) can
just set stdout= when they call the run_build() method.
"""
print "Build is not up-to-date:"
print self.banner('STDOUT ')
print self.stdout()
stderr = self.stderr()
if stderr:
print self.banner('STDERR ')
print stderr
def run_gyp(self, gyp_file, *args, **kw):
"""
Runs gyp against the specified gyp_file with the specified args.
"""
# When running gyp, and comparing its output we use a comparitor
# that ignores the line numbers that gyp logs in its debug output.
if kw.pop('ignore_line_numbers', False):
kw.setdefault('match', match_modulo_line_numbers)
# TODO: --depth=. works around Chromium-specific tree climbing.
depth = kw.pop('depth', '.')
run_args = ['--depth='+depth]
run_args.append(gyp_file)
if self.no_parallel:
run_args += ['--no-parallel']
# TODO: if extra_args contains a '--build' flag
# we really want that to only apply to the last format (self.format).
run_args.extend(self.extra_args)
# Default xcode_ninja_target_pattern to ^.*$ to fix xcode-ninja tests
xcode_ninja_target_pattern = kw.pop('xcode_ninja_target_pattern', '.*')
run_args.extend(
['-G', 'xcode_ninja_target_pattern=%s' % xcode_ninja_target_pattern])
run_args.extend(args)
return self.run(program=self.gyp, arguments=run_args, **kw)
def run(self, *args, **kw):
"""
Executes a program by calling the superclass .run() method.
This exists to provide a common place to filter out keyword
arguments implemented in this layer, without having to update
the tool-specific subclasses or clutter the tests themselves
with platform-specific code.
"""
if kw.has_key('SYMROOT'):
del kw['SYMROOT']
super(TestGypBase, self).run(*args, **kw)
def set_configuration(self, configuration):
"""
Sets the configuration, to be used for invoking the build
tool and testing potential built output.
"""
self.configuration = configuration
def configuration_dirname(self):
if self.configuration:
return self.configuration.split('|')[0]
else:
return 'Default'
def configuration_buildname(self):
if self.configuration:
return self.configuration
else:
return 'Default'
#
# Abstract methods to be defined by format-specific subclasses.
#
def build(self, gyp_file, target=None, **kw):
"""
Runs a build of the specified target against the configuration
generated from the specified gyp_file.
A 'target' argument of None or the special value TestGyp.DEFAULT
specifies the default argument for the underlying build tool.
A 'target' argument of TestGyp.ALL specifies the 'all' target
(if any) of the underlying build tool.
"""
raise NotImplementedError
def built_file_path(self, name, type=None, **kw):
"""
Returns a path to the specified file name, of the specified type.
"""
raise NotImplementedError
def built_file_basename(self, name, type=None, **kw):
"""
Returns the base name of the specified file name, of the specified type.
A bare=True keyword argument specifies that prefixes and suffixes shouldn't
be applied.
"""
if not kw.get('bare'):
if type == self.EXECUTABLE:
name = name + self._exe
elif type == self.STATIC_LIB:
name = self.lib_ + name + self._lib
elif type == self.SHARED_LIB:
name = self.dll_ + name + self._dll
return name
def run_built_executable(self, name, *args, **kw):
"""
Runs an executable program built from a gyp-generated configuration.
The specified name should be independent of any particular generator.
Subclasses should find the output executable in the appropriate
output build directory, tack on any necessary executable suffix, etc.
"""
raise NotImplementedError
def up_to_date(self, gyp_file, target=None, **kw):
"""
Verifies that a build of the specified target is up to date.
The subclass should implement this by calling build()
(or a reasonable equivalent), checking whatever conditions
will tell it the build was an "up to date" | """
Fails the test if the specified built file name contains the specified
contents.
"""
return self.must_not_contain(self.built_file_path(name, **kw), contents) | identifier_body |
TestGyp.py | .stderr()
if stderr:
print self.banner('STDERR ')
print stderr
def run_gyp(self, gyp_file, *args, **kw):
"""
Runs gyp against the specified gyp_file with the specified args.
"""
# When running gyp, and comparing its output we use a comparitor
# that ignores the line numbers that gyp logs in its debug output.
if kw.pop('ignore_line_numbers', False):
kw.setdefault('match', match_modulo_line_numbers)
# TODO: --depth=. works around Chromium-specific tree climbing.
depth = kw.pop('depth', '.')
run_args = ['--depth='+depth]
run_args.append(gyp_file)
if self.no_parallel:
run_args += ['--no-parallel']
# TODO: if extra_args contains a '--build' flag
# we really want that to only apply to the last format (self.format).
run_args.extend(self.extra_args)
# Default xcode_ninja_target_pattern to ^.*$ to fix xcode-ninja tests
xcode_ninja_target_pattern = kw.pop('xcode_ninja_target_pattern', '.*')
run_args.extend(
['-G', 'xcode_ninja_target_pattern=%s' % xcode_ninja_target_pattern])
run_args.extend(args)
return self.run(program=self.gyp, arguments=run_args, **kw)
def run(self, *args, **kw):
"""
Executes a program by calling the superclass .run() method.
This exists to provide a common place to filter out keyword
arguments implemented in this layer, without having to update
the tool-specific subclasses or clutter the tests themselves
with platform-specific code.
"""
if kw.has_key('SYMROOT'):
del kw['SYMROOT']
super(TestGypBase, self).run(*args, **kw)
def set_configuration(self, configuration):
"""
Sets the configuration, to be used for invoking the build
tool and testing potential built output.
"""
self.configuration = configuration
def configuration_dirname(self):
if self.configuration:
return self.configuration.split('|')[0]
else:
return 'Default'
def configuration_buildname(self):
if self.configuration:
return self.configuration
else:
return 'Default'
#
# Abstract methods to be defined by format-specific subclasses.
#
def build(self, gyp_file, target=None, **kw):
"""
Runs a build of the specified target against the configuration
generated from the specified gyp_file.
A 'target' argument of None or the special value TestGyp.DEFAULT
specifies the default argument for the underlying build tool.
A 'target' argument of TestGyp.ALL specifies the 'all' target
(if any) of the underlying build tool.
"""
raise NotImplementedError
def built_file_path(self, name, type=None, **kw):
"""
Returns a path to the specified file name, of the specified type.
"""
raise NotImplementedError
def built_file_basename(self, name, type=None, **kw):
"""
Returns the base name of the specified file name, of the specified type.
A bare=True keyword argument specifies that prefixes and suffixes shouldn't
be applied.
"""
if not kw.get('bare'):
if type == self.EXECUTABLE:
name = name + self._exe
elif type == self.STATIC_LIB:
name = self.lib_ + name + self._lib
elif type == self.SHARED_LIB:
name = self.dll_ + name + self._dll
return name
def run_built_executable(self, name, *args, **kw):
"""
Runs an executable program built from a gyp-generated configuration.
The specified name should be independent of any particular generator.
Subclasses should find the output executable in the appropriate
output build directory, tack on any necessary executable suffix, etc.
"""
raise NotImplementedError
def up_to_date(self, gyp_file, target=None, **kw):
"""
Verifies that a build of the specified target is up to date.
The subclass should implement this by calling build()
(or a reasonable equivalent), checking whatever conditions
will tell it the build was an "up to date" null build, and
failing if it isn't.
"""
raise NotImplementedError
class TestGypCustom(TestGypBase):
"""
Subclass for testing the GYP with custom generator
"""
def __init__(self, gyp=None, *args, **kw):
self.format = kw.pop("format")
super(TestGypCustom, self).__init__(*args, **kw)
def ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def FindMSBuildInstallation(msvs_version = 'auto'):
"""Returns path to MSBuild for msvs_version or latest available.
Looks in the registry to find install location of MSBuild.
MSBuild before v4.0 will not build c++ projects, so only use newer versions.
"""
import TestWin
registry = TestWin.Registry()
msvs_to_msbuild = {
'2013': r'12.0',
'2012': r'4.0', # Really v4.0.30319 which comes with .NET 4.5.
'2010': r'4.0'}
msbuild_basekey = r'HKLM\SOFTWARE\Microsoft\MSBuild\ToolsVersions'
if not registry.KeyExists(msbuild_basekey):
print 'Error: could not find MSBuild base registry entry'
return None
msbuild_version = None
if msvs_version in msvs_to_msbuild:
msbuild_test_version = msvs_to_msbuild[msvs_version]
if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version):
msbuild_version = msbuild_test_version
else:
print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
'but corresponding MSBuild "%s" was not found.' %
(msvs_version, msbuild_version))
if not msbuild_version:
for msvs_version in sorted(msvs_to_msbuild, reverse=True):
msbuild_test_version = msvs_to_msbuild[msvs_version]
if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version):
msbuild_version = msbuild_test_version
break
if not msbuild_version:
print 'Error: could not find MSBuild registry entry'
return None
msbuild_path = registry.GetValue(msbuild_basekey + '\\' + msbuild_version,
'MSBuildToolsPath')
if not msbuild_path:
print 'Error: could not get MSBuild registry entry value'
return None
return os.path.join(msbuild_path, 'MSBuild.exe')
def FindVisualStudioInstallation():
"""Returns appropriate values for .build_tool and .uses_msbuild fields
of TestGypBase for Visual Studio.
We use the value specified by GYP_MSVS_VERSION. If not specified, we
search %PATH% and %PATHEXT% for a devenv.{exe,bat,...} executable.
Failing that, we search for likely deployment paths.
"""
possible_roots = ['%s:\\Program Files%s' % (chr(drive), suffix)
for drive in range(ord('C'), ord('Z') + 1)
for suffix in ['', ' (x86)']]
possible_paths = {
'2013': r'Microsoft Visual Studio 12.0\Common7\IDE\devenv.com',
'2012': r'Microsoft Visual Studio 11.0\Common7\IDE\devenv.com',
'2010': r'Microsoft Visual Studio 10.0\Common7\IDE\devenv.com',
'2008': r'Microsoft Visual Studio 9.0\Common7\IDE\devenv.com',
'2005': r'Microsoft Visual Studio 8\Common7\IDE\devenv.com'}
possible_roots = [ConvertToCygpath(r) for r in possible_roots]
msvs_version = 'auto'
for flag in (f for f in sys.argv if f.startswith('msvs_version=')):
msvs_version = flag.split('=')[-1]
msvs_version = os.environ.get('GYP_MSVS_VERSION', msvs_version)
if msvs_version in possible_paths:
# Check that the path to the specified GYP_MSVS_VERSION exists.
path = possible_paths[msvs_version]
for r in possible_roots:
build_tool = os.path.join(r, path)
if os.path.exists(build_tool):
uses_msbuild = msvs_version >= '2010'
msbuild_path = FindMSBuildInstallation(msvs_version)
return build_tool, uses_msbuild, msbuild_path
else:
| print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
'but corresponding "%s" was not found.' % (msvs_version, path)) | conditional_block |
|
table.rs | chunks: Vec<Arc<dyn Array>>,
num_rows: usize,
null_count: usize,
}
impl ChunkedArray {
/// Construct a `ChunkedArray` from a list of `Array`s.
///
/// There must be at least 1 array, and all arrays must have the same data type.
fn from_arrays(arrays: Vec<Arc<dyn Array>>) -> Self {
assert!(!arrays.is_empty());
let mut num_rows = 0;
let mut null_count = 0;
// check that arrays have the same type
let data_type = &arrays[0].data_type();
arrays.iter().for_each(|array| {
assert!(&array.data_type() == data_type);
num_rows += array.len();
null_count += array.null_count();
});
ChunkedArray {
chunks: arrays,
num_rows,
null_count,
}
}
/// Return the length of the arrays in the chunk. This value is pre-computed.
pub fn num_rows(&self) -> usize {
self.num_rows
}
pub fn null_count(&self) -> usize {
self.null_count
}
pub fn num_chunks(&self) -> usize {
self.chunks.len()
}
/// Get the count per chunk
///
/// This is useful for repartitioning
pub(crate) fn chunk_counts(&self) -> Vec<usize> {
self.chunks().iter().map(|chunk| chunk.len()).collect()
}
/// Get a chunk from the chunked array by index
/// TODO: should this have bounds-chacking?
pub fn chunk(&self, i: usize) -> &Arc<dyn Array> {
&self.chunks[i]
}
pub fn chunks(&self) -> &Vec<Arc<dyn Array>> {
&self.chunks
}
/// Construct a zero-copy slice of the chunked array with the indicated offset and length.
///
/// The `offset` is the position of the first element in the constructed slice.
/// `length` is the length of the slice. If there are not enough elements in the chunked array,
/// the length will be adjusted accordingly.
fn slice(&self, offset: usize, length: Option<usize>) -> Self {
let mut offset = offset;
let mut length = length.unwrap_or(std::usize::MAX);
length = std::cmp::min(length, self.num_rows());
let mut current_chunk: usize = 0;
let mut new_chunks: Vec<ArrayRef> = vec![];
// compute the first offset. If offset > whole chunks' lengths, skip those chunks
while current_chunk < self.num_chunks() && offset >= self.chunk(current_chunk).len() {
offset -= self.chunk(current_chunk).len();
current_chunk += 1;
}
while current_chunk < self.num_chunks() && length > 0 {
new_chunks.push(self.chunk(current_chunk).slice(offset, length));
length -= std::cmp::min(length, self.chunk(current_chunk).len() - offset);
offset = 0;
current_chunk += 1;
}
Self::from_arrays(new_chunks)
}
fn filter(&self, condition: &Self) -> Self {
let filtered: arrow::error::Result<Vec<ArrayRef>> = self
.chunks()
.iter()
.zip(condition.chunks())
.map(|(a, b): (&ArrayRef, &ArrayRef)| {
arrow::compute::filter(a.as_ref(), &BooleanArray::from(b.data()))
})
.collect();
Self::from_arrays(filtered.unwrap())
}
fn flatten(&self) {
unimplemented!("This is for flattening struct columns, we aren't yet there")
}
}
pub fn col_to_prim_arrays<T>(column: &Column) -> Vec<&PrimitiveArray<T>>
where
T: ArrowPrimitiveType,
{
let mut arrays: Vec<&PrimitiveArray<T>> = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<PrimitiveArray<T>>().unwrap())
}
arrays
}
pub fn col_to_string_arrays(column: &Column) -> Vec<&StringArray> {
let mut arrays = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<StringArray>().unwrap())
}
arrays
}
/// A column data structure consisting of a `Field` and `ChunkedArray`
#[derive(Clone)]
pub struct Column {
pub(crate) data: ChunkedArray,
field: arrow::datatypes::Field,
}
impl Column {
pub fn from_chunked_array(chunk: ChunkedArray, field: arrow::datatypes::Field) -> Self {
Column { data: chunk, field }
}
pub fn from_arrays(arrays: Vec<Arc<dyn Array>>, field: arrow::datatypes::Field) -> Self {
assert!(!arrays.is_empty());
for array in &arrays {
assert!(array.data_type() == field.data_type());
}
Column {
data: ChunkedArray::from_arrays(arrays),
field,
}
}
/// Merge the chunk arrays into a single array
///
/// Returns an error if concatenating the array type is not supported,
/// or the dataframe is empty
pub fn to_array(&self) -> Result<ArrayRef> {
Ok(arrow::compute::concat(self.data().chunks())?)
}
pub fn name(&self) -> &str {
self.field.name()
}
pub fn data_type(&self) -> &DataType {
self.field.data_type()
}
pub fn data(&self) -> &ChunkedArray {
&self.data
}
pub(crate) fn field(&self) -> &Field {
&self.field
}
pub fn slice(&self, offset: usize, length: Option<usize>) -> Self {
Self::from_chunked_array(self.data().slice(offset, length), self.field().clone())
}
pub fn null_count(&self) -> usize {
self.data().null_count()
}
pub fn num_rows(&self) -> usize {
self.data().num_rows()
}
/// Filter this column using a Boolean column as the mask
pub fn filter(&self, condition: &Self) -> Self {
Self::from_chunked_array(self.data.filter(condition.data()), self.field.clone())
}
/// Create a new column by taking values at indices, while repartitioning to the chunk size
pub fn take(&self, indices: &UInt32Array, chunk_size: usize) -> Result<Self> {
let mut consumed_len = 0;
let total_len = indices.len();
let values = self.to_array()?;
let mut outputs = vec![];
while consumed_len < total_len {
let bounded_len = if total_len < chunk_size {
total_len
} else if consumed_len + chunk_size > total_len {
chunk_size
} else {
total_len - consumed_len
};
let slice = indices.slice(consumed_len, bounded_len);
let slice = slice.as_any().downcast_ref::<UInt32Array>().unwrap();
let taken = arrow::compute::take(&values, slice, None)?;
outputs.push(taken);
consumed_len += bounded_len;
}
Ok(Self {
data: ChunkedArray::from_arrays(outputs),
field: self.field.clone(),
})
}
fn flatten() {}
}
/// Alogical table as a sequence of chunked arrays
pub struct Table {
schema: Arc<Schema>,
pub(crate) columns: Vec<Column>,
}
impl Table {
pub fn new(schema: Arc<Schema>, columns: Vec<Column>) -> Self {
// assert that there are some columns
assert!(
!columns.is_empty(),
"at least one column must be defined to create a record batch"
);
// assert that all columns have the same row count
let len = columns[0].data().num_rows();
for column in &columns {
assert_eq!(
len,
column.data().num_rows(),
"all columns in a record batch must have the same length"
);
}
Table { schema, columns }
}
pub fn schema(&self) -> &Arc<Schema> {
&self.schema
}
pub fn num_columns(&self) -> usize {
self.columns.len()
}
pub fn num_rows(&self) -> usize {
self.columns[0].data().num_rows()
}
// keep fn
pub fn column(&self, i: usize) -> &Column {
&self.columns[i]
}
pub fn columns(&self) -> &Vec<Column> {
&self.columns
}
// new fns
fn add_column() {}
// fn remove_column(&self, _i: usize) -> Self {
// Table {
// schema: self.schema.clone(),
// columns: self.columns
// }
// }
/// Replace a column in the table, producing a new `Table`
fn set_column() {}
fn replace_schema_metadata() {}
/// Each column with a struct type is flattened into one column per struct field.
/// Other columns are left unchanged.
fn flatten() {}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make(columns: Vec<Column>) -> Self {
let fields: Vec<Field> = columns.iter().map(|column| column.field | pub struct ChunkedArray { | random_line_split |
|
table.rs | 0].data_type();
arrays.iter().for_each(|array| {
assert!(&array.data_type() == data_type);
num_rows += array.len();
null_count += array.null_count();
});
ChunkedArray {
chunks: arrays,
num_rows,
null_count,
}
}
/// Return the length of the arrays in the chunk. This value is pre-computed.
pub fn num_rows(&self) -> usize {
self.num_rows
}
pub fn null_count(&self) -> usize {
self.null_count
}
pub fn num_chunks(&self) -> usize {
self.chunks.len()
}
/// Get the count per chunk
///
/// This is useful for repartitioning
pub(crate) fn chunk_counts(&self) -> Vec<usize> {
self.chunks().iter().map(|chunk| chunk.len()).collect()
}
/// Get a chunk from the chunked array by index
/// TODO: should this have bounds-chacking?
pub fn | (&self, i: usize) -> &Arc<dyn Array> {
&self.chunks[i]
}
pub fn chunks(&self) -> &Vec<Arc<dyn Array>> {
&self.chunks
}
/// Construct a zero-copy slice of the chunked array with the indicated offset and length.
///
/// The `offset` is the position of the first element in the constructed slice.
/// `length` is the length of the slice. If there are not enough elements in the chunked array,
/// the length will be adjusted accordingly.
fn slice(&self, offset: usize, length: Option<usize>) -> Self {
let mut offset = offset;
let mut length = length.unwrap_or(std::usize::MAX);
length = std::cmp::min(length, self.num_rows());
let mut current_chunk: usize = 0;
let mut new_chunks: Vec<ArrayRef> = vec![];
// compute the first offset. If offset > whole chunks' lengths, skip those chunks
while current_chunk < self.num_chunks() && offset >= self.chunk(current_chunk).len() {
offset -= self.chunk(current_chunk).len();
current_chunk += 1;
}
while current_chunk < self.num_chunks() && length > 0 {
new_chunks.push(self.chunk(current_chunk).slice(offset, length));
length -= std::cmp::min(length, self.chunk(current_chunk).len() - offset);
offset = 0;
current_chunk += 1;
}
Self::from_arrays(new_chunks)
}
fn filter(&self, condition: &Self) -> Self {
let filtered: arrow::error::Result<Vec<ArrayRef>> = self
.chunks()
.iter()
.zip(condition.chunks())
.map(|(a, b): (&ArrayRef, &ArrayRef)| {
arrow::compute::filter(a.as_ref(), &BooleanArray::from(b.data()))
})
.collect();
Self::from_arrays(filtered.unwrap())
}
fn flatten(&self) {
unimplemented!("This is for flattening struct columns, we aren't yet there")
}
}
pub fn col_to_prim_arrays<T>(column: &Column) -> Vec<&PrimitiveArray<T>>
where
T: ArrowPrimitiveType,
{
let mut arrays: Vec<&PrimitiveArray<T>> = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<PrimitiveArray<T>>().unwrap())
}
arrays
}
pub fn col_to_string_arrays(column: &Column) -> Vec<&StringArray> {
let mut arrays = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<StringArray>().unwrap())
}
arrays
}
/// A column data structure consisting of a `Field` and `ChunkedArray`
#[derive(Clone)]
pub struct Column {
pub(crate) data: ChunkedArray,
field: arrow::datatypes::Field,
}
impl Column {
pub fn from_chunked_array(chunk: ChunkedArray, field: arrow::datatypes::Field) -> Self {
Column { data: chunk, field }
}
pub fn from_arrays(arrays: Vec<Arc<dyn Array>>, field: arrow::datatypes::Field) -> Self {
assert!(!arrays.is_empty());
for array in &arrays {
assert!(array.data_type() == field.data_type());
}
Column {
data: ChunkedArray::from_arrays(arrays),
field,
}
}
/// Merge the chunk arrays into a single array
///
/// Returns an error if concatenating the array type is not supported,
/// or the dataframe is empty
pub fn to_array(&self) -> Result<ArrayRef> {
Ok(arrow::compute::concat(self.data().chunks())?)
}
pub fn name(&self) -> &str {
self.field.name()
}
pub fn data_type(&self) -> &DataType {
self.field.data_type()
}
pub fn data(&self) -> &ChunkedArray {
&self.data
}
pub(crate) fn field(&self) -> &Field {
&self.field
}
pub fn slice(&self, offset: usize, length: Option<usize>) -> Self {
Self::from_chunked_array(self.data().slice(offset, length), self.field().clone())
}
pub fn null_count(&self) -> usize {
self.data().null_count()
}
pub fn num_rows(&self) -> usize {
self.data().num_rows()
}
/// Filter this column using a Boolean column as the mask
pub fn filter(&self, condition: &Self) -> Self {
Self::from_chunked_array(self.data.filter(condition.data()), self.field.clone())
}
/// Create a new column by taking values at indices, while repartitioning to the chunk size
pub fn take(&self, indices: &UInt32Array, chunk_size: usize) -> Result<Self> {
let mut consumed_len = 0;
let total_len = indices.len();
let values = self.to_array()?;
let mut outputs = vec![];
while consumed_len < total_len {
let bounded_len = if total_len < chunk_size {
total_len
} else if consumed_len + chunk_size > total_len {
chunk_size
} else {
total_len - consumed_len
};
let slice = indices.slice(consumed_len, bounded_len);
let slice = slice.as_any().downcast_ref::<UInt32Array>().unwrap();
let taken = arrow::compute::take(&values, slice, None)?;
outputs.push(taken);
consumed_len += bounded_len;
}
Ok(Self {
data: ChunkedArray::from_arrays(outputs),
field: self.field.clone(),
})
}
fn flatten() {}
}
/// Alogical table as a sequence of chunked arrays
pub struct Table {
schema: Arc<Schema>,
pub(crate) columns: Vec<Column>,
}
impl Table {
pub fn new(schema: Arc<Schema>, columns: Vec<Column>) -> Self {
// assert that there are some columns
assert!(
!columns.is_empty(),
"at least one column must be defined to create a record batch"
);
// assert that all columns have the same row count
let len = columns[0].data().num_rows();
for column in &columns {
assert_eq!(
len,
column.data().num_rows(),
"all columns in a record batch must have the same length"
);
}
Table { schema, columns }
}
pub fn schema(&self) -> &Arc<Schema> {
&self.schema
}
pub fn num_columns(&self) -> usize {
self.columns.len()
}
pub fn num_rows(&self) -> usize {
self.columns[0].data().num_rows()
}
// keep fn
pub fn column(&self, i: usize) -> &Column {
&self.columns[i]
}
pub fn columns(&self) -> &Vec<Column> {
&self.columns
}
// new fns
fn add_column() {}
// fn remove_column(&self, _i: usize) -> Self {
// Table {
// schema: self.schema.clone(),
// columns: self.columns
// }
// }
/// Replace a column in the table, producing a new `Table`
fn set_column() {}
fn replace_schema_metadata() {}
/// Each column with a struct type is flattened into one column per struct field.
/// Other columns are left unchanged.
fn flatten() {}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make(columns: Vec<Column>) -> Self {
let fields: Vec<Field> = columns.iter().map(|column| column.field.clone()).collect();
Self {
schema: Arc::new(Schema::new(fields)),
columns,
}
}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make_with_schema(schema: Arc<Schema>, columns: Vec<Column>) -> Self {
// TODO validate that schema and columns match
Self { schema, columns }
}
/// Slice the table from an offset
pub fn slice(&self, offset: usize, limit: usize) -> Self {
Self {
schema: self.schema.clone(),
columns: self
.columns
.clone | chunk | identifier_name |
table.rs | 0].data_type();
arrays.iter().for_each(|array| {
assert!(&array.data_type() == data_type);
num_rows += array.len();
null_count += array.null_count();
});
ChunkedArray {
chunks: arrays,
num_rows,
null_count,
}
}
/// Return the length of the arrays in the chunk. This value is pre-computed.
pub fn num_rows(&self) -> usize {
self.num_rows
}
pub fn null_count(&self) -> usize {
self.null_count
}
pub fn num_chunks(&self) -> usize {
self.chunks.len()
}
/// Get the count per chunk
///
/// This is useful for repartitioning
pub(crate) fn chunk_counts(&self) -> Vec<usize> {
self.chunks().iter().map(|chunk| chunk.len()).collect()
}
/// Get a chunk from the chunked array by index
/// TODO: should this have bounds-chacking?
pub fn chunk(&self, i: usize) -> &Arc<dyn Array> {
&self.chunks[i]
}
pub fn chunks(&self) -> &Vec<Arc<dyn Array>> {
&self.chunks
}
/// Construct a zero-copy slice of the chunked array with the indicated offset and length.
///
/// The `offset` is the position of the first element in the constructed slice.
/// `length` is the length of the slice. If there are not enough elements in the chunked array,
/// the length will be adjusted accordingly.
fn slice(&self, offset: usize, length: Option<usize>) -> Self {
let mut offset = offset;
let mut length = length.unwrap_or(std::usize::MAX);
length = std::cmp::min(length, self.num_rows());
let mut current_chunk: usize = 0;
let mut new_chunks: Vec<ArrayRef> = vec![];
// compute the first offset. If offset > whole chunks' lengths, skip those chunks
while current_chunk < self.num_chunks() && offset >= self.chunk(current_chunk).len() {
offset -= self.chunk(current_chunk).len();
current_chunk += 1;
}
while current_chunk < self.num_chunks() && length > 0 {
new_chunks.push(self.chunk(current_chunk).slice(offset, length));
length -= std::cmp::min(length, self.chunk(current_chunk).len() - offset);
offset = 0;
current_chunk += 1;
}
Self::from_arrays(new_chunks)
}
fn filter(&self, condition: &Self) -> Self {
let filtered: arrow::error::Result<Vec<ArrayRef>> = self
.chunks()
.iter()
.zip(condition.chunks())
.map(|(a, b): (&ArrayRef, &ArrayRef)| {
arrow::compute::filter(a.as_ref(), &BooleanArray::from(b.data()))
})
.collect();
Self::from_arrays(filtered.unwrap())
}
fn flatten(&self) {
unimplemented!("This is for flattening struct columns, we aren't yet there")
}
}
pub fn col_to_prim_arrays<T>(column: &Column) -> Vec<&PrimitiveArray<T>>
where
T: ArrowPrimitiveType,
{
let mut arrays: Vec<&PrimitiveArray<T>> = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<PrimitiveArray<T>>().unwrap())
}
arrays
}
pub fn col_to_string_arrays(column: &Column) -> Vec<&StringArray> {
let mut arrays = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<StringArray>().unwrap())
}
arrays
}
/// A column data structure consisting of a `Field` and `ChunkedArray`
#[derive(Clone)]
pub struct Column {
pub(crate) data: ChunkedArray,
field: arrow::datatypes::Field,
}
impl Column {
pub fn from_chunked_array(chunk: ChunkedArray, field: arrow::datatypes::Field) -> Self {
Column { data: chunk, field }
}
pub fn from_arrays(arrays: Vec<Arc<dyn Array>>, field: arrow::datatypes::Field) -> Self {
assert!(!arrays.is_empty());
for array in &arrays {
assert!(array.data_type() == field.data_type());
}
Column {
data: ChunkedArray::from_arrays(arrays),
field,
}
}
/// Merge the chunk arrays into a single array
///
/// Returns an error if concatenating the array type is not supported,
/// or the dataframe is empty
pub fn to_array(&self) -> Result<ArrayRef> {
Ok(arrow::compute::concat(self.data().chunks())?)
}
pub fn name(&self) -> &str {
self.field.name()
}
pub fn data_type(&self) -> &DataType {
self.field.data_type()
}
pub fn data(&self) -> &ChunkedArray {
&self.data
}
pub(crate) fn field(&self) -> &Field {
&self.field
}
pub fn slice(&self, offset: usize, length: Option<usize>) -> Self {
Self::from_chunked_array(self.data().slice(offset, length), self.field().clone())
}
pub fn null_count(&self) -> usize {
self.data().null_count()
}
pub fn num_rows(&self) -> usize {
self.data().num_rows()
}
/// Filter this column using a Boolean column as the mask
pub fn filter(&self, condition: &Self) -> Self {
Self::from_chunked_array(self.data.filter(condition.data()), self.field.clone())
}
/// Create a new column by taking values at indices, while repartitioning to the chunk size
pub fn take(&self, indices: &UInt32Array, chunk_size: usize) -> Result<Self> {
let mut consumed_len = 0;
let total_len = indices.len();
let values = self.to_array()?;
let mut outputs = vec![];
while consumed_len < total_len {
let bounded_len = if total_len < chunk_size | else if consumed_len + chunk_size > total_len {
chunk_size
} else {
total_len - consumed_len
};
let slice = indices.slice(consumed_len, bounded_len);
let slice = slice.as_any().downcast_ref::<UInt32Array>().unwrap();
let taken = arrow::compute::take(&values, slice, None)?;
outputs.push(taken);
consumed_len += bounded_len;
}
Ok(Self {
data: ChunkedArray::from_arrays(outputs),
field: self.field.clone(),
})
}
fn flatten() {}
}
/// Alogical table as a sequence of chunked arrays
pub struct Table {
schema: Arc<Schema>,
pub(crate) columns: Vec<Column>,
}
impl Table {
pub fn new(schema: Arc<Schema>, columns: Vec<Column>) -> Self {
// assert that there are some columns
assert!(
!columns.is_empty(),
"at least one column must be defined to create a record batch"
);
// assert that all columns have the same row count
let len = columns[0].data().num_rows();
for column in &columns {
assert_eq!(
len,
column.data().num_rows(),
"all columns in a record batch must have the same length"
);
}
Table { schema, columns }
}
pub fn schema(&self) -> &Arc<Schema> {
&self.schema
}
pub fn num_columns(&self) -> usize {
self.columns.len()
}
pub fn num_rows(&self) -> usize {
self.columns[0].data().num_rows()
}
// keep fn
pub fn column(&self, i: usize) -> &Column {
&self.columns[i]
}
pub fn columns(&self) -> &Vec<Column> {
&self.columns
}
// new fns
fn add_column() {}
// fn remove_column(&self, _i: usize) -> Self {
// Table {
// schema: self.schema.clone(),
// columns: self.columns
// }
// }
/// Replace a column in the table, producing a new `Table`
fn set_column() {}
fn replace_schema_metadata() {}
/// Each column with a struct type is flattened into one column per struct field.
/// Other columns are left unchanged.
fn flatten() {}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make(columns: Vec<Column>) -> Self {
let fields: Vec<Field> = columns.iter().map(|column| column.field.clone()).collect();
Self {
schema: Arc::new(Schema::new(fields)),
columns,
}
}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make_with_schema(schema: Arc<Schema>, columns: Vec<Column>) -> Self {
// TODO validate that schema and columns match
Self { schema, columns }
}
/// Slice the table from an offset
pub fn slice(&self, offset: usize, limit: usize) -> Self {
Self {
schema: self.schema.clone(),
columns: self
.columns
.clone | {
total_len
} | conditional_block |
table.rs | 0].data_type();
arrays.iter().for_each(|array| {
assert!(&array.data_type() == data_type);
num_rows += array.len();
null_count += array.null_count();
});
ChunkedArray {
chunks: arrays,
num_rows,
null_count,
}
}
/// Return the length of the arrays in the chunk. This value is pre-computed.
pub fn num_rows(&self) -> usize {
self.num_rows
}
pub fn null_count(&self) -> usize {
self.null_count
}
pub fn num_chunks(&self) -> usize {
self.chunks.len()
}
/// Get the count per chunk
///
/// This is useful for repartitioning
pub(crate) fn chunk_counts(&self) -> Vec<usize> {
self.chunks().iter().map(|chunk| chunk.len()).collect()
}
/// Get a chunk from the chunked array by index
/// TODO: should this have bounds-chacking?
pub fn chunk(&self, i: usize) -> &Arc<dyn Array> {
&self.chunks[i]
}
pub fn chunks(&self) -> &Vec<Arc<dyn Array>> {
&self.chunks
}
/// Construct a zero-copy slice of the chunked array with the indicated offset and length.
///
/// The `offset` is the position of the first element in the constructed slice.
/// `length` is the length of the slice. If there are not enough elements in the chunked array,
/// the length will be adjusted accordingly.
fn slice(&self, offset: usize, length: Option<usize>) -> Self {
let mut offset = offset;
let mut length = length.unwrap_or(std::usize::MAX);
length = std::cmp::min(length, self.num_rows());
let mut current_chunk: usize = 0;
let mut new_chunks: Vec<ArrayRef> = vec![];
// compute the first offset. If offset > whole chunks' lengths, skip those chunks
while current_chunk < self.num_chunks() && offset >= self.chunk(current_chunk).len() {
offset -= self.chunk(current_chunk).len();
current_chunk += 1;
}
while current_chunk < self.num_chunks() && length > 0 {
new_chunks.push(self.chunk(current_chunk).slice(offset, length));
length -= std::cmp::min(length, self.chunk(current_chunk).len() - offset);
offset = 0;
current_chunk += 1;
}
Self::from_arrays(new_chunks)
}
fn filter(&self, condition: &Self) -> Self {
let filtered: arrow::error::Result<Vec<ArrayRef>> = self
.chunks()
.iter()
.zip(condition.chunks())
.map(|(a, b): (&ArrayRef, &ArrayRef)| {
arrow::compute::filter(a.as_ref(), &BooleanArray::from(b.data()))
})
.collect();
Self::from_arrays(filtered.unwrap())
}
fn flatten(&self) {
unimplemented!("This is for flattening struct columns, we aren't yet there")
}
}
pub fn col_to_prim_arrays<T>(column: &Column) -> Vec<&PrimitiveArray<T>>
where
T: ArrowPrimitiveType,
{
let mut arrays: Vec<&PrimitiveArray<T>> = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<PrimitiveArray<T>>().unwrap())
}
arrays
}
pub fn col_to_string_arrays(column: &Column) -> Vec<&StringArray> {
let mut arrays = vec![];
for chunk in column.data().chunks() {
arrays.push(chunk.as_any().downcast_ref::<StringArray>().unwrap())
}
arrays
}
/// A column data structure consisting of a `Field` and `ChunkedArray`
#[derive(Clone)]
pub struct Column {
pub(crate) data: ChunkedArray,
field: arrow::datatypes::Field,
}
impl Column {
pub fn from_chunked_array(chunk: ChunkedArray, field: arrow::datatypes::Field) -> Self {
Column { data: chunk, field }
}
pub fn from_arrays(arrays: Vec<Arc<dyn Array>>, field: arrow::datatypes::Field) -> Self {
assert!(!arrays.is_empty());
for array in &arrays {
assert!(array.data_type() == field.data_type());
}
Column {
data: ChunkedArray::from_arrays(arrays),
field,
}
}
/// Merge the chunk arrays into a single array
///
/// Returns an error if concatenating the array type is not supported,
/// or the dataframe is empty
pub fn to_array(&self) -> Result<ArrayRef> {
Ok(arrow::compute::concat(self.data().chunks())?)
}
pub fn name(&self) -> &str {
self.field.name()
}
pub fn data_type(&self) -> &DataType {
self.field.data_type()
}
pub fn data(&self) -> &ChunkedArray {
&self.data
}
pub(crate) fn field(&self) -> &Field {
&self.field
}
pub fn slice(&self, offset: usize, length: Option<usize>) -> Self {
Self::from_chunked_array(self.data().slice(offset, length), self.field().clone())
}
pub fn null_count(&self) -> usize {
self.data().null_count()
}
pub fn num_rows(&self) -> usize {
self.data().num_rows()
}
/// Filter this column using a Boolean column as the mask
pub fn filter(&self, condition: &Self) -> Self {
Self::from_chunked_array(self.data.filter(condition.data()), self.field.clone())
}
/// Create a new column by taking values at indices, while repartitioning to the chunk size
pub fn take(&self, indices: &UInt32Array, chunk_size: usize) -> Result<Self> {
let mut consumed_len = 0;
let total_len = indices.len();
let values = self.to_array()?;
let mut outputs = vec![];
while consumed_len < total_len {
let bounded_len = if total_len < chunk_size {
total_len
} else if consumed_len + chunk_size > total_len {
chunk_size
} else {
total_len - consumed_len
};
let slice = indices.slice(consumed_len, bounded_len);
let slice = slice.as_any().downcast_ref::<UInt32Array>().unwrap();
let taken = arrow::compute::take(&values, slice, None)?;
outputs.push(taken);
consumed_len += bounded_len;
}
Ok(Self {
data: ChunkedArray::from_arrays(outputs),
field: self.field.clone(),
})
}
fn flatten() {}
}
/// Alogical table as a sequence of chunked arrays
pub struct Table {
schema: Arc<Schema>,
pub(crate) columns: Vec<Column>,
}
impl Table {
pub fn new(schema: Arc<Schema>, columns: Vec<Column>) -> Self {
// assert that there are some columns
assert!(
!columns.is_empty(),
"at least one column must be defined to create a record batch"
);
// assert that all columns have the same row count
let len = columns[0].data().num_rows();
for column in &columns {
assert_eq!(
len,
column.data().num_rows(),
"all columns in a record batch must have the same length"
);
}
Table { schema, columns }
}
pub fn schema(&self) -> &Arc<Schema> {
&self.schema
}
pub fn num_columns(&self) -> usize {
self.columns.len()
}
pub fn num_rows(&self) -> usize {
self.columns[0].data().num_rows()
}
// keep fn
pub fn column(&self, i: usize) -> &Column {
&self.columns[i]
}
pub fn columns(&self) -> &Vec<Column> {
&self.columns
}
// new fns
fn add_column() {}
// fn remove_column(&self, _i: usize) -> Self {
// Table {
// schema: self.schema.clone(),
// columns: self.columns
// }
// }
/// Replace a column in the table, producing a new `Table`
fn set_column() {}
fn replace_schema_metadata() {}
/// Each column with a struct type is flattened into one column per struct field.
/// Other columns are left unchanged.
fn flatten() {}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make(columns: Vec<Column>) -> Self {
let fields: Vec<Field> = columns.iter().map(|column| column.field.clone()).collect();
Self {
schema: Arc::new(Schema::new(fields)),
columns,
}
}
/// Construct a `Table` from a sequence of `Column`s and a schema
fn make_with_schema(schema: Arc<Schema>, columns: Vec<Column>) -> Self |
/// Slice the table from an offset
pub fn slice(&self, offset: usize, limit: usize) -> Self {
Self {
schema: self.schema.clone(),
columns: self
.columns
. | {
// TODO validate that schema and columns match
Self { schema, columns }
} | identifier_body |
client.rs | Transaction;
use crate::transaction::TransactionOptions;
use crate::Backoff;
use crate::BoundRange;
use crate::Result;
// FIXME: cargo-culted value
const SCAN_LOCK_BATCH_SIZE: u32 = 1024;
/// The TiKV transactional `Client` is used to interact with TiKV using transactional requests.
///
/// Transactions support optimistic and pessimistic modes. For more details see the SIG-transaction
/// [docs](https://github.com/tikv/sig-transaction/tree/master/doc/tikv#optimistic-and-pessimistic-transactions).
///
/// Begin a [`Transaction`] by calling [`begin_optimistic`](Client::begin_optimistic) or
/// [`begin_pessimistic`](Client::begin_pessimistic). A transaction must be rolled back or committed.
///
/// Besides transactions, the client provides some further functionality:
/// - `gc`: trigger a GC process which clears stale data in the cluster.
/// - `current_timestamp`: get the current `Timestamp` from PD.
/// - `snapshot`: get a [`Snapshot`] of the database at a specified timestamp.
/// A `Snapshot` is a read-only transaction.
///
/// The returned results of transactional requests are [`Future`](std::future::Future)s that must be
/// awaited to execute.
pub struct Client {
pd: Arc<PdRpcClient>,
}
impl Clone for Client {
fn clone(&self) -> Self {
Self {
pd: self.pd.clone(),
}
}
}
impl Client {
/// Create a transactional [`Client`] and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// # });
/// ```
pub async fn new<S: Into<String>>(pd_endpoints: Vec<S>) -> Result<Client> {
// debug!("creating transactional client");
Self::new_with_config(pd_endpoints, Config::default()).await
}
/// Create a transactional [`Client`] with a custom configuration, and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # use std::time::Duration;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new_with_config(
/// vec!["192.168.0.100"],
/// Config::default().with_timeout(Duration::from_secs(60)),
/// )
/// .await
/// .unwrap();
/// # });
/// ```
pub async fn new_with_config<S: Into<String>>(
pd_endpoints: Vec<S>,
config: Config,
) -> Result<Client> {
debug!("creating new transactional client");
let pd_endpoints: Vec<String> = pd_endpoints.into_iter().map(Into::into).collect();
let pd = Arc::new(PdRpcClient::connect(&pd_endpoints, config, true).await?);
Ok(Client { pd })
}
/// Creates a new optimistic [`Transaction`].
///
/// Use the transaction to issue requests like [`get`](Transaction::get) or
/// [`put`](Transaction::put).
///
/// Write operations do not lock data in TiKV, thus the commit request may fail due to a write
/// conflict.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_optimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_optimistic(&self) -> Result<Transaction> {
debug!("creating new optimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_optimistic()))
}
/// Creates a new pessimistic [`Transaction`].
///
/// Write operations will lock the data until committed, thus commit requests should not suffer
/// from write conflicts.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_pessimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_pessimistic(&self) -> Result<Transaction> {
debug!("creating new pessimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_pessimistic()))
}
/// Create a new customized [`Transaction`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient, TransactionOptions};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client
/// .begin_with_options(TransactionOptions::default().use_async_commit())
/// .await
/// .unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_with_options(&self, options: TransactionOptions) -> Result<Transaction> {
debug!("creating new customized transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, options))
}
/// Create a new [`Snapshot`](Snapshot) at the given [`Timestamp`](Timestamp).
pub fn snapshot(&self, timestamp: Timestamp, options: TransactionOptions) -> Snapshot {
debug!("creating new snapshot"); | /// Retrieve the current [`Timestamp`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let timestamp = client.current_timestamp().await.unwrap();
/// # });
/// ```
pub async fn current_timestamp(&self) -> Result<Timestamp> {
self.pd.clone().get_timestamp().await
}
/// Request garbage collection (GC) of the TiKV cluster.
///
/// GC deletes MVCC records whose timestamp is lower than the given `safepoint`. We must guarantee
/// that all transactions started before this timestamp had committed. We can keep an active
/// transaction list in application to decide which is the minimal start timestamp of them.
///
/// For each key, the last mutation record (unless it's a deletion) before `safepoint` is retained.
///
/// GC is performed by:
/// 1. resolving all locks with timestamp <= `safepoint`
/// 2. updating PD's known safepoint
///
/// This is a simplified version of [GC in TiDB](https://docs.pingcap.com/tidb/stable/garbage-collection-overview).
/// We skip the second step "delete ranges" which is an optimization for TiDB.
pub async fn gc(&self, safepoint: Timestamp) -> Result<bool> {
debug!("invoking transactional gc request");
let options = ResolveLocksOptions {
batch_size: SCAN_LOCK_BATCH_SIZE,
..Default::default()
};
self.cleanup_locks(.., &safepoint, options).await?;
// update safepoint to PD
let res: bool = self
.pd
.clone()
.update_safepoint(safepoint.version())
.await?;
if !res {
info!("new safepoint != user-specified safepoint");
}
Ok(res)
}
pub async fn cleanup_locks(
&self,
| Snapshot::new(self.new_transaction(timestamp, options.read_only()))
}
| random_line_split |
client.rs | ;
use crate::transaction::TransactionOptions;
use crate::Backoff;
use crate::BoundRange;
use crate::Result;
// FIXME: cargo-culted value
const SCAN_LOCK_BATCH_SIZE: u32 = 1024;
/// The TiKV transactional `Client` is used to interact with TiKV using transactional requests.
///
/// Transactions support optimistic and pessimistic modes. For more details see the SIG-transaction
/// [docs](https://github.com/tikv/sig-transaction/tree/master/doc/tikv#optimistic-and-pessimistic-transactions).
///
/// Begin a [`Transaction`] by calling [`begin_optimistic`](Client::begin_optimistic) or
/// [`begin_pessimistic`](Client::begin_pessimistic). A transaction must be rolled back or committed.
///
/// Besides transactions, the client provides some further functionality:
/// - `gc`: trigger a GC process which clears stale data in the cluster.
/// - `current_timestamp`: get the current `Timestamp` from PD.
/// - `snapshot`: get a [`Snapshot`] of the database at a specified timestamp.
/// A `Snapshot` is a read-only transaction.
///
/// The returned results of transactional requests are [`Future`](std::future::Future)s that must be
/// awaited to execute.
pub struct Client {
pd: Arc<PdRpcClient>,
}
impl Clone for Client {
fn clone(&self) -> Self {
Self {
pd: self.pd.clone(),
}
}
}
impl Client {
/// Create a transactional [`Client`] and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// # });
/// ```
pub async fn new<S: Into<String>>(pd_endpoints: Vec<S>) -> Result<Client> {
// debug!("creating transactional client");
Self::new_with_config(pd_endpoints, Config::default()).await
}
/// Create a transactional [`Client`] with a custom configuration, and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # use std::time::Duration;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new_with_config(
/// vec!["192.168.0.100"],
/// Config::default().with_timeout(Duration::from_secs(60)),
/// )
/// .await
/// .unwrap();
/// # });
/// ```
pub async fn new_with_config<S: Into<String>>(
pd_endpoints: Vec<S>,
config: Config,
) -> Result<Client> {
debug!("creating new transactional client");
let pd_endpoints: Vec<String> = pd_endpoints.into_iter().map(Into::into).collect();
let pd = Arc::new(PdRpcClient::connect(&pd_endpoints, config, true).await?);
Ok(Client { pd })
}
/// Creates a new optimistic [`Transaction`].
///
/// Use the transaction to issue requests like [`get`](Transaction::get) or
/// [`put`](Transaction::put).
///
/// Write operations do not lock data in TiKV, thus the commit request may fail due to a write
/// conflict.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_optimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_optimistic(&self) -> Result<Transaction> {
debug!("creating new optimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_optimistic()))
}
/// Creates a new pessimistic [`Transaction`].
///
/// Write operations will lock the data until committed, thus commit requests should not suffer
/// from write conflicts.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_pessimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_pessimistic(&self) -> Result<Transaction> {
debug!("creating new pessimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_pessimistic()))
}
/// Create a new customized [`Transaction`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient, TransactionOptions};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client
/// .begin_with_options(TransactionOptions::default().use_async_commit())
/// .await
/// .unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_with_options(&self, options: TransactionOptions) -> Result<Transaction> {
debug!("creating new customized transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, options))
}
/// Create a new [`Snapshot`](Snapshot) at the given [`Timestamp`](Timestamp).
pub fn snapshot(&self, timestamp: Timestamp, options: TransactionOptions) -> Snapshot {
debug!("creating new snapshot");
Snapshot::new(self.new_transaction(timestamp, options.read_only()))
}
/// Retrieve the current [`Timestamp`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let timestamp = client.current_timestamp().await.unwrap();
/// # });
/// ```
pub async fn current_timestamp(&self) -> Result<Timestamp> {
self.pd.clone().get_timestamp().await
}
/// Request garbage collection (GC) of the TiKV cluster.
///
/// GC deletes MVCC records whose timestamp is lower than the given `safepoint`. We must guarantee
/// that all transactions started before this timestamp had committed. We can keep an active
/// transaction list in application to decide which is the minimal start timestamp of them.
///
/// For each key, the last mutation record (unless it's a deletion) before `safepoint` is retained.
///
/// GC is performed by:
/// 1. resolving all locks with timestamp <= `safepoint`
/// 2. updating PD's known safepoint
///
/// This is a simplified version of [GC in TiDB](https://docs.pingcap.com/tidb/stable/garbage-collection-overview).
/// We skip the second step "delete ranges" which is an optimization for TiDB.
pub async fn gc(&self, safepoint: Timestamp) -> Result<bool> {
debug!("invoking transactional gc request");
let options = ResolveLocksOptions {
batch_size: SCAN_LOCK_BATCH_SIZE,
..Default::default()
};
self.cleanup_locks(.., &safepoint, options).await?;
// update safepoint to PD
let res: bool = self
.pd
.clone()
.update_safepoint(safepoint.version())
.await?;
if !res |
Ok(res)
}
pub async fn cleanup_locks(
&self | {
info!("new safepoint != user-specified safepoint");
} | conditional_block |
client.rs | ;
use crate::transaction::TransactionOptions;
use crate::Backoff;
use crate::BoundRange;
use crate::Result;
// FIXME: cargo-culted value
const SCAN_LOCK_BATCH_SIZE: u32 = 1024;
/// The TiKV transactional `Client` is used to interact with TiKV using transactional requests.
///
/// Transactions support optimistic and pessimistic modes. For more details see the SIG-transaction
/// [docs](https://github.com/tikv/sig-transaction/tree/master/doc/tikv#optimistic-and-pessimistic-transactions).
///
/// Begin a [`Transaction`] by calling [`begin_optimistic`](Client::begin_optimistic) or
/// [`begin_pessimistic`](Client::begin_pessimistic). A transaction must be rolled back or committed.
///
/// Besides transactions, the client provides some further functionality:
/// - `gc`: trigger a GC process which clears stale data in the cluster.
/// - `current_timestamp`: get the current `Timestamp` from PD.
/// - `snapshot`: get a [`Snapshot`] of the database at a specified timestamp.
/// A `Snapshot` is a read-only transaction.
///
/// The returned results of transactional requests are [`Future`](std::future::Future)s that must be
/// awaited to execute.
pub struct Client {
pd: Arc<PdRpcClient>,
}
impl Clone for Client {
fn clone(&self) -> Self {
Self {
pd: self.pd.clone(),
}
}
}
impl Client {
/// Create a transactional [`Client`] and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// # });
/// ```
pub async fn new<S: Into<String>>(pd_endpoints: Vec<S>) -> Result<Client> {
// debug!("creating transactional client");
Self::new_with_config(pd_endpoints, Config::default()).await
}
/// Create a transactional [`Client`] with a custom configuration, and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # use std::time::Duration;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new_with_config(
/// vec!["192.168.0.100"],
/// Config::default().with_timeout(Duration::from_secs(60)),
/// )
/// .await
/// .unwrap();
/// # });
/// ```
pub async fn new_with_config<S: Into<String>>(
pd_endpoints: Vec<S>,
config: Config,
) -> Result<Client> {
debug!("creating new transactional client");
let pd_endpoints: Vec<String> = pd_endpoints.into_iter().map(Into::into).collect();
let pd = Arc::new(PdRpcClient::connect(&pd_endpoints, config, true).await?);
Ok(Client { pd })
}
/// Creates a new optimistic [`Transaction`].
///
/// Use the transaction to issue requests like [`get`](Transaction::get) or
/// [`put`](Transaction::put).
///
/// Write operations do not lock data in TiKV, thus the commit request may fail due to a write
/// conflict.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_optimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_optimistic(&self) -> Result<Transaction> {
debug!("creating new optimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_optimistic()))
}
/// Creates a new pessimistic [`Transaction`].
///
/// Write operations will lock the data until committed, thus commit requests should not suffer
/// from write conflicts.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_pessimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_pessimistic(&self) -> Result<Transaction> {
debug!("creating new pessimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_pessimistic()))
}
/// Create a new customized [`Transaction`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient, TransactionOptions};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client
/// .begin_with_options(TransactionOptions::default().use_async_commit())
/// .await
/// .unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_with_options(&self, options: TransactionOptions) -> Result<Transaction> {
debug!("creating new customized transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, options))
}
/// Create a new [`Snapshot`](Snapshot) at the given [`Timestamp`](Timestamp).
pub fn snapshot(&self, timestamp: Timestamp, options: TransactionOptions) -> Snapshot |
/// Retrieve the current [`Timestamp`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let timestamp = client.current_timestamp().await.unwrap();
/// # });
/// ```
pub async fn current_timestamp(&self) -> Result<Timestamp> {
self.pd.clone().get_timestamp().await
}
/// Request garbage collection (GC) of the TiKV cluster.
///
/// GC deletes MVCC records whose timestamp is lower than the given `safepoint`. We must guarantee
/// that all transactions started before this timestamp had committed. We can keep an active
/// transaction list in application to decide which is the minimal start timestamp of them.
///
/// For each key, the last mutation record (unless it's a deletion) before `safepoint` is retained.
///
/// GC is performed by:
/// 1. resolving all locks with timestamp <= `safepoint`
/// 2. updating PD's known safepoint
///
/// This is a simplified version of [GC in TiDB](https://docs.pingcap.com/tidb/stable/garbage-collection-overview).
/// We skip the second step "delete ranges" which is an optimization for TiDB.
pub async fn gc(&self, safepoint: Timestamp) -> Result<bool> {
debug!("invoking transactional gc request");
let options = ResolveLocksOptions {
batch_size: SCAN_LOCK_BATCH_SIZE,
..Default::default()
};
self.cleanup_locks(.., &safepoint, options).await?;
// update safepoint to PD
let res: bool = self
.pd
.clone()
.update_safepoint(safepoint.version())
.await?;
if !res {
info!("new safepoint != user-specified safepoint");
}
Ok(res)
}
pub async fn cleanup_locks(
&self | {
debug!("creating new snapshot");
Snapshot::new(self.new_transaction(timestamp, options.read_only()))
} | identifier_body |
client.rs | ;
use crate::transaction::TransactionOptions;
use crate::Backoff;
use crate::BoundRange;
use crate::Result;
// FIXME: cargo-culted value
const SCAN_LOCK_BATCH_SIZE: u32 = 1024;
/// The TiKV transactional `Client` is used to interact with TiKV using transactional requests.
///
/// Transactions support optimistic and pessimistic modes. For more details see the SIG-transaction
/// [docs](https://github.com/tikv/sig-transaction/tree/master/doc/tikv#optimistic-and-pessimistic-transactions).
///
/// Begin a [`Transaction`] by calling [`begin_optimistic`](Client::begin_optimistic) or
/// [`begin_pessimistic`](Client::begin_pessimistic). A transaction must be rolled back or committed.
///
/// Besides transactions, the client provides some further functionality:
/// - `gc`: trigger a GC process which clears stale data in the cluster.
/// - `current_timestamp`: get the current `Timestamp` from PD.
/// - `snapshot`: get a [`Snapshot`] of the database at a specified timestamp.
/// A `Snapshot` is a read-only transaction.
///
/// The returned results of transactional requests are [`Future`](std::future::Future)s that must be
/// awaited to execute.
pub struct Client {
pd: Arc<PdRpcClient>,
}
impl Clone for Client {
fn clone(&self) -> Self {
Self {
pd: self.pd.clone(),
}
}
}
impl Client {
/// Create a transactional [`Client`] and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// # });
/// ```
pub async fn | <S: Into<String>>(pd_endpoints: Vec<S>) -> Result<Client> {
// debug!("creating transactional client");
Self::new_with_config(pd_endpoints, Config::default()).await
}
/// Create a transactional [`Client`] with a custom configuration, and connect to the TiKV cluster.
///
/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for
/// PD must be provided, not the TiKV nodes. It's important to include more than one PD endpoint
/// (include all endpoints, if possible), this helps avoid having a single point of failure.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # use std::time::Duration;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new_with_config(
/// vec!["192.168.0.100"],
/// Config::default().with_timeout(Duration::from_secs(60)),
/// )
/// .await
/// .unwrap();
/// # });
/// ```
pub async fn new_with_config<S: Into<String>>(
pd_endpoints: Vec<S>,
config: Config,
) -> Result<Client> {
debug!("creating new transactional client");
let pd_endpoints: Vec<String> = pd_endpoints.into_iter().map(Into::into).collect();
let pd = Arc::new(PdRpcClient::connect(&pd_endpoints, config, true).await?);
Ok(Client { pd })
}
/// Creates a new optimistic [`Transaction`].
///
/// Use the transaction to issue requests like [`get`](Transaction::get) or
/// [`put`](Transaction::put).
///
/// Write operations do not lock data in TiKV, thus the commit request may fail due to a write
/// conflict.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_optimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_optimistic(&self) -> Result<Transaction> {
debug!("creating new optimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_optimistic()))
}
/// Creates a new pessimistic [`Transaction`].
///
/// Write operations will lock the data until committed, thus commit requests should not suffer
/// from write conflicts.
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client.begin_pessimistic().await.unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_pessimistic(&self) -> Result<Transaction> {
debug!("creating new pessimistic transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, TransactionOptions::new_pessimistic()))
}
/// Create a new customized [`Transaction`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient, TransactionOptions};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let mut transaction = client
/// .begin_with_options(TransactionOptions::default().use_async_commit())
/// .await
/// .unwrap();
/// // ... Issue some commands.
/// transaction.commit().await.unwrap();
/// # });
/// ```
pub async fn begin_with_options(&self, options: TransactionOptions) -> Result<Transaction> {
debug!("creating new customized transaction");
let timestamp = self.current_timestamp().await?;
Ok(self.new_transaction(timestamp, options))
}
/// Create a new [`Snapshot`](Snapshot) at the given [`Timestamp`](Timestamp).
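///
/// # Examples
///
/// An illustrative sketch (assuming default options are acceptable for a read-only view):
///
/// ```rust,no_run
/// # use tikv_client::{TransactionClient, TransactionOptions};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let timestamp = client.current_timestamp().await.unwrap();
/// let snapshot = client.snapshot(timestamp, TransactionOptions::default());
/// # });
/// ```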
pub fn snapshot(&self, timestamp: Timestamp, options: TransactionOptions) -> Snapshot {
debug!("creating new snapshot");
Snapshot::new(self.new_transaction(timestamp, options.read_only()))
}
/// Retrieve the current [`Timestamp`].
///
/// # Examples
///
/// ```rust,no_run
/// # use tikv_client::{Config, TransactionClient};
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let timestamp = client.current_timestamp().await.unwrap();
/// # });
/// ```
pub async fn current_timestamp(&self) -> Result<Timestamp> {
self.pd.clone().get_timestamp().await
}
/// Request garbage collection (GC) of the TiKV cluster.
///
/// GC deletes MVCC records whose timestamp is lower than the given `safepoint`. We must guarantee
/// that all transactions started before this timestamp have committed. We can keep an active
/// transaction list in the application to determine the minimal start timestamp among them.
///
/// For each key, the last mutation record (unless it's a deletion) before `safepoint` is retained.
///
/// GC is performed by:
/// 1. resolving all locks with timestamp <= `safepoint`
/// 2. updating PD's known safepoint
///
/// This is a simplified version of [GC in TiDB](https://docs.pingcap.com/tidb/stable/garbage-collection-overview).
/// We skip the second step "delete ranges" which is an optimization for TiDB.
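///
/// # Examples
///
/// An illustrative sketch; a real application must choose a `safepoint` no newer than the start
/// timestamp of its oldest active transaction:
///
/// ```rust,no_run
/// # use tikv_client::TransactionClient;
/// # use futures::prelude::*;
/// # futures::executor::block_on(async {
/// let client = TransactionClient::new(vec!["192.168.0.100"]).await.unwrap();
/// let safepoint = client.current_timestamp().await.unwrap();
/// let accepted = client.gc(safepoint).await.unwrap();
/// # });
/// ```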
pub async fn gc(&self, safepoint: Timestamp) -> Result<bool> {
debug!("invoking transactional gc request");
let options = ResolveLocksOptions {
batch_size: SCAN_LOCK_BATCH_SIZE,
..Default::default()
};
self.cleanup_locks(.., &safepoint, options).await?;
// update safepoint to PD
let res: bool = self
.pd
.clone()
.update_safepoint(safepoint.version())
.await?;
if !res {
info!("new safepoint != user-specified safepoint");
}
Ok(res)
}
pub async fn cleanup_locks(
&self,
| new | identifier_name |
scalecontrols.js | option><option value="xlog">log</option><option disabled>y axis:</option><option value="ylinear">linear</option><option value="ylog">log</option></select>';
JS9.ScaleLimits.plotHTML='<div><center>Pixel Distribution: %s</center></div><div class="JS9ScalePlot" style="width:%spx;height:%spx"></div>';
JS9.ScaleLimits.loHTML='Low: <input type="text" class="JS9ScaleValue" value=\'%s\' onchange="JS9.ScaleLimits.xsetlo(\'%s\', \'%s\', this)" size="16">';
JS9.ScaleLimits.hiHTML='High: <input type="text" class="JS9ScaleValue" value=\'%s\' onchange="JS9.ScaleLimits.xsethi(\'%s\', \'%s\', this)" size="16">';
// change scale
JS9.ScaleLimits.xsetscale = function(did, id, target){
const im = JS9.lookupImage(id, did);
if( im ){
im.setScale(target.value);
}
};
// change low clipping limit
JS9.ScaleLimits.xsetlo = function(did, id, target){
let val;
const im = JS9.lookupImage(id, did);
if( im ){
val = parseFloat(target.value);
im.setScale(val, im.params.scalemax);
}
};
// change high clipping limit
JS9.ScaleLimits.xsethi = function(did, id, target){
let val;
const im = JS9.lookupImage(id, did);
if( im ){
val = parseFloat(target.value);
im.setScale(im.params.scalemin, val);
}
};
// other ways to determine limits
JS9.ScaleLimits.xsetlims = function(did, id, target){
const im = JS9.lookupImage(id, did);
if( im ){
switch(target.value){
case "dataminmax":
im.setScale("dataminmax", im.raw.dmin, im.raw.dmax);
break;
case "zscale_z1_z2":
im.setScale("zscale", im.params.z1, im.params.z2);
break;
case "zscale_z1_datamax":
im.setScale("zmax", im.params.z1, im.raw.dmax);
break;
default:
break;
}
}
};
// log10 scaling
JS9.ScaleLimits.log10 = function(v){
return v <= 0 ? null : Math.log(v) / Math.LN10;
};
// change the plot axis scaling (linear or log)
JS9.ScaleLimits.xaxes = function(did, id, target){
let plugin;
const im = JS9.lookupImage(id, did);
if( im ){
// get current plugin instance
plugin = im.display.pluginInstances[JS9.ScaleLimits.BASE];
// sanity check
if( !plugin || !plugin.plot ){
return;
}
// change the scale for the specified axis
switch(target.value){
case "xlinear":
plugin.xscale = "linear";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
case "xlog":
plugin.xscale = "log";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
case "ylinear":
plugin.yscale = "linear";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
case "ylog":
plugin.yscale = "log";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
default:
break;
}
}
// reset top-level
$(target).val("Plot Axes").prop("selected", true);
};
JS9.ScaleLimits.getPixelDist = function(im, ndist){
let i, idx;
const dist = [];
const dmin = im.raw.dmin;
const drange = im.raw.dmax - im.raw.dmin;
const imlen = im.raw.width * im.raw.height;
for(i=0; i<ndist; i++){
dist[i] = 0;
}
for(i=0; i<imlen; i++){
// idx = Math.floor((im.raw.data[i] / drange) * ndist + 0.5);
idx = Math.floor(((im.raw.data[i] - dmin) / drange) * ndist + 0.5);
if( idx >= 0 && idx < ndist ){
dist[idx] += 1;
}
}
return dist;
};
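// The result is a histogram: pixel values are binned from dmin to dmax into ndist bins.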
JS9.ScaleLimits.to10E = function(i){
const superscripts = ["⁰", "¹", "²", "³", "⁴", "⁵", "⁶", "⁷", "⁸", "⁹"];
if( JS9.ScaleLimits.AXISFANCY && i >= 0 && i <= 9 ){
return `10${superscripts[i]}`;
}
return `10E${String(i)}`;
};
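// For example, to10E(3) yields "10³" when AXISFANCY is enabled, and "10E3" otherwise.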
JS9.ScaleLimits.doplot = function(im){
let i, j, s, el, xmin, xmax;
let dist, distmin, distmax, ntick, tickinc;
const dmin = im.raw.dmin;
const drange = im.raw.dmax - im.raw.dmin;
const pobj = $.extend(true, {}, JS9.ScaleLimits.dataOpts);
const popts = $.extend(true, {}, JS9.ScaleLimits.plotOpts);
const gettickinc = (datarange) => {
let tickinc;
if( datarange < 10 ){
tickinc = 1;
} else if( datarange < 50 ){
tickinc = 5;
} else if( datarange < 250 ){
tickinc = 10;
} else if( datarange < 500 ){
tickinc = 50;
} else if( datarange < 2500 ){
tickinc = 100;
} else if( datarange < 5000 ){
tickinc = 500;
} else if( datarange < 25000 ){
tickinc = 1000;
} else if( datarange < 50000 ){
tickinc = 5000;
} else if( datarange < 250000 ){
tickinc = 10000;
} else if( datarange < 500000 ){
tickinc = 50000;
} else if( datarange < 2500000 ){
tickinc = 100000;
} else if( datarange < 5000000 ){
tic | ge < 25000000 ){
tickinc = 1000000;
} else {
tickinc = 10000000;
}
return tickinc;
};
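// For example, a data range of 300 falls in the "< 500" bucket above, giving a tick increment of 50.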
const annotate = (plot, x, color) => {
const ctx = plot.getCanvas().getContext("2d");
const size = JS9.ScaleLimits.CARET;
const o = plot.pointOffset({x: x, y: 0});
ctx.beginPath();
ctx.moveTo(o.left, o.top);
ctx.lineTo(o.left - size, o.top - (size*2));
ctx.lineTo(o.left + size, o.top - (size*2));
ctx.lineTo(o.left, o.top);
ctx.fillStyle = color;
ctx.fill();
};
// flag we have just started
this.plotComplete = false;
// plot options
if( this.plotColor ){
pobj.color = this.plotColor;
}
// pixel distribution
dist = JS9.ScaleLimits.getPixelDist(im, this.ndist);
// convert to flot data
for(i=0; i<this.ndist; i++){
pobj.data[i] = [i, dist[i]];
}
// xaxis
popts.xaxis = popts.xaxis || {};
popts.xaxis.font = JS9.ScaleLimits.AXISFONT;
if( this.xscale === "linear" ){
popts.xaxis.transform = null;
popts.xaxis.ticks = [];
tickinc = gettickinc(drange);
ntick = Math.floor(drange/tickinc + 0.5) + 1;
for(i=0; i<ntick; i++){
j = i * tickinc;
s = String(j);
popts.xaxis.ticks[i] = [(j - dmin) * this.ndist / drange, s];
}
} else if( this.xscale === "log" ){
popts.xaxis.transform = JS9.ScaleLimits.log10;
popts.xaxis.min = 1;
popts.xaxis.ticks = [];
ntick = JS9.ScaleLimits.log10(this.ndist) + 1;
for(i=0; i<ntick; i++){
j = Math.floor( (Math.pow(10, i) - dmin) * this.ndist / drange);
popts.xaxis.ticks[i] = [j, JS9.ScaleLimits.to10E(i)];
}
}
// plot location of current scaling min and max for annotations
xmin = ((im.params.scalemin - dmin) / drange) * this.ndist;
| kinc = 500000;
} else if( dataran | conditional_block |
scalecontrols.js | ;
case "xlog":
plugin.xscale = "log";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
case "ylinear":
plugin.yscale = "linear";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
case "ylog":
plugin.yscale = "log";
JS9.ScaleLimits.doplot.call(plugin, im);
break;
default:
break;
}
}
// reset top-level
$(target).val("Plot Axes").prop("selected", true);
};
JS9.ScaleLimits.getPixelDist = function(im, ndist){
let i, idx;
const dist = [];
const dmin = im.raw.dmin;
const drange = im.raw.dmax - im.raw.dmin;
const imlen = im.raw.width * im.raw.height;
for(i=0; i<ndist; i++){
dist[i] = 0;
}
for(i=0; i<imlen; i++){
// idx = Math.floor((im.raw.data[i] / drange) * ndist + 0.5);
idx = Math.floor(((im.raw.data[i] - dmin) / drange) * ndist + 0.5);
if( idx >= 0 && idx < ndist ){
dist[idx] += 1;
}
}
return dist;
};
JS9.ScaleLimits.to10E = function(i){
const superscripts = ["⁰", "¹", "²", "³", "⁴", "⁵", "⁶", "⁷", "⁸", "⁹"];
if( JS9.ScaleLimits.AXISFANCY && i >= 0 && i <= 9 ){
return `10${superscripts[i]}`;
}
return `10E${String(i)}`;
};
JS9.ScaleLimits.doplot = function(im){
let i, j, s, el, xmin, xmax;
let dist, distmin, distmax, ntick, tickinc;
const dmin = im.raw.dmin;
const drange = im.raw.dmax - im.raw.dmin;
const pobj = $.extend(true, {}, JS9.ScaleLimits.dataOpts);
const popts = $.extend(true, {}, JS9.ScaleLimits.plotOpts);
const gettickinc = (datarange) => {
let tickinc;
if( datarange < 10 ){
tickinc = 1;
} else if( datarange < 50 ){
tickinc = 5;
} else if( datarange < 250 ){
tickinc = 10;
} else if( datarange < 500 ){
tickinc = 50;
} else if( datarange < 2500 ){
tickinc = 100;
} else if( datarange < 5000 ){
tickinc = 500;
} else if( datarange < 25000 ){
tickinc = 1000;
} else if( datarange < 50000 ){
tickinc = 5000;
} else if( datarange < 250000 ){
tickinc = 10000;
} else if( datarange < 500000 ){
tickinc = 50000;
} else if( datarange < 2500000 ){
tickinc = 100000;
} else if( datarange < 5000000 ){
tickinc = 500000;
} else if( datarange < 25000000 ){
tickinc = 1000000;
} else {
tickinc = 10000000;
}
return tickinc;
};
const annotate = (plot, x, color) => {
const ctx = plot.getCanvas().getContext("2d");
const size = JS9.ScaleLimits.CARET;
const o = plot.pointOffset({x: x, y: 0});
ctx.beginPath();
ctx.moveTo(o.left, o.top);
ctx.lineTo(o.left - size, o.top - (size*2));
ctx.lineTo(o.left + size, o.top - (size*2));
ctx.lineTo(o.left, o.top);
ctx.fillStyle = color;
ctx.fill();
};
// flag we have just started
this.plotComplete = false;
// plot options
if( this.plotColor ){
pobj.color = this.plotColor;
}
// pixel distribution
dist = JS9.ScaleLimits.getPixelDist(im, this.ndist);
// convert to flot data
for(i=0; i<this.ndist; i++){
pobj.data[i] = [i, dist[i]];
}
// xaxis
popts.xaxis = popts.xaxis || {};
popts.xaxis.font = JS9.ScaleLimits.AXISFONT;
if( this.xscale === "linear" ){
popts.xaxis.transform = null;
popts.xaxis.ticks = [];
tickinc = gettickinc(drange);
ntick = Math.floor(drange/tickinc + 0.5) + 1;
for(i=0; i<ntick; i++){
j = i * tickinc;
s = String(j);
popts.xaxis.ticks[i] = [(j - dmin) * this.ndist / drange, s];
}
} else if( this.xscale === "log" ){
popts.xaxis.transform = JS9.ScaleLimits.log10;
popts.xaxis.min = 1;
popts.xaxis.ticks = [];
ntick = JS9.ScaleLimits.log10(this.ndist) + 1;
for(i=0; i<ntick; i++){
j = Math.floor( (Math.pow(10, i) - dmin) * this.ndist / drange);
popts.xaxis.ticks[i] = [j, JS9.ScaleLimits.to10E(i)];
}
}
// plot location of current scaling min and max for annotations
xmin = ((im.params.scalemin - dmin) / drange) * this.ndist;
xmax = ((im.params.scalemax - dmin) / drange) * this.ndist;
// y axis
popts.yaxis = popts.yaxis || {};
popts.yaxis.font = JS9.ScaleLimits.AXISFONT;
if( this.yscale === "linear" ){
popts.yaxis.transform = null;
popts.yaxis.ticks = null;
} else if( this.yscale === "log" ){
popts.yaxis.transform = JS9.ScaleLimits.log10;
popts.yaxis.min = 1;
popts.yaxis.ticks = [];
// distribution limits
for(i=0; i<this.ndist; i++){
if( distmin === undefined || dist[i] < distmin ){
distmin = dist[i];
}
if( distmax === undefined || dist[i] > distmax ){
distmax = dist[i];
}
}
ntick = JS9.ScaleLimits.log10(distmax - distmin + 1);
for(i=0; i<ntick; i++){
popts.yaxis.ticks[i] = [Math.pow(10, i), JS9.ScaleLimits.to10E(i)];
}
}
el = this.divjq.find(".JS9ScalePlot");
// this timeout stuff avoids generating plots too quickly in succession
if( this.timeout ){
// don't do previous plot
window.clearTimeout(this.timeout);
this.timeout = null;
}
// select limits
el.off("plotselected");
el.on("plotselected", (event, ranges) => {
let start = ranges.xaxis.from;
let end = ranges.xaxis.to;
if( this.xscale === "log" ){
start = Math.pow(10, start);
end = Math.pow(10, end);
}
start = start * drange / this.ndist + dmin;
end = end * drange / this.ndist + dmin;
im.setScale("user", start, end);
});
el.off("plothover");
el.on("plothover", (event, pos) => {
let ctx, text, s, x, y, w, h, xval;
let px = pos.x;
// sanity checks
if( !this.plot || !this.plotComplete ){
return;
}
if( this.xscale === "log" ){
px = Math.pow(10, px);
}
xval = px * drange / this.ndist + dmin;
if( !Number.isFinite(xval) ){
return;
}
s = JS9.floatToString(xval);
// display x value in upper right corner of plot
ctx = this.plot.getCanvas().getContext("2d");
ctx.save();
ctx.textBaseline = 'top';
ctx.font = `${JS9.ScaleLimits.XTEXTHEIGHT }px ${JS9.ScaleLimits.XTEXTFONT}`;
ctx.fillStyle = JS9.ScaleLimits.XTEXTCOLOR || "black";
text = ctx.measureText(s);
w = Math.max(this.lastTextWidth, text.width + 2); | h = JS9.ScaleLimits.XTEXTHEIGHT + 2; | random_line_split |
|
DataFrameExplored_dfprep-checkpoint.py | by coerce")
print(f" - Example: {before_formatting} -->> {str(data.loc[0, i])}", end="\n")
else:
pass
return data
# Function, ...........................................................................................
def replace_text(*,df ,pat="", colnames="all", fillna=np.nan, verbose=True):
"""
searches string values for a given pattern and replaces them with a new value (fillna), eg: nan,
Parameters/Input
_________________ _______________________________________________________________________________
* df Pandas Dataframe
* searched_pattern "", str literal, used by pd.Series.str.contains()
* colnames default, "all", or list with selected colnames in df
* fillna default numpy.nan, or str literal
- what do you want to place instead of searched pattern in df
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
# for older version,
searched_pattern = pat
col_names = colnames
# check col_names with values to replace,
if col_names=="all":
sel_col_names = list(df.columns)
else:
sel_col_names = col_names
# display message header,
if verbose==True:
print(f"""\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\n""")
if verbose==False:
pass
# exchange searched pattern in each column separately,
for i, col_name in enumerate(sel_col_names):
# .. test if you really have string values in that column, otherwise it may be float for all NaN in a column, and no action will be taken
if is_string_dtype(df[col_name]):
try:
# .... find positions with a given pattern and select three examples to display for the user,
positions_to_replace = df[col_name].str.contains(searched_pattern, na=False).values# arr
examples_to_display = [str(x) for x in list(df.loc[list(positions_to_replace), col_name].str[0:20].values.tolist()[0:3])]
# .... replace positions, and find examples of unchanged positions,
df.loc[list(positions_to_replace), col_name] = [fillna]*positions_to_replace.sum()
examples_of_positions_that_were_not_replaced = [str(x) for x in list(df.loc[list(positions_to_replace==False), col_name].str[0:20].values.tolist()[0:3])]
# .... display info,
if verbose==True:
perc_of_replaced_pos_in_col = "".join([str(positions_to_replace.sum()/df.shape[0]*100),"%"])
print(f"{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}")
print(f" - three examples of replaced positions: {'; '.join(examples_to_display)}", end="\n")
print(f" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}", end="\n\n")
# the second print returns three first examples of exchanged values, just to see what i did,
else:
pass
except:
if verbose==True:
print(f"{i} - {col_name} - - probably only missing data detected, Values were not replaced! \n")
else:
pass
else:
if verbose==True:
print(f"{i} - {col_name} - - is not of string type, Values were not replaced! \n")
else:
pass
return df.copy()
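# Hypothetical usage sketch for replace_text (the column name "product_name" is only an example):
# df = replace_text(df=df, pat="not available|missing", colnames=["product_name"], fillna=np.nan, verbose=True)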
# Function, ...........................................................................................
def replace_numeric_values(*, df, colnames="all", lower_limit="none", upper_limit="none", equal=False, replace_with=np.nan, verbose=True):
"""
Replace numerical values that are outside the range of values
predicted from the theoretical limits of a given variable,
eg less than 0 for the weight of a product,
Provide examples and numbers of replaced instances
Parameters/Input
_________________ _______________________________________________________________________________
* df : Pandas DataFrame
* cols_in_df : list, exact colnames of selected or all columns in df
* lower_limit : int,float,"none", if "none" no action is taken
* upper_limit : int,float,"none", if "none" no action is taken
* replace_with : str, np.nan, int, float
* equal : bool, if True, values >= and <= the limits will be replaced,
if False (default), values > and < the limits will be replaced,
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
cols_names = colnames
# .. check provided col_names,
if cols_names=="all":
cols = list(df.columns)
else:
cols = cols_names
# .. info, header,
if verbose==True:
print(f"""\n{"".join(["-"]*80)} \n Replacing Numerical Values in {len(cols)} columns""")
print(f" lower filter={lower_limit}, upper filter ={upper_limit}")
if equal==True:
print(f" Caution, equal=True, ie. values >= and <= the requested limits will be replaced")
print(f'{"".join(["-"]*80)}\n')
if verbose==False:
pass
# .. intelligent info,
total_count=[]
# .. count, to limit the number of displayed messages,
count = 0
# .. replace values and collect examples,
for i, j in enumerate(cols):
# ..... assume no values were replaced, so the messages work later,
info_lower_filter = 0
info_upper_filter = 0
# ..... test if the column is of the numeric type:
# from pandas.api.types import is_numeric_dtype
if is_numeric_dtype(df[j]):
# * replace values < or <= lower limit,
# - ----------------------------------
if lower_limit!="none":
if equal == True:
lower_filter = df.loc[:,j]<=lower_limit
if equal == False:
lower_filter = df.loc[:,j]<lower_limit
# info,
info_lower_filter=lower_filter.sum()
df.loc[list(lower_filter),j]=replace_with
# * replace values > or >= upper limit,
# - ----------------------------------
if upper_limit!="none":
if equal == True:
upper_filter = df.loc[:,j]>=upper_limit
if equal == False:
upper_filter = df.loc[:,j]>upper_limit
# info,
info_upper_filter=upper_filter.sum()
df.loc[list(upper_filter),j]=replace_with
# * find how many values were replaced, and add that to the total_count list
total_count.append(info_upper_filter+info_lower_filter)
# * display examples for 3 first columns with replaced values,
if verbose==True:
if info_upper_filter+info_lower_filter>0 and count <4:
print(f"eg: {i}, {j} : {info_lower_filter} values <{lower_limit}, ...{info_upper_filter} values >{upper_limit}")
else:
pass
# * add 1 to count, to limit the number of displayed examples,
count += 1
else:
if verbose==True:
print(f"{i, j} is not of numeric type, values were not replaced !")
else:
pass
# .. additional message, if more than 2 columns had replaced values,
if verbose==True:
if len(total_count)>3 and pd.Series(total_count).sum()>0:
print(f". and {len(total_count)-3} other columns had in total {pd.Series(total_count).sum()} replaced values \n")
# .. message in case no values were replaced at all,
if pd.Series(total_count).sum()==0:
print("No values were replaced in requested columns....")
else:
pass
# .. return,
return df.copy()
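# Hypothetical usage sketch for replace_numeric_values (the "weight" column and both limits are only examples):
# df = replace_numeric_values(df=df, colnames=["weight"], lower_limit=0, upper_limit=1000, equal=False, replace_with=np.nan, verbose=True)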
# function, ...................................................
def drop_nan(df, method="any", row=True, verbose=True):
| '''
function to dropna with thresholds from rows and columns
. method
. any : rows/columns with any missing data are removed
. all : rows/columns with only missing data are removed
. int, >0 : keeps rows/columns with this or a larger number of non-missing data
. float, >0 : as in the above, as a fraction
'''
assert type(df)==pd.DataFrame, "incorrect df dtype"
df = df.copy()
if verbose==True:
print(df.shape)
else:
pass
# set function for rows or columns,
if row==True: | identifier_body |
|
DataFrameExplored_dfprep-checkpoint.py | (*, series, pattern):
"I used that function when i don't remeber full name of a given column"
res = series.loc[series.str.contains(pattern)]
return res
# Function, ...........................................................................................
def load_csv(*, path, filename, sep="\t", verbose=True):
"""
Loads csv into pandas df, based on pandas.read_csv(),
Returns error, if file or directory not found
Parameters/Input
_________________ _______________________________________________________________________________
* path full path to directory
* csv_name. full csv file name
* separator "\t", by default
* display_head bool, True, by default, display df.head(),
irrespective of when the function was called.
Returns
_________________ _______________________________________________________________________________
* DataFrame by Pandas
"""
os.chdir(path)
if len(glob.glob(filename))==1:
df = pd.read_csv(filename, sep=sep, low_memory=False)
# display example,
if verbose==True:
display(df.head(3))
print(df.shape)
else:
pass
# return,
return df
else:
if verbose==True:
print(f"""ERROR :csv file {filename}, was not found in: \n {path}""")
else:
pass
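# Hypothetical usage sketch for load_csv (path and file name are placeholders):
# df = load_csv(path="/path/to/project/data", filename="interim_data.csv", sep="\t", verbose=True)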
# Function, ............................................................................
def find_patter_in_series(*, s, pat, tolist=True):
'''
I used that function when I didn't remember the full name of a given column
'''
res = s.loc[s.str.contains(pat)]
if tolist==True:
return res.values.tolist()
else:
return res
# Function, ...........................................................................................
def format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):
'''
formats columns in df into datetime dtype, and sets all times to UTC
works with unix time units, ie. seconds since 1970
columns in df are found using the full column name or keywords in the column name
'''
assert type(data)==pd.DataFrame, "please provide data in pandas dataframe format"
if isinstance(pattern_list, str):
pattern_list = [pattern_list]
else:
pass
for pat in pattern_list:
# find column names using provided patterns or their full names,
columns_with_potential_datetime_obj = list(find_and_display_patter_in_series(series=pd.Series(data.columns), pattern=pat))
# replace
for i in columns_with_potential_datetime_obj:
# keep example of old cell
before_formatting = str(data.loc[0, i])
# convert to one format
if unixtime==True:
s = pd.to_datetime(data.loc[:, i], errors="coerce", unit='s').copy()#,format cannot be used with unit="s", but it will be the same
data.loc[:, i] = s
if timezone!=None:
data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)
else:
pass
else:
s = pd.to_datetime(data.loc[:, i], errors="coerce",format=dt_format).copy()
data.loc[:, i] = s
if timezone!=None:
data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)
else:
pass
# info
if verbose==True:
print(f"date time formatted in: {i}")
print(f" - {data.loc[:, i].isnull().sum()} NaN were introduced by coerce")
print(f" - Example: {before_formatting} -->> {str(data.loc[0, i])}", end="\n")
else:
pass
return data
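# Hypothetical usage sketch for format_to_datetime (assumes unix-time columns whose names contain "timestamp"):
# data = format_to_datetime(data=data, pattern_list=["timestamp"], timezone="UTC", unixtime=True, verbose=True)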
# Function, ...........................................................................................
def replace_text(*,df ,pat="", colnames="all", fillna=np.nan, verbose=True):
"""
searches string values for a given pattern and replaces them with a new value (fillna), eg: nan,
Parameters/Input
_________________ _______________________________________________________________________________
* df Pandas Dataframe
* searched_pattern "", str literal, used by pd.Series.str.contains()
* colnames default, "all", or list with selected colnames in df
* fillna default numpy.nan, or str literal
- what do you want to place instead of searched pattern in df
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
# for older version,
searched_pattern = pat
col_names = colnames
# check col_names with values to replace,
if col_names=="all":
sel_col_names = list(df.columns)
else:
sel_col_names = col_names
# display message header,
if verbose==True:
print(f"""\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\n""")
if verbose==False:
pass
# exchange searched pattern in each column separately,
for i, col_name in enumerate(sel_col_names):
# .. test if you really have string values in that column, otherwise it may be float for all NaN in a column, and no action will be taken
if is_string_dtype(df[col_name]):
try:
# .... find positions with a given pattern and select three examples to display for the user,
positions_to_replace = df[col_name].str.contains(searched_pattern, na=False).values# arr
examples_to_display = [str(x) for x in list(df.loc[list(positions_to_replace), col_name].str[0:20].values.tolist()[0:3])]
# .... replace positions, and find examples of unchanged positions,
df.loc[list(positions_to_replace), col_name] = [fillna]*positions_to_replace.sum()
examples_of_positions_that_were_not_replaced = [str(x) for x in list(df.loc[list(positions_to_replace==False), col_name].str[0:20].values.tolist()[0:3])]
# .... display info,
if verbose==True:
perc_of_replaced_pos_in_col = "".join([str(positions_to_replace.sum()/df.shape[0]*100),"%"])
print(f"{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}")
print(f" - three examples of replaced positions: {'; '.join(examples_to_display)}", end="\n")
print(f" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}", end="\n\n")
# the second print returns three first examples of exchanged values, just to see what i did,
else:
pass
except:
if verbose==True:
print(f"{i} - {col_name} - - probably only missing data detected, Values were not replaced! \n")
else:
pass
else:
if verbose==True:
print(f"{i} - {col_name} - - is not of string type, Values were not replaced! \n")
else:
pass
return df.copy()
# Function, ...........................................................................................
def replace_numeric_values(*, df, colnames="all", lower_limit="none", upper_limit="none", equal=False, replace_with=np.nan, verbose=True):
"""
Replace numerical values that are outside the range of values
predicted from the theoretical limits of a given variable,
eg less than 0 for the weight of a product,
Provide examples and numbers of replaced instances
Parameters/Input
_________________ _______________________________________________________________________________
* df : Pandas DataFrame
* cols_in_df : list, exact colnames of selected or all columns in df
* lower_limit : int,float,"none", if "none" no action is taken
* upper_limit : int,float,"none", if "none" no action is taken
* replace_with : str, np.nan, int, float
* equal : bool, if True, values >= and <= the limits will be replaced,
if False (default), values > and < the limits will be replaced,
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
cols_names = colnames
# .. check provided col_names,
if cols_names=="all":
cols = list(df.columns)
else:
cols = cols_names
# .. info, header,
if verbose==True:
print(f"""\n{"".join(["-"]*80)} \n Replacing Numerical Values in {len(cols)} columns""")
print(f" lower | find_and_display_patter_in_series | identifier_name |
|
DataFrameExplored_dfprep-checkpoint.py | col_names with values to replace,
if col_names=="all":
sel_col_names = list(df.columns)
else:
sel_col_names = col_names
# display message header,
if verbose==True:
print(f"""\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\n""")
if verbose==False:
pass
# exchange searched pattern in each column separately,
for i, col_name in enumerate(sel_col_names):
# .. test if you really have string values in that column, otherwise it may be float for all NaN in a column, and no action will be taken
if is_string_dtype(df[col_name]):
try:
# .... find positions with a given pattern and select three examples to display for the user,
positions_to_replace = df[col_name].str.contains(searched_pattern, na=False).values# arr
examples_to_display = [str(x) for x in list(df.loc[list(positions_to_replace), col_name].str[0:20].values.tolist()[0:3])]
# .... replace positions, and find examples of unchanged positions,
df.loc[list(positions_to_replace), col_name] = [fillna]*positions_to_replace.sum()
examples_of_positions_that_were_not_replaced = [str(x) for x in list(df.loc[list(positions_to_replace==False), col_name].str[0:20].values.tolist()[0:3])]
# .... display info,
if verbose==True:
perc_of_replaced_pos_in_col = "".join([str(positions_to_replace.sum()/df.shape[0]*100),"%"])
print(f"{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}")
print(f" - three examples of replaced positions: {'; '.join(examples_to_display)}", end="\n")
print(f" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}", end="\n\n")
# the second print returns three first examples of exchanged values, just to see what i did,
else:
pass
except:
if verbose==True:
print(f"{i} - {col_name} - - probably only missing data detected, Values were not replaced! \n")
else:
pass
else:
if verbose==True:
print(f"{i} - {col_name} - - is not of string type, Values were not replaced! \n")
else:
pass
return df.copy()
# Function, ...........................................................................................
def replace_numeric_values(*, df, colnames="all", lower_limit="none", upper_limit="none", equal=False, replace_with=np.nan, verbose=True):
"""
Replace numerical values that are outside the range of values
predicted from the theoretical limits of a given variable,
eg less than 0 for the weight of a product,
Provide examples and numbers of replaced instances
Parameters/Input
_________________ _______________________________________________________________________________
* df : Pandas DataFrame
* cols_in_df : list, exact colnames of selected or all columns in df
* lower_limit : int,float,"none", if "none" no action is taken
* upper_limit : int,float,"none", if "none" no action is taken
* replace_with : str, np.nan, int, float
* equal : bool, if True, values >= and <= the limits will be replaced,
if False (default), values > and < the limits will be replaced,
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
cols_names = colnames
# .. check provided col_names,
if cols_names=="all":
cols = list(df.columns)
else:
cols = cols_names
# .. info, header,
if verbose==True:
print(f"""\n{"".join(["-"]*80)} \n Replacing Numerical Values in {len(cols)} columns""")
print(f" lower filter={lower_limit}, upper filter ={upper_limit}")
if equal==True:
print(f" Caution, equal=True, ie. values >= and <= the requested limits will be replaced")
print(f'{"".join(["-"]*80)}\n')
if verbose==False:
pass
# .. intelligent info,
total_count=[]
# .. count, to limit the number of displayed messages,
count = 0
# .. replace values and collect examples,
for i, j in enumerate(cols):
# ..... assume no values were replaced, so the messages work later,
info_lower_filter = 0
info_upper_filter = 0
# ..... test if the column is of the numeric type:
# from pandas.api.types import is_numeric_dtype
if is_numeric_dtype(df[j]):
# * replace values < or <= lower limit,
# - ----------------------------------
if lower_limit!="none":
if equal == True:
lower_filter = df.loc[:,j]<=lower_limit
if equal == False:
lower_filter = df.loc[:,j]<lower_limit
# info,
info_lower_filter=lower_filter.sum()
df.loc[list(lower_filter),j]=replace_with
# * replace values > or >= upper limit,
# - ----------------------------------
if upper_limit!="none":
if equal == True:
upper_filter = df.loc[:,j]>=upper_limit
if equal == False:
upper_filter = df.loc[:,j]>upper_limit
# info,
info_upper_filter=upper_filter.sum()
df.loc[list(upper_filter),j]=replace_with
# * find how many values were replaced, and add that to the total_count list
total_count.append(info_upper_filter+info_lower_filter)
# * display examples for 3 first columns with replaced values,
if verbose==True:
if info_upper_filter+info_lower_filter>0 and count <4:
print(f"eg: {i}, {j} : {info_lower_filter} values <{lower_limit}, ...{info_upper_filter} values >{upper_limit}")
else:
pass
# * add 1 to count, to limit the number of displayed examples,
count += 1
else:
if verbose==True:
print(f"{i, j} is not of numeric type, values were not replaced !")
else:
pass
# .. additional message, if more than 2 columns had replaced values,
if verbose==True:
if len(total_count)>3 and pd.Series(total_count).sum()>0:
print(f". and {len(total_count)-3} other columns had in total {pd.Series(total_count).sum()} replaced values \n")
# .. message in case no values were replaced at all,
if pd.Series(total_count).sum()==0:
print("No values were replaced in requested columns....")
else:
pass
# .. return,
return df.copy()
# function, ...................................................
def drop_nan(df, method="any", row=True, verbose=True):
'''
function to dropna with thresholds from rows and columns
. method
. any : rows/columns with any missing data are removed
. all : rows/columns with only missing data are removed
. int, >0 : keeps rows/columns with this or a larger number of non-missing data
. float, >0 : as in the above, as a fraction
'''
assert type(df)==pd.DataFrame, "incorrect df dtype"
df = df.copy()
if verbose==True:
print(df.shape)
else:
pass
# set function for rows or columns,
if row==True:
shapeidx, dfaxis = 1, 0
else:
shapeidx, dfaxis = 0, 1
# use threshold or "all", or None to do nothing,
if method==None:
pass
elif isinstance(method, str):
df = df.dropna(how=method, axis=dfaxis) # removes rows with NaN in all columns
elif isinstance(method, int):
tr = method
if tr==0:
pass
else:
if tr>=df.shape[shapeidx]:
tr=df.shape[shapeidx]
else:
pass
df = df.dropna(thresh=tr, axis=dfaxis) # eg Keep only the rows with at least 2 non-NA value
elif isinstance(method, float):
tr = int(np.ceil(df.shape[shapeidx]*(method)))
if tr==0:
pass
else:
if tr>=df.shape[shapeidx]:
tr=df.shape[shapeidx]
else:
pass
df = df.dropna(thresh=tr, axis=dfaxis) # eg Keep only the rows with at least 2 non-NA value
else:
| pass | conditional_block |
|
DataFrameExplored_dfprep-checkpoint.py | full csv file name
* separator "\t", by default
* display_head bool, True, by default, display df.head(),
irrespective of when the function was called.
Returns
_________________ _______________________________________________________________________________
* DataFrame by Pandas
"""
os.chdir(path)
if len(glob.glob(filename))==1:
df = pd.read_csv(filename, sep=sep, low_memory=False)
# display example,
if verbose==True:
display(df.head(3))
print(df.shape)
else:
pass
# return,
return df
else:
if verbose==True:
print(f"""ERROR :csv file {filename}, was not found in: \n {path}""")
else:
pass
# Function, ............................................................................
def find_patter_in_series(*, s, pat, tolist=True):
'''
I used that function when I didn't remember the full name of a given column
'''
res = s.loc[s.str.contains(pat)]
if tolist==True:
return res.values.tolist()
else:
return res
# Function, ...........................................................................................
def format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):
'''
formats columns in df into datetime dtype, and sets all times to UTC
works with unix time units, ie. seconds since 1970
columns in df are found using the full column name or keywords in the column name
'''
assert type(data)==pd.DataFrame, "please provide data in pandas dataframe format"
if isinstance(pattern_list, str):
pattern_list = [pattern_list]
else:
pass
for pat in pattern_list:
# find column names using provided patterns or their full names,
columns_with_potential_datetime_obj = list(find_and_display_patter_in_series(series=pd.Series(data.columns), pattern=pat))
# replace
for i in columns_with_potential_datetime_obj:
# keep example of old cell
before_formatting = str(data.loc[0, i])
# convert to one format
if unixtime==True:
s = pd.to_datetime(data.loc[:, i], errors="coerce", unit='s').copy()#,format cannot be used with unit="s", but it will be the same
data.loc[:, i] = s
if timezone!=None:
data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)
else:
pass
else:
s = pd.to_datetime(data.loc[:, i], errors="coerce",format=dt_format).copy()
data.loc[:, i] = s
if timezone!=None:
data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)
else:
pass
# info
if verbose==True:
print(f"date time formatted in: {i}")
print(f" - {data.loc[:, i].isnull().sum()} NaN were introduced by coerce")
print(f" - Example: {before_formatting} -->> {str(data.loc[0, i])}", end="\n")
else:
pass
return data
# Function, ...........................................................................................
def replace_text(*,df ,pat="", colnames="all", fillna=np.nan, verbose=True):
"""
searches string values for a given pattern and replaces them with a new value (fillna), eg: nan,
Parameters/Input
_________________ _______________________________________________________________________________
* df Pandas Dataframe
* searched_pattern "", str literal, used by pd.Series.str.contains()
* colnames default, "all", or list with selected colnames in df
* fillna default numpy.nan, or str literal
- what do you want to place instead of searched pattern in df
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
# for older version,
searched_pattern = pat
col_names = colnames
# check col_names with values to replace,
if col_names=="all":
sel_col_names = list(df.columns)
else:
sel_col_names = col_names
# display message header,
if verbose==True:
print(f"""\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\n""")
if verbose==False:
pass
# exchange searched pattern in each column separately,
for i, col_name in enumerate(sel_col_names):
# .. test if you really have string values in that column, otherwise it may be float for all NaN in a column, and no action will be taken
if is_string_dtype(df[col_name]):
try:
# .... find positions with a given pattern and select three examples to display for the user,
positions_to_replace = df[col_name].str.contains(searched_pattern, na=False).values# arr
examples_to_display = [str(x) for x in list(df.loc[list(positions_to_replace), col_name].str[0:20].values.tolist()[0:3])]
# .... replace positions, and find examples of unchanged positions,
df.loc[list(positions_to_replace), col_name] = [fillna]*positions_to_replace.sum()
examples_of_positions_that_were_not_replaced = [str(x) for x in list(df.loc[list(positions_to_replace==False), col_name].str[0:20].values.tolist()[0:3])]
# .... display info,
if verbose==True:
perc_of_replaced_pos_in_col = "".join([str(positions_to_replace.sum()/df.shape[0]*100),"%"])
print(f"{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}")
print(f" - three examples of replaced positions: {'; '.join(examples_to_display)}", end="\n")
print(f" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}", end="\n\n")
# the second print returns three first examples of exchanged values, just to see what i did,
else:
pass
except:
if verbose==True:
print(f"{i} - {col_name} - - probably only missing data detected, Values were not replaced! \n")
else:
pass
else:
if verbose==True:
print(f"{i} - {col_name} - - is not of string type, Values were not replaced! \n")
else:
pass
return df.copy()
|
# Function, ...........................................................................................
def replace_numeric_values(*, df, colnames="all", lower_limit="none", upper_limit="none", equal=False, replace_with=np.nan, verbose=True):
"""
Replace numerical values that are outside the range of values
predicted from the theoretical limits of a given variable,
eg less than 0 for the weight of a product,
Provide examples and numbers of replaced instances
Parameters/Input
_________________ _______________________________________________________________________________
* df : Pandas DataFrame
* cols_in_df : list, exact colnames of selected or all columns in df
* lower_limit : int,float,"none", if "none" no action is taken
* upper_limit : int,float,"none", if "none" no action is taken
* replace_with : str, np.nan, int, float
* equal : bool, if True, values >= and <= the limits will be replaced,
if False (default), values > and < the limits will be replaced,
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFrame.copy() with new values,
* display messages. number of replaced strings in each column, and examples of replaced values
"""
cols_names = colnames
# .. check provided col_names,
if cols_names=="all":
cols = list(df.columns)
else:
cols = cols_names
# .. info, header,
if verbose==True:
print(f"""\n{"".join(["-"]*80)} \n Replacing Numerical Values in {len(cols)} columns""")
print(f" lower filter={lower_limit}, upper filter ={upper_limit}")
if equal==True:
print(f" Caution, equal=True, ie. values >= and <= the requested limits will be replaced")
print(f'{"".join(["-"]*80)}\n')
if verbose==False:
pass
# .. intelligent info,
total_count=[]
# .. count, to limit the number of displayed messages,
count = 0
# .. replace values and collect examples,
for i, j in enumerate(cols):
# ..... assume no values were replaced, so the | random_line_split |
|
main.rs | ,
future_presentation_infos: Vec<flatland::PresentationInfo>,
},
#[allow(dead_code)]
OnFramePresented {
frame_presented_info: fidl_fuchsia_scenic_scheduling::FramePresentedInfo,
},
Relayout {
width: u32,
height: u32,
},
}
struct AppModel<'a> {
flatland: &'a fland::FlatlandProxy,
allocator: fland::AllocatorProxy,
internal_sender: UnboundedSender<MessageInternal>,
allocation: Option<fsysmem::BufferCollectionInfo2>,
sched_lib: &'a dyn SchedulingLib,
hue: f32,
page_size: usize,
last_expected_presentation_time: zx::Time,
}
impl<'a> AppModel<'a> {
fn new(
flatland: &'a fland::FlatlandProxy,
allocator: fland::AllocatorProxy,
internal_sender: UnboundedSender<MessageInternal>,
sched_lib: &'a dyn SchedulingLib,
) -> AppModel<'a> {
AppModel {
flatland,
allocator,
internal_sender,
sched_lib,
allocation: None,
hue: 0.0,
page_size: zx::system_get_page_size().try_into().unwrap(),
last_expected_presentation_time: zx::Time::from_nanos(0),
}
}
async fn init_scene(&mut self) {
// BufferAllocator is a helper which makes it easier to obtain and set constraints on a
// sysmem::BufferCollectionToken. This token can then be registered with Scenic, which will
// set its own constraints; see below.
let mut buffer_allocator = BufferCollectionAllocator::new(
IMAGE_WIDTH,
IMAGE_HEIGHT,
fidl_fuchsia_sysmem::PixelFormatType::Bgra32,
FrameUsage::Cpu,
1,
)
.expect("failed to create BufferCollectionAllocator");
buffer_allocator.set_name(100, "Flatland ViewProvider Example").expect("fidl error");
let sysmem_buffer_collection_token =
buffer_allocator.duplicate_token().await.expect("error duplicating token");
// Register the sysmem BufferCollectionToken with the Scenic Allocator API. This is done by
// creating an import/export token pair, which is fundamentally a pair of zx::event. The
// export token is used as a key to register the sysmem BufferCollectionToken. The
// corresponding import token can be used to access the allocated buffers via other Scenic
// APIs, such as the "Gfx" and "Flatland" APIs, the latter being used in this example. See
// the following invocation of "flatland.create_image()".
let mut buffer_tokens = BufferCollectionTokenPair::new();
let args = fland::RegisterBufferCollectionArgs {
export_token: Some(buffer_tokens.export_token),
buffer_collection_token: Some(sysmem_buffer_collection_token),
..fland::RegisterBufferCollectionArgs::EMPTY
};
self.allocator
.register_buffer_collection(args)
.await
.expect("fidl error")
.expect("error registering buffer collection");
// Now that the BufferCollectionToken has been registered, Scenic is able to set constraints
// on it so that the eventually-allocated buffer can be used by e.g. both Vulkan and the
// hardware display controller. Allocate the buffer and wait for the allocation to finish,
// which cannot happen until Scenic has set all necessary constraints of its own.
self.allocation =
Some(buffer_allocator.allocate_buffers(true).await.expect("buffer allocation failed"));
self.set_image_colors();
// Create an image in the Flatland session, using the sysmem buffer we just allocated.
// As mentioned above, this uses the import token corresponding to the export token that was
// used to register the BufferCollectionToken with the Scenic Allocator.
let image_props = fland::ImageProperties {
size: Some(fmath::SizeU { width: IMAGE_WIDTH, height: IMAGE_HEIGHT }),
..fland::ImageProperties::EMPTY
};
// TODO(fxbug.dev/76640): generated FIDL methods currently expect "&mut" args. This will
// change; according to fxbug.dev/65845 the generated FIDL will use "&" instead (at least
// for POD structs like these). When this lands we can remove the ".clone()" from the call
// sites below.
self.flatland
.create_image(&mut IMAGE_ID.clone(), &mut buffer_tokens.import_token, 0, image_props)
.expect("fidl error");
// Populate the rest of the Flatland scene. There is a single transform which is set as the
// root transform; the newly-created image is set as the content of that transform.
self.flatland.create_transform(&mut TRANSFORM_ID.clone()).expect("fidl error");
self.flatland.set_root_transform(&mut TRANSFORM_ID.clone()).expect("fidl error");
self.flatland
.set_content(&mut TRANSFORM_ID.clone(), &mut IMAGE_ID.clone())
.expect("fidl error");
}
fn create_parent_viewport_watcher(
&mut self,
mut view_creation_token: fviews::ViewCreationToken,
) {
let (parent_viewport_watcher, server_end) =
create_proxy::<fland::ParentViewportWatcherMarker>()
.expect("failed to create ParentViewportWatcherProxy");
// NOTE: it isn't necessary to call maybe_present() for this to take effect, because we will
// relayout when receive the initial layout info. See CreateView() FIDL docs.
self.flatland.create_view(&mut view_creation_token, server_end).expect("fidl error");
// NOTE: there may be a race condition if TemporaryFlatlandViewProvider.CreateView() is
// invoked a second time, causing us to create another graph link. Because Zircon doesn't
// guarantee ordering on responses of different channels, we might receive data from the old | // should be more careful (this assumes that the client expects CreateView() to be called
// multiple times, which clients commonly don't).
let sender = self.internal_sender.clone();
fasync::Task::spawn(async move {
let mut layout_info_stream =
HangingGetStream::new(Box::new(move || Some(parent_viewport_watcher.get_layout())));
while let Some(result) = layout_info_stream.next().await {
match result {
Ok(layout_info) => {
let mut width = 0;
let mut height = 0;
if let Some(logical_size) = layout_info.logical_size {
width = logical_size.width;
height = logical_size.height;
}
sender
.unbounded_send(MessageInternal::Relayout { width, height })
.expect("failed to send MessageInternal.");
}
Err(fidl::Error::ClientChannelClosed { .. }) => {
info!("graph link connection closed.");
return; // from spawned task closure
}
Err(fidl_error) => {
warn!("graph link GetLayout() error: {:?}", fidl_error);
return; // from spawned task closure
}
}
}
})
.detach();
}
fn draw(&mut self, expected_presentation_time: zx::Time) {
trace::duration!("gfx", "FlatlandViewProvider::draw");
let time_since_last_draw_in_seconds = ((expected_presentation_time.into_nanos()
- self.last_expected_presentation_time.into_nanos())
as f32)
/ 1_000_000_000.0;
self.last_expected_presentation_time = expected_presentation_time;
let hue_change_time_per_second = 30 as f32;
self.hue =
(self.hue + hue_change_time_per_second * time_since_last_draw_in_seconds) % 360.0;
self.set_image_colors();
self.sched_lib.request_present();
}
fn on_relayout(&mut self, width: u32, height: u32) {
self.flatland
.set_image_destination_size(&mut IMAGE_ID.clone(), &mut fmath::SizeU { width, height })
.expect("fidl error");
self.sched_lib.request_present();
}
fn set_image_colors(&mut self) {
let allocation = self.allocation.as_ref().unwrap();
// Write pixel values into the allocated buffer.
match &allocation.buffers[0].vmo {
Some(vmo) => {
assert!(IMAGE_WIDTH == 2);
assert!(IMAGE_HEIGHT == 2);
// Compute the same row-pitch as Flatland will compute internally.
assert!(allocation.settings.has_image_format_constraints);
let row_pitch: usize =
minimum_row_bytes(allocation.settings.image_format_constraints, IMAGE_WIDTH)
.expect("failed to compute row-pitch")
.try_into()
.unwrap();
// TODO(fxbug.dev/76640): should look at pixel-format, instead of assuming 32-bit
// BGRA pixels. For now, format is hard-coded anyway.
let p00: [u8; 4] = hsv_to_rgba(self.hue, 30.0, 75.0);
let p10: [u8; 4] | // link after data from the new link, just before the old link is closed. Non-example code | random_line_split |
main.rs | (h: f32, s: f32, v: f32) -> [u8; 4] {
assert!(s <= 100.0);
assert!(v <= 100.0);
let h = pos_mod(h, 360.0);
let c = v / 100.0 * s / 100.0;
let x = c * (1.0 - (((h / 60.0) % 2.0) - 1.0).abs());
let m = (v / 100.0) - c;
let (mut r, mut g, mut b) = match h {
h if h < 60.0 => (c, x, 0.0),
h if h < 120.0 => (x, c, 0.0),
h if h < 180.0 => (0.0, c, x),
h if h < 240.0 => (0.0, x, c),
h if h < 300.0 => (x, 0.0, c),
_ => (c, 0.0, x),
};
r += m;
g += m;
b += m;
return [(r * 255.0) as u8, (g * 255.0) as u8, (b * 255.0) as u8, 255];
}
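// Illustrative check of the conversion above (assumed example values, not from the original code):
// hsv_to_rgba(0.0, 100.0, 100.0) == [255, 0, 0, 255], i.e. pure red at full saturation and value.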
enum MessageInternal {
CreateView(fviews::ViewCreationToken, fviews::ViewRefControl, fviews::ViewRef),
OnPresentError {
error: fland::FlatlandError,
},
OnNextFrameBegin {
additional_present_credits: u32,
future_presentation_infos: Vec<flatland::PresentationInfo>,
},
#[allow(dead_code)]
OnFramePresented {
frame_presented_info: fidl_fuchsia_scenic_scheduling::FramePresentedInfo,
},
Relayout {
width: u32,
height: u32,
},
}
struct AppModel<'a> {
flatland: &'a fland::FlatlandProxy,
allocator: fland::AllocatorProxy,
internal_sender: UnboundedSender<MessageInternal>,
allocation: Option<fsysmem::BufferCollectionInfo2>,
sched_lib: &'a dyn SchedulingLib,
hue: f32,
page_size: usize,
last_expected_presentation_time: zx::Time,
}
impl<'a> AppModel<'a> {
fn new(
flatland: &'a fland::FlatlandProxy,
allocator: fland::AllocatorProxy,
internal_sender: UnboundedSender<MessageInternal>,
sched_lib: &'a dyn SchedulingLib,
) -> AppModel<'a> {
AppModel {
flatland,
allocator,
internal_sender,
sched_lib,
allocation: None,
hue: 0.0,
page_size: zx::system_get_page_size().try_into().unwrap(),
last_expected_presentation_time: zx::Time::from_nanos(0),
}
}
async fn init_scene(&mut self) {
// BufferAllocator is a helper which makes it easier to obtain and set constraints on a
// sysmem::BufferCollectionToken. This token can then be registered with Scenic, which will
// set its own constraints; see below.
let mut buffer_allocator = BufferCollectionAllocator::new(
IMAGE_WIDTH,
IMAGE_HEIGHT,
fidl_fuchsia_sysmem::PixelFormatType::Bgra32,
FrameUsage::Cpu,
1,
)
.expect("failed to create BufferCollectionAllocator");
buffer_allocator.set_name(100, "Flatland ViewProvider Example").expect("fidl error");
let sysmem_buffer_collection_token =
buffer_allocator.duplicate_token().await.expect("error duplicating token");
// Register the sysmem BufferCollectionToken with the Scenic Allocator API. This is done by
// creating an import/export token pair, which is fundamentally a pair of zx::event. The
// export token is used as a key to register the sysmem BufferCollectionToken. The
// corresponding import token can be used to access the allocated buffers via other Scenic
// APIs, such as the "Gfx" and "Flatland" APIs, the latter being used in this example. See
// the following invocation of "flatland.create_image()".
let mut buffer_tokens = BufferCollectionTokenPair::new();
let args = fland::RegisterBufferCollectionArgs {
export_token: Some(buffer_tokens.export_token),
buffer_collection_token: Some(sysmem_buffer_collection_token),
..fland::RegisterBufferCollectionArgs::EMPTY
};
self.allocator
.register_buffer_collection(args)
.await
.expect("fidl error")
.expect("error registering buffer collection");
// Now that the BufferCollectionToken has been registered, Scenic is able to set constraints
// on it so that the eventually-allocated buffer can be used by e.g. both Vulkan and the
// hardware display controller. Allocate the buffer and wait for the allocation to finish,
// which cannot happen until Scenic has set all necessary constraints of its own.
self.allocation =
Some(buffer_allocator.allocate_buffers(true).await.expect("buffer allocation failed"));
self.set_image_colors();
// Create an image in the Flatland session, using the sysmem buffer we just allocated.
// As mentioned above, this uses the import token corresponding to the export token that was
// used to register the BufferCollectionToken with the Scenic Allocator.
let image_props = fland::ImageProperties {
size: Some(fmath::SizeU { width: IMAGE_WIDTH, height: IMAGE_HEIGHT }),
..fland::ImageProperties::EMPTY
};
// TODO(fxbug.dev/76640): generated FIDL methods currently expect "&mut" args. This will
// change; according to fxbug.dev/65845 the generated FIDL will use "&" instead (at least
// for POD structs like these). When this lands we can remove the ".clone()" from the call
// sites below.
self.flatland
.create_image(&mut IMAGE_ID.clone(), &mut buffer_tokens.import_token, 0, image_props)
.expect("fidl error");
// Populate the rest of the Flatland scene. There is a single transform which is set as the
// root transform; the newly-created image is set as the content of that transform.
self.flatland.create_transform(&mut TRANSFORM_ID.clone()).expect("fidl error");
self.flatland.set_root_transform(&mut TRANSFORM_ID.clone()).expect("fidl error");
self.flatland
.set_content(&mut TRANSFORM_ID.clone(), &mut IMAGE_ID.clone())
.expect("fidl error");
}
fn create_parent_viewport_watcher(
&mut self,
mut view_creation_token: fviews::ViewCreationToken,
) {
let (parent_viewport_watcher, server_end) =
create_proxy::<fland::ParentViewportWatcherMarker>()
.expect("failed to create ParentViewportWatcherProxy");
// NOTE: it isn't necessary to call maybe_present() for this to take effect, because we will
// relayout when receive the initial layout info. See CreateView() FIDL docs.
self.flatland.create_view(&mut view_creation_token, server_end).expect("fidl error");
// NOTE: there may be a race condition if TemporaryFlatlandViewProvider.CreateView() is
// invoked a second time, causing us to create another graph link. Because Zircon doesn't
// guarantee ordering on responses of different channels, we might receive data from the old
// link after data from the new link, just before the old link is closed. Non-example code
// should be more careful (this assumes that the client expects CreateView() to be called
// multiple times, which clients commonly don't).
let sender = self.internal_sender.clone();
fasync::Task::spawn(async move {
let mut layout_info_stream =
HangingGetStream::new(Box::new(move || Some(parent_viewport_watcher.get_layout())));
while let Some(result) = layout_info_stream.next().await {
match result {
Ok(layout_info) => {
let mut width = 0;
let mut height = 0;
if let Some(logical_size) = layout_info.logical_size {
width = logical_size.width;
height = logical_size.height;
}
sender
.unbounded_send(MessageInternal::Relayout { width, height })
.expect("failed to send MessageInternal.");
}
Err(fidl::Error::ClientChannelClosed { .. }) => {
info!("graph link connection closed.");
return; // from spawned task closure
}
Err(fidl_error) => {
warn!("graph link GetLayout() error: {:?}", fidl_error);
return; // from spawned task closure
}
}
}
})
.detach();
}
fn draw(&mut self, expected_presentation_time: zx::Time) {
trace::duration!("gfx", "FlatlandViewProvider::draw");
let time_since_last_draw_in_seconds = ((expected_presentation_time.into_nanos()
| hsv_to_rgba | identifier_name |
|
main.rs | future_presentation_infos: Vec<flatland::PresentationInfo>,
},
#[allow(dead_code)]
OnFramePresented {
frame_presented_info: fidl_fuchsia_scenic_scheduling::FramePresentedInfo,
},
Relayout {
width: u32,
height: u32,
},
}
struct AppModel<'a> {
flatland: &'a fland::FlatlandProxy,
allocator: fland::AllocatorProxy,
internal_sender: UnboundedSender<MessageInternal>,
allocation: Option<fsysmem::BufferCollectionInfo2>,
sched_lib: &'a dyn SchedulingLib,
hue: f32,
page_size: usize,
last_expected_presentation_time: zx::Time,
}
impl<'a> AppModel<'a> {
fn new(
flatland: &'a fland::FlatlandProxy,
allocator: fland::AllocatorProxy,
internal_sender: UnboundedSender<MessageInternal>,
sched_lib: &'a dyn SchedulingLib,
) -> AppModel<'a> {
AppModel {
flatland,
allocator,
internal_sender,
sched_lib,
allocation: None,
hue: 0.0,
page_size: zx::system_get_page_size().try_into().unwrap(),
last_expected_presentation_time: zx::Time::from_nanos(0),
}
}
async fn init_scene(&mut self) {
// BufferAllocator is a helper which makes it easier to obtain and set constraints on a
// sysmem::BufferCollectionToken. This token can then be registered with Scenic, which will
// set its own constraints; see below.
let mut buffer_allocator = BufferCollectionAllocator::new(
IMAGE_WIDTH,
IMAGE_HEIGHT,
fidl_fuchsia_sysmem::PixelFormatType::Bgra32,
FrameUsage::Cpu,
1,
)
.expect("failed to create BufferCollectionAllocator");
buffer_allocator.set_name(100, "Flatland ViewProvider Example").expect("fidl error");
let sysmem_buffer_collection_token =
buffer_allocator.duplicate_token().await.expect("error duplicating token");
// Register the sysmem BufferCollectionToken with the Scenic Allocator API. This is done by
// creating an import/export token pair, which is fundamentally a pair of zx::event. The
// export token is used as a key to register the sysmem BufferCollectionToken. The
// corresponding import token can be used to access the allocated buffers via other Scenic
// APIs, such as the "Gfx" and "Flatland" APIs, the latter being used in this example. See
// the following invocation of "flatland.create_image()".
let mut buffer_tokens = BufferCollectionTokenPair::new();
let args = fland::RegisterBufferCollectionArgs {
export_token: Some(buffer_tokens.export_token),
buffer_collection_token: Some(sysmem_buffer_collection_token),
..fland::RegisterBufferCollectionArgs::EMPTY
};
self.allocator
.register_buffer_collection(args)
.await
.expect("fidl error")
.expect("error registering buffer collection");
// Now that the BufferCollectionToken has been registered, Scenic is able to set constraints
// on it so that the eventually-allocated buffer can be used by e.g. both Vulkan and the
// hardware display controller. Allocate the buffer and wait for the allocation to finish,
// which cannot happen until Scenic has set all necessary constraints of its own.
self.allocation =
Some(buffer_allocator.allocate_buffers(true).await.expect("buffer allocation failed"));
self.set_image_colors();
// Create an image in the Flatland session, using the sysmem buffer we just allocated.
// As mentioned above, this uses the import token corresponding to the export token that was
// used to register the BufferCollectionToken with the Scenic Allocator.
let image_props = fland::ImageProperties {
size: Some(fmath::SizeU { width: IMAGE_WIDTH, height: IMAGE_HEIGHT }),
..fland::ImageProperties::EMPTY
};
// TODO(fxbug.dev/76640): generated FIDL methods currently expect "&mut" args. This will
// change; according to fxbug.dev/65845 the generated FIDL will use "&" instead (at least
// for POD structs like these). When this lands we can remove the ".clone()" from the call
// sites below.
self.flatland
.create_image(&mut IMAGE_ID.clone(), &mut buffer_tokens.import_token, 0, image_props)
.expect("fidl error");
// Populate the rest of the Flatland scene. There is a single transform which is set as the
// root transform; the newly-created image is set as the content of that transform.
self.flatland.create_transform(&mut TRANSFORM_ID.clone()).expect("fidl error");
self.flatland.set_root_transform(&mut TRANSFORM_ID.clone()).expect("fidl error");
self.flatland
.set_content(&mut TRANSFORM_ID.clone(), &mut IMAGE_ID.clone())
.expect("fidl error");
}
fn create_parent_viewport_watcher(
&mut self,
mut view_creation_token: fviews::ViewCreationToken,
) | while let Some(result) = layout_info_stream.next().await {
match result {
Ok(layout_info) => {
let mut width = 0;
let mut height = 0;
if let Some(logical_size) = layout_info.logical_size {
width = logical_size.width;
height = logical_size.height;
}
sender
.unbounded_send(MessageInternal::Relayout { width, height })
.expect("failed to send MessageInternal.");
}
Err(fidl::Error::ClientChannelClosed { .. }) => {
info!("graph link connection closed.");
return; // from spawned task closure
}
Err(fidl_error) => {
warn!("graph link GetLayout() error: {:?}", fidl_error);
return; // from spawned task closure
}
}
}
})
.detach();
}
fn draw(&mut self, expected_presentation_time: zx::Time) {
trace::duration!("gfx", "FlatlandViewProvider::draw");
let time_since_last_draw_in_seconds = ((expected_presentation_time.into_nanos()
- self.last_expected_presentation_time.into_nanos())
as f32)
/ 1_000_000_000.0;
self.last_expected_presentation_time = expected_presentation_time;
let hue_change_time_per_second = 30 as f32;
self.hue =
(self.hue + hue_change_time_per_second * time_since_last_draw_in_seconds) % 360.0;
self.set_image_colors();
self.sched_lib.request_present();
}
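    // Worked example of the hue arithmetic in draw() (numbers are illustrative):
    // at ~60 fps the presentation-time delta is about 16.7 ms, so
    // time_since_last_draw_in_seconds ≈ 0.0167 and the hue advances by roughly
    // 30.0 * 0.0167 ≈ 0.5 degrees per frame, wrapping back to 0.0 at 360.0.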
fn on_relayout(&mut self, width: u32, height: u32) {
self.flatland
.set_image_destination_size(&mut IMAGE_ID.clone(), &mut fmath::SizeU { width, height })
.expect("fidl error");
self.sched_lib.request_present();
}
fn set_image_colors(&mut self) {
let allocation = self.allocation.as_ref().unwrap();
// Write pixel values into the allocated buffer.
match &allocation.buffers[0].vmo {
Some(vmo) => {
assert!(IMAGE_WIDTH == 2);
assert!(IMAGE_HEIGHT == 2);
// Compute the same row-pitch as Flatland will compute internally.
assert!(allocation.settings.has_image_format_constraints);
let row_pitch: usize =
minimum_row_bytes(allocation.settings.image_format_constraints, IMAGE_WIDTH)
.expect("failed to compute row-pitch")
.try_into()
.unwrap();
// TODO(fxbug.dev/76640): should look at pixel-format, instead of assuming 32-bit
// BGRA pixels. For now, format is hard-coded anyway.
let p00: [u8; 4] = hsv_to_rgba(self.hue, 30.0, 75.0);
let p10: [u8; 4 | {
let (parent_viewport_watcher, server_end) =
create_proxy::<fland::ParentViewportWatcherMarker>()
.expect("failed to create ParentViewportWatcherProxy");
// NOTE: it isn't necessary to call maybe_present() for this to take effect, because we will
// relayout when we receive the initial layout info. See CreateView() FIDL docs.
self.flatland.create_view(&mut view_creation_token, server_end).expect("fidl error");
// NOTE: there may be a race condition if TemporaryFlatlandViewProvider.CreateView() is
// invoked a second time, causing us to create another graph link. Because Zircon doesn't
// guarantee ordering on responses of different channels, we might receive data from the old
// link after data from the new link, just before the old link is closed. Non-example code
// should be more careful (this assumes that the client expects CreateView() to be called
// multiple times, which clients commonly don't).
let sender = self.internal_sender.clone();
fasync::Task::spawn(async move {
let mut layout_info_stream =
HangingGetStream::new(Box::new(move || Some(parent_viewport_watcher.get_layout())));
| identifier_body |
eks.go |
}
type EksUtilsConfig struct {
WriteKubeConfig map[string]string `yaml:"write_kubeconfig"`
}
type EksConfig struct {
clusterName string // set after parsing the eks YAML
Binary string // path to the eksctl binary
Params struct {
Global map[string]string
GetCluster map[string]string `yaml:"get_cluster"`
CreateCluster map[string]string `yaml:"create_cluster"`
DeleteCluster map[string]string `yaml:"delete_cluster"`
UpdateCluster map[string]string `yaml:"update_cluster"`
Utils EksUtilsConfig
ConfigFile map[string]interface{} `yaml:"config_file"`
}
}
// Instantiates a new instance
func newEksProvisioner(stackConfig interfaces.IStack, clusterSot interfaces.IClusterSot) (*EksProvisioner, error) {
eksConfig, err := parseEksConfig(stackConfig)
if err != nil {
return nil, errors.WithStack(err)
}
return &EksProvisioner{
stack: stackConfig,
eksConfig: *eksConfig,
clusterSot: clusterSot,
}, nil
}
func (p EksProvisioner) GetStack() interfaces.IStack {
return p.stack
}
func (p EksProvisioner) ClusterSot() interfaces.IClusterSot {
return p.clusterSot
}
func (p EksProvisioner) Binary() string {
return p.eksConfig.Binary
}
// Returns a bool indicating whether the cluster exists (but it may not yet respond to kubectl commands)
func (p EksProvisioner) clusterExists() (bool, error) {
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return false, errors.WithStack(err)
}
log.Logger.Info("Checking if an EKS cluster already exists...")
log.Logger.Tracef("Checking if a Eks cluster config exists for values: %#v", templatedVars)
args := []string{"get", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.GetCluster)
var stdoutBuf, stderrBuf bytes.Buffer
err = utils.ExecCommand(p.eksConfig.Binary, args, map[string]string{}, &stdoutBuf,
&stderrBuf, "", eksCommandTimeoutSeconds, 0, false)
if err != nil {
if errors.Cause(err) == context.DeadlineExceeded {
return false, errors.Wrap(err,
"Timed out trying to retrieve EKS cluster config. "+
"Check your credentials.")
}
// todo - catch errors due to missing/expired AWS credentials and throw an error
if _, ok := errors.Cause(err).(*exec.ExitError); ok {
log.Logger.Info("EKS cluster doesn't exist")
return false, nil
} else {
return false, errors.Wrap(err, "Error fetching EKS clusters")
}
}
return true, nil
}
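// Illustrative only: assuming parameteriseValues (defined elsewhere) turns each
// params map entry into a "--key value" flag pair, the command built above is
// roughly equivalent to running e.g.
//
//   eksctl get cluster --region eu-west-1 --profile example
//
// with the exact flags coming from the Global and GetCluster params.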
// Writes any configuration for a config file to a temporary file and returns it. If
// that key doesn't exist, an empty path is returned.
func (p EksProvisioner) writeConfigFile() (string, error) {
if len(p.eksConfig.Params.ConfigFile) > 0 {
// marshal the struct to YAML
yamlBytes, err := yaml.Marshal(&p.eksConfig.Params.ConfigFile)
if err != nil {
return "", errors.WithStack(err)
}
yamlString := string(yamlBytes[:])
// write the config to a temporary file
tmpfile, err := ioutil.TempFile("", "eks.*.yaml")
if err != nil {
return "", errors.WithStack(err)
}
defer tmpfile.Close()
if _, err := tmpfile.Write([]byte(yamlString)); err != nil {
return "", errors.WithStack(err)
}
if err := tmpfile.Close(); err != nil {
return "", errors.WithStack(err)
}
log.Logger.Debugf("EKS config file written to: %s", tmpfile.Name())
return tmpfile.Name(), nil
} else {
log.Logger.Infof("No EKS config file data configured. No config file path will be passed " +
"to eksctl commands")
return "", nil
}
}
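// For illustration only (hypothetical values, not taken from any stack config in
// this repo): the "config_file" params typically marshal into an eksctl
// ClusterConfig document along the lines of:
//
//   apiVersion: eksctl.io/v1alpha5
//   kind: ClusterConfig
//   metadata:
//     name: example-cluster
//     region: eu-west-1
//
// which is what gets written to the temporary file and passed via "-f" below.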
// Creates an EKS cluster.
func (p EksProvisioner) | (dryRun bool) error {
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if clusterExists {
log.Logger.Debugf("An EKS cluster already exists called '%s'. Won't recreate it...",
p.GetStack().GetConfig().GetCluster())
return nil
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"create", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.CreateCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
_, err = printer.Fprintf("Creating EKS cluster (this may take some time)...\n")
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{},
os.Stdout, os.Stderr, "", 0, 0, dryRun)
if err != nil {
return errors.WithStack(err)
}
if !dryRun {
log.Logger.Infof("EKS cluster created")
err = p.renameKubeContext()
if err != nil {
return errors.WithStack(err)
}
}
p.stack.GetStatus().SetStartedThisRun(true)
// only sleep before checking the cluster for readiness if we started it
p.stack.GetStatus().SetSleepBeforeReadyCheck(eksSleepSecondsBeforeReadyCheck)
return nil
}
// When eksctl downloads a kubeconfig file for a cluster it uses the IAM username as the
// name of the kubecontext. This would complicate configuring the kubecontext, so let's
// just strip the username from the kubecontext
func (p EksProvisioner) renameKubeContext() error {
log.Logger.Debugf("Renaming kube context for EKS cluster '%s'", p.eksConfig.clusterName)
pathOptions := clientcmd.NewDefaultPathOptions()
kubeConfig, err := pathOptions.GetStartingConfig()
if err != nil {
return errors.WithStack(err)
}
shortClusterName := p.eksConfig.clusterName
clusterNameRe := regexp.MustCompile(fmt.Sprintf(".*@%s.%s.eksctl.io", shortClusterName,
p.stack.GetConfig().GetRegion()))
contextName := ""
fullClusterName := ""
for name, ctx := range kubeConfig.Contexts {
if clusterNameRe.MatchString(name) {
log.Logger.Debugf("Kubeconfig context '%s' matches regex '%s'", name, clusterNameRe.String())
contextName = name
fullClusterName = ctx.Cluster
}
}
if contextName != "" {
log.Logger.Infof("Renaming EKS cluster contex† from '%s' to '%s' in kubeconfig file",
contextName, fullClusterName)
kubeConfig.Contexts[fullClusterName] = kubeConfig.Contexts[contextName]
delete(kubeConfig.Contexts, contextName)
// also set the renamed cluster as the default context
kubeConfig.CurrentContext = fullClusterName
err = clientcmd.ModifyConfig(pathOptions, *kubeConfig, false)
if err != nil {
return errors.WithStack(err)
}
} else {
log.Logger.Infof("Not renaming cluster context for EKS cluster '%s'", shortClusterName)
}
return nil
}
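// Worked example (hypothetical names): with cluster name "demo" in region
// "eu-west-1", eksctl typically writes a context like "alice@demo.eu-west-1.eksctl.io".
// The regex above matches it, the context is renamed to the cluster entry's name
// (e.g. "demo.eu-west-1.eksctl.io"), and that renamed context becomes the current one.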
// Deletes a cluster
func (p EksProvisioner) Delete(approved bool, dryRun bool) error {
dryRunPrefix := ""
if dryRun {
dryRunPrefix = "[Dry run] "
}
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if !clusterExists {
return errors.New("No EKS cluster exists to delete")
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"delete", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.DeleteCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
if approved {
_, err = printer.Fprintf("%sDeleting EKS cluster...\n", dryRunPrefix)
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{}, os.Stdout,
| Create | identifier_name |
eks.go | .Cmd
}
type EksUtilsConfig struct {
WriteKubeConfig map[string]string `yaml:"write_kubeconfig"`
}
type EksConfig struct {
clusterName string // set after parsing the eks YAML
Binary string // path to the eksctl binary
Params struct {
Global map[string]string
GetCluster map[string]string `yaml:"get_cluster"`
CreateCluster map[string]string `yaml:"create_cluster"`
DeleteCluster map[string]string `yaml:"delete_cluster"`
UpdateCluster map[string]string `yaml:"update_cluster"`
Utils EksUtilsConfig
ConfigFile map[string]interface{} `yaml:"config_file"`
}
}
// Instantiates a new instance
func newEksProvisioner(stackConfig interfaces.IStack, clusterSot interfaces.IClusterSot) (*EksProvisioner, error) {
eksConfig, err := parseEksConfig(stackConfig)
if err != nil {
return nil, errors.WithStack(err)
}
return &EksProvisioner{
stack: stackConfig,
eksConfig: *eksConfig,
clusterSot: clusterSot,
}, nil
}
func (p EksProvisioner) GetStack() interfaces.IStack {
return p.stack
}
func (p EksProvisioner) ClusterSot() interfaces.IClusterSot {
return p.clusterSot
}
func (p EksProvisioner) Binary() string {
return p.eksConfig.Binary
}
// Returns a bool indicating whether the cluster exists (but it may not yet respond to kubectl commands)
func (p EksProvisioner) clusterExists() (bool, error) {
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return false, errors.WithStack(err)
}
log.Logger.Info("Checking if an EKS cluster already exists...")
log.Logger.Tracef("Checking if a Eks cluster config exists for values: %#v", templatedVars)
args := []string{"get", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.GetCluster)
var stdoutBuf, stderrBuf bytes.Buffer
err = utils.ExecCommand(p.eksConfig.Binary, args, map[string]string{}, &stdoutBuf,
&stderrBuf, "", eksCommandTimeoutSeconds, 0, false)
if err != nil {
if errors.Cause(err) == context.DeadlineExceeded {
return false, errors.Wrap(err,
"Timed out trying to retrieve EKS cluster config. "+
"Check your credentials.")
}
// todo - catch errors due to missing/expired AWS credentials and throw an error
if _, ok := errors.Cause(err).(*exec.ExitError); ok {
log.Logger.Info("EKS cluster doesn't exist")
return false, nil
} else {
return false, errors.Wrap(err, "Error fetching EKS clusters")
}
}
return true, nil
}
// Writes any configuration for a config file to a temporary file and returns it. If
// that key doesn't exist, an empty path is returned.
func (p EksProvisioner) writeConfigFile() (string, error) {
if len(p.eksConfig.Params.ConfigFile) > 0 {
// marshal the struct to YAML
yamlBytes, err := yaml.Marshal(&p.eksConfig.Params.ConfigFile)
if err != nil {
return "", errors.WithStack(err)
}
yamlString := string(yamlBytes[:])
// write the config to a temporary file
tmpfile, err := ioutil.TempFile("", "eks.*.yaml")
if err != nil {
return "", errors.WithStack(err)
}
defer tmpfile.Close()
if _, err := tmpfile.Write([]byte(yamlString)); err != nil {
return "", errors.WithStack(err)
}
if err := tmpfile.Close(); err != nil {
return "", errors.WithStack(err)
}
log.Logger.Debugf("EKS config file written to: %s", tmpfile.Name())
return tmpfile.Name(), nil
} else {
log.Logger.Infof("No EKS config file data configured. No config file path will be passed " +
"to eksctl commands")
return "", nil
}
}
// Creates an EKS cluster.
func (p EksProvisioner) Create(dryRun bool) error {
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if clusterExists {
log.Logger.Debugf("An EKS cluster already exists called '%s'. Won't recreate it...",
p.GetStack().GetConfig().GetCluster())
return nil
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"create", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.CreateCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
_, err = printer.Fprintf("Creating EKS cluster (this may take some time)...\n")
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{},
os.Stdout, os.Stderr, "", 0, 0, dryRun)
if err != nil {
return errors.WithStack(err)
}
if !dryRun {
log.Logger.Infof("EKS cluster created")
err = p.renameKubeContext()
if err != nil {
return errors.WithStack(err)
}
}
p.stack.GetStatus().SetStartedThisRun(true)
// only sleep before checking the cluster for readiness if we started it
p.stack.GetStatus().SetSleepBeforeReadyCheck(eksSleepSecondsBeforeReadyCheck)
return nil
}
// When eksctl downloads a kubeconfig file for a cluster it uses the IAM username as the
// name of the kubecontext. This would complicate configuring the kubecontext, so let's
// just strip the username from the kubecontext
func (p EksProvisioner) renameKubeContext() error {
log.Logger.Debugf("Renaming kube context for EKS cluster '%s'", p.eksConfig.clusterName)
pathOptions := clientcmd.NewDefaultPathOptions()
kubeConfig, err := pathOptions.GetStartingConfig()
if err != nil {
return errors.WithStack(err)
}
shortClusterName := p.eksConfig.clusterName
clusterNameRe := regexp.MustCompile(fmt.Sprintf(".*@%s.%s.eksctl.io", shortClusterName,
p.stack.GetConfig().GetRegion()))
contextName := ""
fullClusterName := ""
for name, ctx := range kubeConfig.Contexts {
if clusterNameRe.MatchString(name) {
log.Logger.Debugf("Kubeconfig context '%s' matches regex '%s'", name, clusterNameRe.String())
contextName = name
fullClusterName = ctx.Cluster
}
}
if contextName != "" {
log.Logger.Infof("Renaming EKS cluster contex† from '%s' to '%s' in kubeconfig file",
contextName, fullClusterName)
kubeConfig.Contexts[fullClusterName] = kubeConfig.Contexts[contextName]
delete(kubeConfig.Contexts, contextName)
// also set the renamed cluster as the default context
kubeConfig.CurrentContext = fullClusterName
err = clientcmd.ModifyConfig(pathOptions, *kubeConfig, false)
if err != nil {
return errors.WithStack(err)
}
} else {
log.Logger.Infof("Not renaming cluster context for EKS cluster '%s'", shortClusterName)
}
return nil
}
// Deletes a cluster
func (p EksProvisioner) Delete(approved bool, dryRun bool) error {
| args := []string{"delete", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.DeleteCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
if approved {
_, err = printer.Fprintf("%sDeleting EKS cluster...\n", dryRunPrefix)
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{}, os.Stdout,
| dryRunPrefix := ""
if dryRun {
dryRunPrefix = "[Dry run] "
}
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if !clusterExists {
return errors.New("No EKS cluster exists to delete")
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
| identifier_body |
eks.go | config"`
}
type EksConfig struct {
clusterName string // set after parsing the eks YAML
Binary string // path to the eksctl binary
Params struct {
Global map[string]string
GetCluster map[string]string `yaml:"get_cluster"`
CreateCluster map[string]string `yaml:"create_cluster"`
DeleteCluster map[string]string `yaml:"delete_cluster"`
UpdateCluster map[string]string `yaml:"update_cluster"`
Utils EksUtilsConfig
ConfigFile map[string]interface{} `yaml:"config_file"`
}
}
// Instantiates a new instance
func newEksProvisioner(stackConfig interfaces.IStack, clusterSot interfaces.IClusterSot) (*EksProvisioner, error) {
eksConfig, err := parseEksConfig(stackConfig)
if err != nil {
return nil, errors.WithStack(err)
}
return &EksProvisioner{
stack: stackConfig,
eksConfig: *eksConfig,
clusterSot: clusterSot,
}, nil
}
func (p EksProvisioner) GetStack() interfaces.IStack {
return p.stack
}
func (p EksProvisioner) ClusterSot() interfaces.IClusterSot {
return p.clusterSot
}
func (p EksProvisioner) Binary() string {
return p.eksConfig.Binary
}
// Returns a bool indicating whether the cluster exists (but it may not yet respond to kubectl commands)
func (p EksProvisioner) clusterExists() (bool, error) {
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return false, errors.WithStack(err)
}
log.Logger.Info("Checking if an EKS cluster already exists...")
log.Logger.Tracef("Checking if a Eks cluster config exists for values: %#v", templatedVars)
args := []string{"get", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.GetCluster)
var stdoutBuf, stderrBuf bytes.Buffer
err = utils.ExecCommand(p.eksConfig.Binary, args, map[string]string{}, &stdoutBuf,
&stderrBuf, "", eksCommandTimeoutSeconds, 0, false)
if err != nil {
if errors.Cause(err) == context.DeadlineExceeded {
return false, errors.Wrap(err,
"Timed out trying to retrieve EKS cluster config. "+
"Check your credentials.")
}
// todo - catch errors due to missing/expired AWS credentials and throw an error
if _, ok := errors.Cause(err).(*exec.ExitError); ok {
log.Logger.Info("EKS cluster doesn't exist")
return false, nil
} else {
return false, errors.Wrap(err, "Error fetching EKS clusters")
}
}
return true, nil
}
// Writes any configuration for a config file to a temporary file and returns it. If
// that key doesn't exist, an empty path is returned.
func (p EksProvisioner) writeConfigFile() (string, error) {
if len(p.eksConfig.Params.ConfigFile) > 0 {
// marshal the struct to YAML
yamlBytes, err := yaml.Marshal(&p.eksConfig.Params.ConfigFile)
if err != nil {
return "", errors.WithStack(err)
}
yamlString := string(yamlBytes[:])
// write the config to a temporary file
tmpfile, err := ioutil.TempFile("", "eks.*.yaml")
if err != nil {
return "", errors.WithStack(err)
}
defer tmpfile.Close()
if _, err := tmpfile.Write([]byte(yamlString)); err != nil {
return "", errors.WithStack(err)
}
if err := tmpfile.Close(); err != nil {
return "", errors.WithStack(err)
}
log.Logger.Debugf("EKS config file written to: %s", tmpfile.Name())
return tmpfile.Name(), nil
} else {
log.Logger.Infof("No EKS config file data configured. No config file path will be passed " +
"to eksctl commands")
return "", nil
}
}
// Creates an EKS cluster.
func (p EksProvisioner) Create(dryRun bool) error {
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if clusterExists {
log.Logger.Debugf("An EKS cluster already exists called '%s'. Won't recreate it...",
p.GetStack().GetConfig().GetCluster())
return nil
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"create", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.CreateCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
_, err = printer.Fprintf("Creating EKS cluster (this may take some time)...\n")
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{},
os.Stdout, os.Stderr, "", 0, 0, dryRun)
if err != nil {
return errors.WithStack(err)
}
if !dryRun {
log.Logger.Infof("EKS cluster created")
err = p.renameKubeContext()
if err != nil {
return errors.WithStack(err)
}
}
p.stack.GetStatus().SetStartedThisRun(true)
// only sleep before checking the cluster for readiness if we started it
p.stack.GetStatus().SetSleepBeforeReadyCheck(eksSleepSecondsBeforeReadyCheck)
return nil
}
// When eksctl downloads a kubeconfig file for a cluster it uses the IAM username as the
// name of the kubecontext. This would complicate configuring the kubecontext, so let's
// just strip the username from the kubecontext
func (p EksProvisioner) renameKubeContext() error {
log.Logger.Debugf("Renaming kube context for EKS cluster '%s'", p.eksConfig.clusterName)
pathOptions := clientcmd.NewDefaultPathOptions()
kubeConfig, err := pathOptions.GetStartingConfig()
if err != nil {
return errors.WithStack(err)
}
shortClusterName := p.eksConfig.clusterName
clusterNameRe := regexp.MustCompile(fmt.Sprintf(".*@%s.%s.eksctl.io", shortClusterName,
p.stack.GetConfig().GetRegion()))
contextName := ""
fullClusterName := ""
for name, ctx := range kubeConfig.Contexts {
if clusterNameRe.MatchString(name) {
log.Logger.Debugf("Kubeconfig context '%s' matches regex '%s'", name, clusterNameRe.String())
contextName = name
fullClusterName = ctx.Cluster
}
}
if contextName != "" {
log.Logger.Infof("Renaming EKS cluster contex† from '%s' to '%s' in kubeconfig file",
contextName, fullClusterName)
kubeConfig.Contexts[fullClusterName] = kubeConfig.Contexts[contextName]
delete(kubeConfig.Contexts, contextName)
// also set the renamed cluster as the default context
kubeConfig.CurrentContext = fullClusterName
err = clientcmd.ModifyConfig(pathOptions, *kubeConfig, false)
if err != nil {
return errors.WithStack(err)
}
} else {
log.Logger.Infof("Not renaming cluster context for EKS cluster '%s'", shortClusterName)
}
return nil
}
// Deletes a cluster
func (p EksProvisioner) Delete(approved bool, dryRun bool) error {
dryRunPrefix := ""
if dryRun {
dryRunPrefix = "[Dry run] "
}
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if !clusterExists {
return errors.New("No EKS cluster exists to delete")
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"delete", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.DeleteCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
if approved {
_, err = printer.Fprintf("%sDeleting EKS cluster...\n", dryRunPrefix)
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{}, os.Stdout, | os.Stderr, "", eksCommandTimeoutSecondsLong, 0, dryRun)
if err != nil { | random_line_split |
|
eks.go | .Cmd
}
type EksUtilsConfig struct {
WriteKubeConfig map[string]string `yaml:"write_kubeconfig"`
}
type EksConfig struct {
clusterName string // set after parsing the eks YAML
Binary string // path to the eksctl binary
Params struct {
Global map[string]string
GetCluster map[string]string `yaml:"get_cluster"`
CreateCluster map[string]string `yaml:"create_cluster"`
DeleteCluster map[string]string `yaml:"delete_cluster"`
UpdateCluster map[string]string `yaml:"update_cluster"`
Utils EksUtilsConfig
ConfigFile map[string]interface{} `yaml:"config_file"`
}
}
// Instantiates a new instance
func newEksProvisioner(stackConfig interfaces.IStack, clusterSot interfaces.IClusterSot) (*EksProvisioner, error) {
eksConfig, err := parseEksConfig(stackConfig)
if err != nil {
return nil, errors.WithStack(err)
}
return &EksProvisioner{
stack: stackConfig,
eksConfig: *eksConfig,
clusterSot: clusterSot,
}, nil
}
func (p EksProvisioner) GetStack() interfaces.IStack {
return p.stack
}
func (p EksProvisioner) ClusterSot() interfaces.IClusterSot {
return p.clusterSot
}
func (p EksProvisioner) Binary() string {
return p.eksConfig.Binary
}
// Returns a bool indicating whether the cluster exists (but it may not yet respond to kubectl commands)
func (p EksProvisioner) clusterExists() (bool, error) {
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return false, errors.WithStack(err)
}
log.Logger.Info("Checking if an EKS cluster already exists...")
log.Logger.Tracef("Checking if a Eks cluster config exists for values: %#v", templatedVars)
args := []string{"get", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.GetCluster)
var stdoutBuf, stderrBuf bytes.Buffer
err = utils.ExecCommand(p.eksConfig.Binary, args, map[string]string{}, &stdoutBuf,
&stderrBuf, "", eksCommandTimeoutSeconds, 0, false)
if err != nil {
if errors.Cause(err) == context.DeadlineExceeded {
return false, errors.Wrap(err,
"Timed out trying to retrieve EKS cluster config. "+
"Check your credentials.")
}
// todo - catch errors due to missing/expired AWS credentials and throw an error
if _, ok := errors.Cause(err).(*exec.ExitError); ok {
log.Logger.Info("EKS cluster doesn't exist")
return false, nil
} else {
return false, errors.Wrap(err, "Error fetching EKS clusters")
}
}
return true, nil
}
// Writes any configuration for a config file to a temporary file and returns it. If
// that key doesn't exist, an empty path is returned.
func (p EksProvisioner) writeConfigFile() (string, error) {
if len(p.eksConfig.Params.ConfigFile) > 0 {
// marshal the struct to YAML
yamlBytes, err := yaml.Marshal(&p.eksConfig.Params.ConfigFile)
if err != nil {
return "", errors.WithStack(err)
}
yamlString := string(yamlBytes[:])
// write the config to a temporary file
tmpfile, err := ioutil.TempFile("", "eks.*.yaml")
if err != nil {
return "", errors.WithStack(err)
}
defer tmpfile.Close()
if _, err := tmpfile.Write([]byte(yamlString)); err != nil {
return "", errors.WithStack(err)
}
if err := tmpfile.Close(); err != nil {
return "", errors.WithStack(err)
}
log.Logger.Debugf("EKS config file written to: %s", tmpfile.Name())
return tmpfile.Name(), nil
} else {
log.Logger.Infof("No EKS config file data configured. No config file path will be passed " +
"to eksctl commands")
return "", nil
}
}
// Creates an EKS cluster.
func (p EksProvisioner) Create(dryRun bool) error {
clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if clusterExists {
log.Logger.Debugf("An EKS cluster already exists called '%s'. Won't recreate it...",
p.GetStack().GetConfig().GetCluster())
return nil
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"create", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.CreateCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
_, err = printer.Fprintf("Creating EKS cluster (this may take some time)...\n")
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{},
os.Stdout, os.Stderr, "", 0, 0, dryRun)
if err != nil {
return errors.WithStack(err)
}
if !dryRun {
log.Logger.Infof("EKS cluster created")
err = p.renameKubeContext()
if err != nil {
return errors.WithStack(err)
}
}
p.stack.GetStatus().SetStartedThisRun(true)
// only sleep before checking the cluster for readiness if we started it
p.stack.GetStatus().SetSleepBeforeReadyCheck(eksSleepSecondsBeforeReadyCheck)
return nil
}
// When eksctl downloads a kubeconfig file for a cluster it uses the IAM username as the
// name of the kubecontext. This would complicate configuring the kubecontext, so let's
// just strip the username from the kubecontext
func (p EksProvisioner) renameKubeContext() error {
log.Logger.Debugf("Renaming kube context for EKS cluster '%s'", p.eksConfig.clusterName)
pathOptions := clientcmd.NewDefaultPathOptions()
kubeConfig, err := pathOptions.GetStartingConfig()
if err != nil {
return errors.WithStack(err)
}
shortClusterName := p.eksConfig.clusterName
clusterNameRe := regexp.MustCompile(fmt.Sprintf(".*@%s.%s.eksctl.io", shortClusterName,
p.stack.GetConfig().GetRegion()))
contextName := ""
fullClusterName := ""
for name, ctx := range kubeConfig.Contexts {
if clusterNameRe.MatchString(name) {
log.Logger.Debugf("Kubeconfig context '%s' matches regex '%s'", name, clusterNameRe.String())
contextName = name
fullClusterName = ctx.Cluster
}
}
if contextName != "" {
log.Logger.Infof("Renaming EKS cluster contex† from '%s' to '%s' in kubeconfig file",
contextName, fullClusterName)
kubeConfig.Contexts[fullClusterName] = kubeConfig.Contexts[contextName]
delete(kubeConfig.Contexts, contextName)
// also set the renamed cluster as the default context
kubeConfig.CurrentContext = fullClusterName
err = clientcmd.ModifyConfig(pathOptions, *kubeConfig, false)
if err != nil {
return errors.WithStack(err)
}
} else {
log.Logger.Infof("Not renaming cluster context for EKS cluster '%s'", shortClusterName)
}
return nil
}
// Deletes a cluster
func (p EksProvisioner) Delete(approved bool, dryRun bool) error {
dryRunPrefix := ""
if dryRun {
| clusterExists, err := p.clusterExists()
if err != nil {
return errors.WithStack(err)
}
if !clusterExists {
return errors.New("No EKS cluster exists to delete")
}
templatedVars, err := p.stack.GetTemplatedVars(nil, map[string]interface{}{})
if err != nil {
return errors.WithStack(err)
}
log.Logger.Debugf("Templated stack config vars: %#v", templatedVars)
args := []string{"delete", "cluster"}
args = parameteriseValues(args, p.eksConfig.Params.Global)
args = parameteriseValues(args, p.eksConfig.Params.DeleteCluster)
configFilePath, err := p.writeConfigFile()
if err != nil {
return errors.WithStack(err)
}
if configFilePath != "" {
args = append(args, []string{"-f", configFilePath}...)
}
if approved {
_, err = printer.Fprintf("%sDeleting EKS cluster...\n", dryRunPrefix)
if err != nil {
return errors.WithStack(err)
}
err = utils.ExecCommandUnbuffered(p.eksConfig.Binary, args, map[string]string{}, os.Stdout,
| dryRunPrefix = "[Dry run] "
}
| conditional_block |
main.rs | let data1 = compute_data(segments1);
let data2 = compute_data(segments2);
// Next we split each segment into horizontal and vertical
// vectors, then sort them according to their horizontal component
fn partition_and_sort(seg: &[ComputeData]) -> (Vec<&ComputeData>, Vec<&ComputeData>) {
let (mut horizontals, mut verticals): (Vec<_>, Vec<_>) =
seg.iter().partition(|data| data.polarity.is_horizontal());
horizontals.sort_by_key(|data| data.segment.0.x);
verticals.sort_by_key(|data| data.segment.0.x);
(horizontals, verticals)
}
let (h1s, v1s) = partition_and_sort(&data1);
let (h2s, v2s) = partition_and_sort(&data2);
// now we can iterate over each horizontal and vertical pair in O(n+m)
fn find_manhattan_and_min_sum_distances(
horizontals: &[&ComputeData],
verticals: &[&ComputeData],
) -> (u64, u64) {
let mut h_iter = horizontals.iter();
let mut v_iter = verticals.iter();
let h_item = h_iter.next();
let v_item = v_iter.next();
// huh? Why the inner stuff here?
// We might run into cases where there are multiple horizontals
// and verticals crossing each other (think of the pound sign -> #).
// Iterating to the next vertical or horizontal after a successful
// intersection would be incorrect. Here, I've chosen to clone the
// verticals' iterator and run nested checks against the following
// vertical segments until they extend past the current horizontal
// segment. After that nested scan is complete, we could then move
// on to the next horizontal segment in the outer loop. ^
// P.S. would you look at that alignment!? ----------------------^
fn inner_find<'a>(
mut h_item: Option<&'a &'a ComputeData>,
mut v_item: Option<&'a &'a ComputeData>,
h_iter: &mut impl Iterator<Item = &'a &'a ComputeData>,
v_iter: &mut (impl Iterator<Item = &'a &'a ComputeData> + Clone),
nested: bool,
) -> (u64, u64) {
let mut min_manhattan = std::u64::MAX;
let mut min_sum = std::u64::MAX;
while let (Some(h_data), Some(v_data)) = (h_item, v_item) {
// In these cases, the vertical segment doesn't intersect, and
// there still might be other vertical segments that could
// intersect with this horizontal segment (due to sorting by x)
// so just move to the next vertical segment.
if v_data.bounds.bar <= h_data.bounds.low
|| h_data.bounds.bar <= v_data.bounds.low
|| h_data.bounds.bar >= v_data.bounds.high
{
v_item = v_iter.next();
continue;
}
// Here the vertical segment is beyond the current horizontal
// segment. Given that we sort by x, no more vertical
// segments will intersect with this horizontal segment. Move
// to the next horizontal segment. If we're in the nested
// loop, then just exit. The outer loop will increment for us.
if v_data.bounds.bar >= h_data.bounds.high {
if nested {
return (min_manhattan, min_sum);
}
h_item = h_iter.next();
continue;
}
let p = Point {
x: v_data.bounds.bar,
y: h_data.bounds.bar,
};
let p_manhattan = p.manhattan_distance();
if p_manhattan < min_manhattan {
min_manhattan = p_manhattan;
}
let p_min_sum = h_data.previous_length
+ h_data.segment.0.flat_distance_to(&p)
+ v_data.previous_length
+ v_data.segment.0.flat_distance_to(&p);
if p_min_sum < min_sum {
min_sum = p_min_sum;
}
if nested {
v_item = v_iter.next();
continue;
}
let (inner_manhattan, inner_min_sum) =
inner_find(h_item, v_item, h_iter, &mut v_iter.clone(), true);
if inner_manhattan < min_manhattan {
min_manhattan = inner_manhattan;
}
if inner_min_sum < min_sum {
min_sum = inner_min_sum;
}
h_item = h_iter.next();
}
(min_manhattan, min_sum)
}
inner_find(h_item, v_item, &mut h_iter, &mut v_iter, false)
}
let (manhattan_a, min_sum_a) = find_manhattan_and_min_sum_distances(&h1s, &v2s);
let (manhattan_b, min_sum_b) = find_manhattan_and_min_sum_distances(&h2s, &v1s);
(manhattan_a.min(manhattan_b), min_sum_a.min(min_sum_b))
}
}
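// Illustration of the "#" case that inner_find handles (coordinates are made up):
// if one wire contributes horizontal segments at y = 1 and y = 2 spanning x in
// [0, 3], and the other contributes vertical segments at x = 1 and x = 2 spanning
// y in [0, 3], there are four crossings. After the first hit on a horizontal
// segment, the cloned vertical iterator keeps scanning the remaining verticals
// before the outer loop advances to the next horizontal; otherwise the extra
// crossings on the same horizontal would be skipped.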
#[cfg(test)]
mod test {
use super::*;
macro_rules! segments_cross {
{(($x1:expr, $y1:expr), ($x2:expr, $y2:expr)) <-|-> (($x3:expr, $y3:expr), ($x4:expr, $y4:expr)) @ ($c1:expr, $c2:expr)} => {
let segment1 = Segment(Point{x: $x1, y: $y1}, Point{x: $x2, y: $y2});
let segment2 = Segment(Point{x: $x3, y: $y3}, Point{x: $x4, y: $y4});
let cross = segment1.crosses(&segment2);
assert!(cross.is_some());
let (cross, ..) = cross.unwrap();
assert_eq!(cross.x, $c1);
assert_eq!(cross.y, $c2);
};
{(($x1:expr, $y1:expr), ($x2:expr, $y2:expr)) <---> (($x3:expr, $y3:expr), ($x4:expr, $y4:expr))} => {
let segment1 = Segment(Point{x: $x1, y: $y1}, Point{x: $x2, y: $y2});
let segment2 = Segment(Point{x: $x3, y: $y3}, Point{x: $x4, y: $y4});
assert!(segment1.crosses(&segment2).is_none());
};
}
#[test]
fn segments_cross() {
// two vertical | |
segments_cross!((( 0, 1), ( 0, -1)) <---> (( 0, 2), ( 0, -2)));
// two horizontal =
segments_cross!((( 1, 0), (-1, 0)) <---> (( 2, 0), (-2, 0)));
// too far left - |
segments_cross!(((-1, 0), ( 1, 0)) <---> ((-2, 1), (-2, -1)));
// too far right | -
segments_cross!(((-1, 0), ( 1, 0)) <---> (( 2, 1), ( 2, -1)));
// too far up |
// -
segments_cross!((( 2, -1), ( 2, 1)) <---> ((-1, 0), ( 1, 0)));
// too far down -
// |
segments_cross!(((-2, -1), (-2, 1)) <---> ((-1, 0), ( 1, 0)));
// cross +
segments_cross!(((-1, 0), ( 1, 0)) <-|-> (( 0, -1), ( 0, 1)) @ (0, 0));
// on-edge should not cross -|
segments_cross!(((-1, 0), ( 1, 0)) <---> (( 1, -1), ( 1, 1)));
}
macro_rules! assert_segments_eq {
($seg:expr, {($x1:expr, $y1:expr), ($x2:expr, $y2:expr)}) => {
assert_eq!($seg.0.x, $x1);
assert_eq!($seg.0.y, $y1);
assert_eq!($seg.1.x, $x2);
assert_eq!($seg.1.y, $y2);
};
}
#[test]
fn runner() {
let mut runner = Runner::new();
runner.follow(Route::Up(4));
runner.follow(Route::Right(4));
runner.follow(Route::Down(4));
runner.follow(Route::Left(4));
let path = runner.finish();
assert_segments_eq!(path[0], {(0, 0), (0, 4)});
assert_segments_eq!(path[1], {(0, 4), (4, 4)});
assert_segments_eq!(path[2], {(4, 4), (4, 0)});
assert_segments_eq!(path[3], {(4, 0), (0, 0)}); | random_line_split |
||
main.rs | (&self) -> u64 {
self.x.abs() as u64 + self.y.abs() as u64
}
fn flat_distance_to(&self, other: &Self) -> u64 {
(self.x - other.x).abs() as u64 + (self.y - other.y).abs() as u64
}
}
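// Quick sanity check of the helpers above (illustrative values):
// Point { x: 3, y: -4 } has manhattan_distance() == 7, and its flat_distance_to
// Point { x: 1, y: -1 } is |3 - 1| + |-4 - (-1)| == 5.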
enum Polarity {
Vertical,
Horizontal,
}
#[allow(dead_code)]
impl Polarity {
fn is_horizontal(&self) -> bool {
match self {
Polarity::Horizontal => true,
_ => false,
}
}
fn is_vertical(&self) -> bool {
match self {
Polarity::Vertical => true,
_ => false,
}
}
}
struct Bounds {
low: i64,
high: i64,
bar: i64,
}
struct Segment(Point, Point);
impl Segment {
fn polarity_and_bounds(&self) -> (Polarity, Bounds) {
if self.0.x == self.1.x {
if self.0.y < self.1.y {
(
Polarity::Vertical,
Bounds {
low: self.0.y,
high: self.1.y,
bar: self.0.x,
},
)
} else {
(
Polarity::Vertical,
Bounds {
low: self.1.y,
high: self.0.y,
bar: self.0.x,
},
)
}
} else {
if self.0.x < self.1.x {
(
Polarity::Horizontal,
Bounds {
low: self.0.x,
high: self.1.x,
bar: self.0.y,
},
)
} else {
(
Polarity::Horizontal,
Bounds {
low: self.1.x,
high: self.0.x,
bar: self.0.y,
},
)
}
}
}
fn crosses(&self, other: &Segment) -> Option<(Point, u64, u64)> {
let point = match (self.polarity_and_bounds(), other.polarity_and_bounds()) {
((Polarity::Horizontal, ..), (Polarity::Horizontal, ..))
| ((Polarity::Vertical, ..), (Polarity::Vertical, ..)) => None,
((Polarity::Vertical, v_bounds), (Polarity::Horizontal, h_bounds))
| ((Polarity::Horizontal, h_bounds), (Polarity::Vertical, v_bounds)) => {
if h_bounds.bar <= v_bounds.low
|| h_bounds.bar >= v_bounds.high
|| v_bounds.bar <= h_bounds.low
|| v_bounds.bar >= h_bounds.high
{
None
} else {
Some(Point {
x: v_bounds.bar,
y: h_bounds.bar,
})
}
}
};
point.map(|p| (p, self.0.flat_distance_to(&p), other.0.flat_distance_to(&p)))
}
fn length(&self) -> u64 {
match self.polarity_and_bounds() {
(_, Bounds { low, high, .. }) => (high - low) as u64,
}
}
}
impl Debug for Segment {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
write!(
f,
"{{({}, {}) - ({}, {})}}",
self.0.x, self.0.y, self.1.x, self.1.y
)
}
}
#[derive(Copy, Clone, Debug)]
enum Route {
Up(u32),
Down(u32),
Left(u32),
Right(u32),
}
#[derive(Debug)]
enum Either<T, U> {
A(T),
B(U),
}
impl FromStr for Route {
type Err = Either<char, ParseIntError>;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let first_char = s.as_bytes()[0] as char;
let num = s[1..].parse().map_err(Either::B)?;
match first_char {
'U' => Ok(Route::Up(num)),
'D' => Ok(Route::Down(num)),
'L' => Ok(Route::Left(num)),
'R' => Ok(Route::Right(num)),
_ => Err(Either::A(first_char)),
}
}
}
struct Runner {
path: Vec<Segment>,
cursor: Point,
}
impl Runner {
fn new() -> Self {
Self {
path: vec![],
cursor: Point { x: 0, y: 0 },
}
}
fn follow(&mut self, route: Route) {
let next = match route {
Route::Up(u) => Point {
y: self.cursor.y + u as i64,
..self.cursor
},
Route::Down(d) => Point {
y: self.cursor.y - d as i64,
..self.cursor
},
Route::Left(l) => Point {
x: self.cursor.x - l as i64,
..self.cursor
},
Route::Right(r) => Point {
x: self.cursor.x + r as i64,
..self.cursor
},
};
let segment = Segment(self.cursor, next);
self.path.push(segment);
self.cursor = next;
}
fn finish(self) -> Vec<Segment> {
self.path
}
}
fn run(route1: Vec<Route>, route2: Vec<Route>) -> (u64, u64) {
let mut runner1 = Runner::new();
for route in &route1 {
runner1.follow(*route);
}
let segments1 = runner1.finish();
let mut runner2 = Runner::new();
for route in &route2 {
runner2.follow(*route);
}
let segments2 = runner2.finish();
// This whole section could definitely be optimized...
// O(n*m)
#[cfg(not(feature = "optimized"))]
{
let mut crosses = vec![];
let mut cross_distances = HashMap::<Point, u64>::new();
let mut s1sum = 0;
for s1 in &segments1 {
let mut s2sum = 0;
for s2 in &segments2 {
if let Some((p, s1dist, s2dist)) = s1.crosses(s2) {
crosses.push(p);
if !cross_distances.contains_key(&p) {
cross_distances.insert(p, s1sum + s1dist + s2sum + s2dist);
}
}
s2sum += s2.length();
}
s1sum += s1.length();
}
let min_manhattan = crosses
.into_iter()
.map(|p| p.manhattan_distance())
.min()
.unwrap();
let min_sum_dist = cross_distances.into_iter().map(|(_, v)| v).min().unwrap();
(min_manhattan, min_sum_dist)
}
// optimized
// O(n log n + m log m)
#[cfg(feature = "optimized")]
{
struct ComputeData {
segment: Segment,
polarity: Polarity,
bounds: Bounds,
previous_length: u64,
}
// First we compute the lengths to get to each segment
// and store them together
fn compute_data(seg: Vec<Segment>) -> Vec<ComputeData> {
let mut length = 0;
seg.into_iter()
.map(|segment| {
let next_length = segment.length();
let (polarity, bounds) = segment.polarity_and_bounds();
let result = ComputeData {
segment,
polarity,
bounds,
previous_length: length,
};
length += next_length;
result
})
.collect()
}
let data1 = compute_data(segments1);
let data2 = compute_data(segments2);
// Next we split each segment into horizontal and vertical
// vectors, then sort them according to their horizontal component
fn partition_and_sort(seg: &[ComputeData]) -> (Vec<&ComputeData>, Vec<&ComputeData>) {
let (mut horizontals, mut verticals): (Vec<_>, Vec<_>) =
seg.iter().partition(|data| data.polarity.is_horizontal());
horizontals.sort_by_key(|data| data.segment.0.x);
verticals.sort_by_key(|data| data.segment.0.x);
(horizontals, verticals)
}
let (h1s, v1s) = partition_and_sort(&data1);
let (h2s, v2s) = partition_and_sort(&data2);
// now we can iterate over each horizontal and vertical pair in O(n+m)
fn find_manhattan_and_min_sum_distances(
horizontals: &[&ComputeData],
verticals: &[&ComputeData],
) -> (u64, u64) {
let mut h_iter = horizontals.iter();
let mut v_iter = verticals.iter();
let h_item = h_iter.next();
let v_item = v_iter.next();
// huh? Why the inner stuff here?
// We might run into cases where there are multiple horizontals
// and verticals crossing each other (think of the pound sign -> #).
// Iterating to the next vertical or horizontal after a successful
| manhattan_distance | identifier_name |
|
viewModels.js | Load.error);
}
},
error : function(xhr, ajaxOptions, thrownError) {
console.log(xhr);
console.log(ajaxOptions);
console.log(thrownError);
},
complete : function() {
self.phase("upload");
self.uploadDataAsync();
},
beforeSend : setHeader
});
}, 100);
};
self.uploadDataAsync = function() {
setTimeout(function() {
var inputjson = {
"filename" : self.parent().fileName(),
"data" : self.data()
};
var jsonString = JSON.stringify(inputjson);
$.ajax({
url : 'jds/files',
type : 'POST',
data : jsonString,
dataType : "text",
contentType : "application/json",
async : true,
success : function(largeLoad) {
},
error : function(xhr, ajaxOptions, thrownError) {
console.log(xhr);
console.log(ajaxOptions);
console.log(thrownError);
},
complete : function() {
self.phase("search");
self.parent().canRunSearchQuery(true);
self.parent().tryQuery();
},
beforeSend : setUploadHeader
});
}, 100);
};
};
/**
* Creates a search menu view model.
*
* @param database
* the search database
* @param databases
* the available search databases
* @param journals
* the search journals
* @param startRange
* the start of the publication year range
* @param endRange
* the end of the publication year range
* @param query
* the query
* @param tool
* the E-utilities tool name
* @param email
* the E-utilities contact email
*/
var SearchVm = function(database, databases, journals, startRange, endRange,
query, tool, email) {
'use strict';
self.databases = window.ko.observableArray(databases);
self.journals = window.ko.observableArray(journals);
self.files = window.ko.observableArray([]);
self.newJournal = window.ko.observable("");
self.newDatabase = window.ko.observable("pubmed");
self.startRange = window.ko.observable(startRange);
self.endRange = window.ko.observable(endRange);
self.fileName = window.ko.observable("savefilename.xml");
self.type = window.ko.observable("Search");
self.query = window.ko.observable(query);
self.stillMoreQueries = window.ko.observable(false);
self.prefix = window.ko
.observable("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/");
self.tool = window.ko.observable(tool);
self.email = window.ko.observable(email);
self.canRunSearchQuery = window.ko.observable(true);
self.intervalId = window.ko.observable(0);
self.searchButton = window.ko.observable('Start Search');
self.toggleSearchButton = function() {
if (self.searchButton() === 'Start Search') {
self.searchButton('Stop Search');
self.startCanRunQueryToggler();
} else {
clearInterval(self.intervalId());
self.searchButton('Start Search');
}
}
self.startCanRunQueryToggler = function() {
clearInterval(self.intervalId());
var id = setInterval(function() {
self.tryQuery();
self.getResultFilesAsync();
}, 2000);
self.intervalId(id);
};
self.tryQuery = function() {
console.log("tryQuery called");
var arrayIndex = 0;
var count = 0;
for (arrayIndex; arrayIndex < self.journals().length; arrayIndex++) {
if (self.canRunSearchQuery() === false) {
break;
}
var currentjournal = self.journals()[arrayIndex];
var count = currentjournal.count() !== 0 ? currentjournal.count()
: 1;
var retStart = currentjournal.retStart();
if (currentjournal.checked() === true
&& (currentjournal.retStart() === 0 || currentjournal
.retStart() < currentjournal.count())) {
self.canRunSearchQuery(false);
currentjournal.value(Math.floor((retStart / count) * 100));
var searchquery = self.buildSearchQuery(currentjournal
.database(), currentjournal.name(), currentjournal
.retStart(), currentjournal.retMax(), currentjournal
.retmode(), self.startRange(), self.endRange(), self
.prefix(), self.tool(), self.email());
currentjournal.currentQuery(searchquery);
currentjournal.datatype("XML");
currentjournal.getSearchDataAsync(self.startRange(), self.endRange());
}
}
};
self.getResultFilesAsync = function() {
// setTimeout(function() {
// $.ajax({
// url : "jds/files",
// type : 'GET',
// dataType : "json",
// contentType : "application/json",
// async : true,
// success : function(largeLoad) {
// if (typeof largeLoad.error === 'undefined') {
// self.files(largeLoad);
// } else {
// alert('ERRORS: ' + largeLoad.error);
// }
// },
// error : function(xhr, ajaxOptions, thrownError) {
// console.log(xhr);
// console.log(ajaxOptions);
// console.log(thrownError);
// },
// complete : function() {
// },
// // beforeSend : setHeader
// });
// }, 100);
};
self.getResultFilesAsync();
self.testUpload = function() {
var currentjournal = self.journals()[0];
currentjournal.currentQuery("jds/files");
currentjournal
.data('<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE PubmedArticleSet PUBLIC "-//NLM//DTD PubMedArticle, 1st January 2015//EN" "http://www.ncbi.nlm.nih.gov/corehtml/query/DTD/pubmed_150101.dtd"><PubmedArticleSet><PubmedArticle></PubmedArticle></PubmedArticleSet>')
currentjournal.uploadDataAsync();
}
self.addJournal = function() {
self.journals.push(new JournalVM(self.newJournal(), true, "", self
.newDatabase(), this));
};
self.removeJournal = function(item) {
self.journals.remove(item);
}
self.buildSearchQuery = function(db, journal, retStart, retMax, retmode,
sYear, eYear, prefix, tool, email) {
var myjournal = journal.replace(/\s+/g, '+')
var query = prefix + "esearch.fcgi" + "?" + "db=" + db + "&" + "term="
+ myjournal + "%5bjournal%5d" + "+AND+" + sYear + ":" + eYear
+ "%5bpdat%5d" + "&retStart=" + retStart + "&retMax=" + retMax
+ "&retmode=" + retmode + "&tool=" + tool + "&email=" + email;
return query;
};
self.buildSummaryQuery = function(db, ids, prefix, tool, email) {
var query = "";
if (ids !== undefined && ids.length > 0) {
query = prefix + "esummary.fcgi" + "?" + "db=" + db + "&" + "id="
+ ids + "&tool=" + tool + "&email=" + email;
}
return query;
};
self.buildFetchQuery = function(db, ids, retmode, prefix, tool, email) {
var query = "";
if (ids !== undefined && ids.length > 0) {
query = prefix + "efetch.fcgi" + "?" + "db=" + db + "&" + "id="
+ ids + "&retmode=" + retmode + "&tool=" + tool + "&email="
+ email;
}
return query;
};
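// Example (illustrative arguments only):
// self.buildSearchQuery("pubmed", "nature", 0, 20, "json", 2000, 2015,
// "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/", "mytool", "me@example.org")
// returns "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed"
// + "&term=nature%5bjournal%5d+AND+2000:2015%5bpdat%5d&retStart=0&retMax=20"
// + "&retmode=json&tool=mytool&email=me@example.org".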
// Bind twitter typeahead
ko.bindingHandlers.typeahead = {
init : function(element, valueAccessor, allBindingsAccessor, viewModel,
bindingContext) {
var $element = $(element);
var allBindings = allBindingsAccessor();
var substringMatcher = function(strs) {
return function findMatches(q, cb) {
var matches, substrRegex;
// an array that will be populated with substring matches
matches = [];
// regex used to determine if a string contains the
// substring `q`
substrRegex = new RegExp(q, 'i');
// iterate through the pool of strings and for any string
// that
// contains the substring `q`, add it to the `matches` array
$.each(strs, function(i, str) {
if (substrRegex.test(str)) {
// the typeahead jQuery plugin expects suggestions
// to be a
// JavaScript object, refer to typeahead docs for
// more info
matches.push(str);
}
});
cb(matches);
};
};
var typeaheadOpts = {
source : substringMatcher(ko.utils
.unwrapObservable(valueAccessor()))
};
if (allBindings.typeaheadOptions) {
$.each(allBindings.typeaheadOptions, function(optionName,
optionValue) {
typeaheadOpts[optionName] = ko.utils
.unwrapObservable(optionValue);
});
}
$element.attr("autocomplete", "off").typeahead({
hint : true,
highlight : true,
minLength : 1
}, typeaheadOpts);
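		// Hypothetical usage sketch (an assumption, not part of the original
		// file): this binding would typically be attached to an input element
		// and fed an observable array of suggestion strings, e.g.
		//   <input data-bind="typeahead: journalNames, typeaheadOptions: { limit: 10 }" />
		// where journalNames is assumed to be a ko.observableArray of journal titles.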
} | random_line_split |
||
viewModels.js |
parent.internal(true);
parent.allchecked(allTrue);
parent.internal(false);
if (partialTrue && allTrue === false) {
parent.linksIndeterminate(true);
} else {
parent.linksIndeterminate(false);
}
}
};
self.getSearchDataAsync = function(sYear, eYear) {
setTimeout(function() {
console.log("getSearchDataAsync called");
var i26;
var myjournal = self.name();//.replace(/\s+/g, '+');
// var sYear = self.startRange();
// var eYear = self.endRange();
			var queryInfo = {
id : null,
name : "query4",
content : self.currentQuery(),
language : "xml",
error : "",
count : 0,
retMax : 0,
fetchItems : self.fetchItems(),
database : self.database(),
retmode : self.retmode(),
rettype : self.rettype(),
prefix : self.parent().prefix(),
tool : self.parent().tool(),
email : self.parent().email(),
term : myjournal + "[journal]" + " AND " + sYear + ":" + eYear + "[pdat]"
}
			console.log(queryInfo);
var args = {
url : 'jds/queries',
type : 'POST',
dataType : 'json',
contentType : 'application/json',
				data : JSON.stringify(queryInfo),
success : function(largeLoad) {
console.log(largeLoad);
/*
* Do this next part on the server side too. if (typeof
* largeLoad.error === 'undefined') { var esearchresult = $
* (largeLoad).find ( "esearchresult"); $ (largeLoad) .find
* ("esearchresult") .each ( function() { var error = $ (
* largeLoad) .find ( "ERROR") .text (); var ecount = $ (
* largeLoad) .find ( "Count:first"); var eretMax = $ (
* largeLoad) .find ( "RetMax"); $ (largeLoad) .find (
* "IdList") .each ( function( index) { var ids = $ ( this)
* .text () .replace ( /\n+/g, ','); self .fetchItems (ids
* .substring ( 1, ids.length - 1)); }); self.error (error);
* self.count (ecount .text ()); self .retMax (eretMax .text
* ()); }); } else { alert ('ERRORS: ' + largeLoad.error); }
*/
},
error : function(xhr, ajaxOptions, thrownError) {
console.log(xhr);
console.log(ajaxOptions);
console.log(thrownError);
},
complete : function() {
self.retStart(Number(self.retStart())
+ Number(self.retMax()));
var fetchquery = self.parent().buildFetchQuery(
self.database(), self.fetchItems(), self.retmode(),
self.parent().prefix(), self.parent().tool(),
self.parent().email());
					self.currentQuery(fetchquery);
self.datatype("XML");
self.phase('fetch');
self.getFetchDataAsync();
}
};
$.ajax(args);
}, 100);
};
self.getFetchDataAsync = function() {
setTimeout(function() {
console.log("getFetchDataAsync called");
console.log(self.currentQuery());
$.ajax({
url : self.currentQuery(),
type : 'POST',
dataType : "text",
// contentType : "application/text",
async : true,
success : function(largeLoad) {
if (typeof largeLoad.error === 'undefined') {
var esearchresult = $(largeLoad).find("esearchresult");
self.currentQuery("jds/files");
self.data(largeLoad);
} else {
alert('ERRORS: ' + largeLoad.error);
}
},
error : function(xhr, ajaxOptions, thrownError) {
console.log(xhr);
console.log(ajaxOptions);
console.log(thrownError);
},
complete : function() {
self.phase("upload");
self.uploadDataAsync();
},
beforeSend : setHeader
});
}, 100);
};
self.uploadDataAsync = function() {
setTimeout(function() {
var inputjson = {
"filename" : self.parent().fileName(),
"data" : self.data()
};
var jsonString = JSON.stringify(inputjson);
$.ajax({
url : 'jds/files',
type : 'POST',
data : jsonString,
dataType : "text",
contentType : "application/json",
async : true,
success : function(largeLoad) {
},
error : function(xhr, ajaxOptions, thrownError) {
console.log(xhr);
console.log(ajaxOptions);
console.log(thrownError);
},
complete : function() {
self.phase("search");
self.parent().canRunSearchQuery(true);
self.parent().tryQuery();
},
beforeSend : setUploadHeader
});
}, 100);
};
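	// Added commentary (not in the original source): each checked journal cycles
	// through three phases. getSearchDataAsync posts the esearch query info to
	// jds/queries and, on completion, advances retStart and switches to the
	// 'fetch' phase; getFetchDataAsync runs the efetch query and switches to
	// 'upload'; uploadDataAsync posts the fetched data to jds/files, returns the
	// phase to 'search', re-enables the parent's canRunSearchQuery flag and
	// calls tryQuery() to continue with the next batch.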
};
/**
* Creates a search menu view model.
*
* @param database
* the search database
* @param journals
* the search journals
* @param keywordSearch
* the keyword search
* @param query
* the query
*/
var SearchVm = function(database, databases, journals, startRange, endRange,
query, tool, email) {
'use strict';
self.databases = window.ko.observableArray(databases);
self.journals = window.ko.observableArray(journals);
self.files = window.ko.observableArray([]);
self.newJournal = window.ko.observable("");
self.newDatabase = window.ko.observable("pubmed");
self.startRange = window.ko.observable(startRange);
self.endRange = window.ko.observable(endRange);
self.fileName = window.ko.observable("savefilename.xml");
self.type = window.ko.observable("Search");
self.query = window.ko.observable(query);
self.stillMoreQueries = window.ko.observable(false);
self.prefix = window.ko
.observable("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/");
self.tool = window.ko.observable(tool);
self.email = window.ko.observable(email);
self.canRunSearchQuery = window.ko.observable(true);
self.intervalId = window.ko.observable(0);
self.searchButton = window.ko.observable('Start Search');
self.toggleSearchButton = function() {
if (self.searchButton() === 'Start Search') {
self.searchButton('Stop Search');
self.startCanRunQueryToggler();
} else {
clearInterval(self.intervalId());
self.searchButton('Start Search');
}
}
self.startCanRunQueryToggler = function() {
clearInterval(self.intervalId());
var id = setInterval(function() {
self.tryQuery();
self.getResultFilesAsync();
}, 2000);
self.intervalId(id);
};
self.tryQuery = function() {
console.log("tryQuery called");
var arrayIndex = 0;
var count = 0;
for (arrayIndex; arrayIndex < self.journals().length; arrayIndex++) {
if (self.canRunSearchQuery() === false) {
break;
}
var currentjournal = self.journals()[arrayIndex];
			count = currentjournal.count() !== 0 ? currentjournal.count()
: 1;
var retStart = currentjournal.retStart();
if (currentjournal.checked() === true
&& (currentjournal.retStart() === 0 || currentjournal
.retStart() < currentjournal.count())) {
self.canRunSearchQuery(false);
currentjournal.value(Math.floor((retStart / count) * 100));
var searchquery = self.buildSearchQuery(currentjournal
.database(), currentjournal.name(), currentjournal
.retStart(), currentjournal.retMax(), currentjournal
.retmode(), self.startRange(), self.endRange(), self
.prefix(), self.tool(), self.email());
currentjournal.currentQuery(searchquery);
currentjournal.datatype("XML");
currentjournal.getSearchDataAsync(self.startRange(), self.endRange());
}
}
};
self.getResultFilesAsync = function() {
// setTimeout(function() {
// $.ajax({
// url : "jds/files",
// type : 'GET',
// dataType : "json",
// contentType : "application/json",
// async : true,
// success : function(largeLoad) {
// if (typeof largeLoad.error === 'undefined') {
// self.files(largeLoad);
// } else {
// alert('ERRORS: ' + largeLoad.error);
// }
// },
// error : function(xhr, ajaxOptions, thrownError) {
// console.log(xhr);
// console.log(ajaxOptions);
// console.log(thrownError);
// },
// complete : function() {
// },
// // beforeSend : setHeader
// });
// }, 100);
};
self | {
if (catlinks[j].checked() === true) {
partialTrue = true;
} else {
allTrue = false;
}
} | conditional_block |
|
window.go | .mu.Unlock()
var ps image.Point
ps.X, ps.Y = w.glw.GetPos()
w.Pos = ps
return ps
}
func (w *windowImpl) PhysicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.PhysDPI
}
func (w *windowImpl) LogicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.LogDPI
}
func (w *windowImpl) SetLogicalDPI(dpi float32) {
w.mu.Lock()
defer w.mu.Unlock()
w.LogDPI = dpi
}
func (w *windowImpl) SetTitle(title string) {
if w.IsClosed() {
return
}
w.Titl = title
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetTitle(title)
})
}
func (w *windowImpl) SetSize(sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
})
}
func (w *windowImpl) SetPixSize(sz image.Point) {
if w.IsClosed() {
return
}
sc := w.getScreen()
sz.X = int(float32(sz.X) / sc.DevicePixelRatio)
sz.Y = int(float32(sz.Y) / sc.DevicePixelRatio)
w.SetSize(sz)
}
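// Illustrative note (added, not in the original source): SetPixSize converts a
// raw pixel size into screen units before delegating to SetSize; e.g. on a
// screen with DevicePixelRatio 2, SetPixSize(image.Point{1920, 1080}) results
// in SetSize(image.Point{960, 540}).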
func (w *windowImpl) SetPos(pos image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetPos(pos.X, pos.Y)
})
}
func (w *windowImpl) SetGeom(pos image.Point, sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
w.glw.SetPos(pos.X, pos.Y)
})
}
func (w *windowImpl) show() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.Show()
})
}
func (w *windowImpl) Raise() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
if bitflag.HasAtomic(&w.Flag, int(oswin.Minimized)) {
w.glw.Restore()
} else {
w.glw.Focus()
}
})
}
func (w *windowImpl) Minimize() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.Iconify()
})
}
func (w *windowImpl) SetCloseReqFunc(fun func(win oswin.Window)) {
w.mu.Lock()
defer w.mu.Unlock()
w.closeReqFunc = fun
}
func (w *windowImpl) SetCloseCleanFunc(fun func(win oswin.Window)) {
w.mu.Lock()
defer w.mu.Unlock()
w.closeCleanFunc = fun
}
func (w *windowImpl) CloseReq() {
if theApp.quitting {
w.Close()
}
if w.closeReqFunc != nil {
w.closeReqFunc(w)
} else {
w.Close()
}
}
func (w *windowImpl) CloseClean() {
if w.closeCleanFunc != nil {
w.closeCleanFunc(w)
}
}
func (w *windowImpl) Close() {
// this is actually the final common pathway for closing here
w.mu.Lock()
w.winClose <- struct{}{} // break out of draw loop
w.CloseClean()
// fmt.Printf("sending close event to window: %v\n", w.Nm)
w.sendWindowEvent(window.Close)
theApp.DeleteWin(w)
w.app.RunOnMain(func() {
if w.winTex != nil {
w.winTex.Delete()
w.winTex = nil
}
if w.drawQuads != nil {
w.drawQuads.Delete()
w.drawQuads = nil
}
if w.fillQuads != nil {
w.fillQuads.Delete()
w.fillQuads = nil
}
w.glw.Destroy()
w.glw = nil // marks as closed for all other calls
})
if theApp.quitting {
theApp.quitCloseCnt <- struct{}{}
}
w.mu.Unlock()
}
func (w *windowImpl) SetMousePos(x, y float64) {
if !w.IsVisible() {
return
}
w.mu.Lock()
defer w.mu.Unlock()
if theApp.Platform() == oswin.MacOS {
w.glw.SetCursorPos(x/float64(w.DevPixRatio), y/float64(w.DevPixRatio))
} else {
w.glw.SetCursorPos(x, y)
}
}
func (w *windowImpl) SetCursorEnabled(enabled, raw bool) {
if enabled {
w.mouseDisabled = false
w.glw.SetInputMode(glfw.CursorMode, glfw.CursorNormal)
} else {
w.mouseDisabled = true
w.glw.SetInputMode(glfw.CursorMode, glfw.CursorDisabled)
if raw && glfw.RawMouseMotionSupported() {
w.glw.SetInputMode(glfw.RawMouseMotion, glfw.True)
}
}
}
/////////////////////////////////////////////////////////
// Window Callbacks
func (w *windowImpl) getScreen() *oswin.Screen {
if w == nil || w.glw == nil {
return theApp.screens[0]
}
w.mu.Lock()
var sc *oswin.Screen
	mon := w.glw.GetMonitor() // this returns nil for windowed (non-fullscreen) windows -- i.e., most windows --
	// which makes it of little use here.
if mon != nil {
if monitorDebug {
log.Printf("glos window: %v getScreen() -- got screen: %v\n", w.Nm, mon.GetName())
}
sc = theApp.ScreenByName(mon.GetName())
if sc == nil {
log.Printf("glos getScreen: could not find screen of name: %v\n", mon.GetName())
sc = theApp.screens[0]
}
} else {
sc = theApp.ScreenByName(w.scrnName)
got := false
if sc == nil || w.DevPixRatio != sc.DevicePixelRatio {
for _, scc := range theApp.screens {
if w.DevPixRatio == scc.DevicePixelRatio {
sc = scc
got = true
if monitorDebug {
log.Printf("glos window: %v getScreen(): matched pix ratio %v for screen: %v\n", w.Nm, w.DevPixRatio, sc.Name)
}
w.LogDPI = sc.LogicalDPI
break
}
}
if !got {
sc = theApp.screens[0]
w.LogDPI = sc.LogicalDPI
if monitorDebug {
log.Printf("glos window: %v getScreen(): reverting to first screen %v\n", w.Nm, sc.Name)
}
}
}
}
w.scrnName = sc.Name
w.PhysDPI = sc.PhysicalDPI
w.DevPixRatio = sc.DevicePixelRatio
if w.LogDPI == 0 {
w.LogDPI = sc.LogicalDPI
}
w.mu.Unlock()
return sc
}
func (w *windowImpl) moved(gw *glfw.Window, x, y int) {
w.mu.Lock()
w.Pos = image.Point{x, y}
w.mu.Unlock()
w.getScreen()
w.sendWindowEvent(window.Move)
}
func (w *windowImpl) winResized(gw *glfw.Window, width, height int) {
w.updtGeom()
}
func (w *windowImpl) updtGeom() {
w.mu.Lock()
cscx, _ := w.glw.GetContentScale()
// curDevPixRatio := w.DevPixRatio
w.DevPixRatio = cscx
// if curDevPixRatio != w.DevPixRatio {
// fmt.Printf("got cont scale: %v\n", cscx)
// }
cursc := w.scrnName
w.mu.Unlock()
sc := w.getScreen()
w.mu.Lock()
var wsz image.Point | random_line_split |
||
window.go | z := opts.Size // note: this is already in standard window size units!
win, err := glfw.CreateWindow(sz.X, sz.Y, opts.GetTitle(), nil, theApp.shareWin)
if err != nil {
return win, err
}
win.SetPos(opts.Pos.X, opts.Pos.Y)
return win, err
}
// for sending window.Event's
func (w *windowImpl) sendWindowEvent(act window.Actions) {
winEv := window.Event{
Action: act,
}
winEv.Init()
w.Send(&winEv)
}
// NextEvent implements the oswin.EventDeque interface.
func (w *windowImpl) NextEvent() oswin.Event {
e := w.Deque.NextEvent()
return e
}
// winLoop is the window's own locked processing loop.
func (w *windowImpl) winLoop() {
outer:
for {
select {
case <-w.winClose:
break outer
case f := <-w.runQueue:
if w.glw == nil {
break outer
}
f.f()
if f.done != nil {
f.done <- true
}
case <-w.publish:
if w.glw == nil {
break outer
}
if !theApp.noScreens {
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
w.glw.SwapBuffers() // note: implicitly does a flush
// note: generally don't need this:
// gpu.Draw.Clear(true, true)
})
w.publishDone <- struct{}{}
}
}
}
}
// RunOnWin runs given function on the window's unique locked thread.
func (w *windowImpl) RunOnWin(f func()) {
if w.IsClosed() {
return
}
done := make(chan bool)
w.runQueue <- funcRun{f: f, done: done}
<-done
}
// GoRunOnWin runs given function on window's unique locked thread and returns immediately
func (w *windowImpl) GoRunOnWin(f func()) {
if w.IsClosed() {
return
}
go func() {
w.runQueue <- funcRun{f: f, done: nil}
}()
}
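// Usage sketch (an assumption, not from the original file): RunOnWin blocks
// until f has executed on the window's locked thread, while GoRunOnWin queues
// f and returns immediately, e.g.
//
//	w.RunOnWin(func() { /* work that must run on the window thread */ })
//	w.GoRunOnWin(func() { /* fire-and-forget variant */ })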
// Publish does the equivalent of SwapBuffers on OpenGL: pushes the
// current rendered back-buffer to the front (and ensures that any
// ongoing rendering has completed) (see also PublishTex)
func (w *windowImpl) Publish() {
if !w.IsVisible() {
return
}
glfw.PostEmptyEvent()
w.publish <- struct{}{}
<-w.publishDone
glfw.PostEmptyEvent()
}
// PublishTex draws the current WinTex texture to the window and then
// calls Publish() -- this is the typical update call.
func (w *windowImpl) PublishTex() {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() || w.winTex == nil {
return
}
w.Copy(image.ZP, w.winTex, w.winTex.Bounds(), oswin.Src, nil)
})
w.Publish()
}
// SendEmptyEvent sends an empty, blank event to this window, which just has
// the effect of pushing the system along during cases when the window
// event loop needs to be "pinged" to get things moving along..
func (w *windowImpl) SendEmptyEvent() {
if w.IsClosed() {
return
}
oswin.SendCustomEvent(w, nil)
glfw.PostEmptyEvent() // for good measure
}
// WinTex() returns the current Texture of the same size as the window that
// is typically used to update the window contents.
// Use the various Drawer and SetSubImage methods to update this Texture, and
// then call PublishTex() to update the window.
// This Texture is automatically resized when the window is resized, and
// when that occurs, existing contents are lost -- a full update of the
// Texture at the current size is required at that point.
func (w *windowImpl) WinTex() oswin.Texture {
return w.winTex
}
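// Hypothetical usage sketch (assumptions: w is a *windowImpl, or the
// oswin.Window it implements, and img is an image.Image that fits inside the
// window): the typical update pattern described above is to write into WinTex
// and then publish it, e.g.
//
//	if w.WinTex() != nil {
//		w.SetWinTexSubImage(image.Point{}, img, img.Bounds())
//	}
//	w.PublishTex()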
// SetWinTexSubImage calls SetSubImage on WinTex with given parameters.
// convenience routine that activates the window context and runs on the
// window's thread.
func (w *windowImpl) SetWinTexSubImage(dp image.Point, src image.Image, sr image.Rectangle) error {
if !w.IsVisible() {
return nil
}
var err error
theApp.RunOnMain(func() {
if !w.Activate() || w.winTex == nil {
return
}
err = w.winTex.SetSubImage(dp, src, sr)
})
return err
}
////////////////////////////////////////////////
// Drawer wrappers
func (w *windowImpl) Draw(src2dst mat32.Mat3, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.drawQuads == nil {
w.drawQuads = theApp.drawQuadsBuff()
}
sz := w.Size()
theApp.draw(sz, src2dst, src, sr, op, opts, w.drawQuads, true) // true = dest has botZero
})
}
func (w *windowImpl) DrawUniform(src2dst mat32.Mat3, src color.Color, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.fillQuads == nil {
w.fillQuads = theApp.fillQuadsBuff()
}
sz := w.Size()
theApp.drawUniform(sz, src2dst, src, sr, op, opts, w.fillQuads, true) // true = dest has botZero
})
}
func (w *windowImpl) Copy(dp image.Point, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
drawer.Copy(w, dp, src, sr, op, opts)
}
func (w *windowImpl) Scale(dr image.Rectangle, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
drawer.Scale(w, dr, src, sr, op, opts)
}
func (w *windowImpl) Fill(dr image.Rectangle, src color.Color, op draw.Op) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.fillQuads == nil {
w.fillQuads = theApp.fillQuadsBuff()
}
sz := w.Size()
theApp.fillRect(sz, dr, src, op, w.fillQuads, true) // true = dest has botZero
})
}
////////////////////////////////////////////////////////////
// Geom etc
func (w *windowImpl) Screen() *oswin.Screen {
sc := w.getScreen()
return sc
}
func (w *windowImpl) Size() image.Point {
// w.mu.Lock() // this prevents race conditions but also locks up
// defer w.mu.Unlock()
return w.PxSize
}
func (w *windowImpl) WinSize() image.Point {
// w.mu.Lock()
// defer w.mu.Unlock()
return w.WnSize
}
func (w *windowImpl) Position() image.Point {
w.mu.Lock()
defer w.mu.Unlock()
var ps image.Point
ps.X, ps.Y = w.glw.GetPos()
w.Pos = ps
return ps
}
func (w *windowImpl) PhysicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.PhysDPI
}
func (w *windowImpl) LogicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.LogDPI
}
func (w *windowImpl) SetLogicalDPI(dpi float32) {
w.mu.Lock()
defer w.mu.Unlock()
w.LogDPI = dpi
}
func (w *windowImpl) SetTitle(title string) {
if w.IsClosed() {
return
}
w.Titl = title
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetTitle(title)
})
}
func (w *windowImpl) SetSize(sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
})
}
func (w *windowImpl) | SetPixSize | identifier_name |
|
window.go | windowImpl) Scale(dr image.Rectangle, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
drawer.Scale(w, dr, src, sr, op, opts)
}
func (w *windowImpl) Fill(dr image.Rectangle, src color.Color, op draw.Op) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.fillQuads == nil {
w.fillQuads = theApp.fillQuadsBuff()
}
sz := w.Size()
theApp.fillRect(sz, dr, src, op, w.fillQuads, true) // true = dest has botZero
})
}
////////////////////////////////////////////////////////////
// Geom etc
func (w *windowImpl) Screen() *oswin.Screen {
sc := w.getScreen()
return sc
}
func (w *windowImpl) Size() image.Point {
// w.mu.Lock() // this prevents race conditions but also locks up
// defer w.mu.Unlock()
return w.PxSize
}
func (w *windowImpl) WinSize() image.Point {
// w.mu.Lock()
// defer w.mu.Unlock()
return w.WnSize
}
func (w *windowImpl) Position() image.Point {
w.mu.Lock()
defer w.mu.Unlock()
var ps image.Point
ps.X, ps.Y = w.glw.GetPos()
w.Pos = ps
return ps
}
func (w *windowImpl) PhysicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.PhysDPI
}
func (w *windowImpl) LogicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.LogDPI
}
func (w *windowImpl) SetLogicalDPI(dpi float32) {
w.mu.Lock()
defer w.mu.Unlock()
w.LogDPI = dpi
}
func (w *windowImpl) SetTitle(title string) {
if w.IsClosed() {
return
}
w.Titl = title
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetTitle(title)
})
}
func (w *windowImpl) SetSize(sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
})
}
func (w *windowImpl) SetPixSize(sz image.Point) {
if w.IsClosed() {
return
}
sc := w.getScreen()
sz.X = int(float32(sz.X) / sc.DevicePixelRatio)
sz.Y = int(float32(sz.Y) / sc.DevicePixelRatio)
w.SetSize(sz)
}
func (w *windowImpl) SetPos(pos image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetPos(pos.X, pos.Y)
})
}
func (w *windowImpl) SetGeom(pos image.Point, sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
w.glw.SetPos(pos.X, pos.Y)
})
}
func (w *windowImpl) show() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.Show()
})
}
func (w *windowImpl) Raise() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
if bitflag.HasAtomic(&w.Flag, int(oswin.Minimized)) {
w.glw.Restore()
} else {
w.glw.Focus()
}
})
}
func (w *windowImpl) Minimize() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.Iconify()
})
}
func (w *windowImpl) SetCloseReqFunc(fun func(win oswin.Window)) {
w.mu.Lock()
defer w.mu.Unlock()
w.closeReqFunc = fun
}
func (w *windowImpl) SetCloseCleanFunc(fun func(win oswin.Window)) {
w.mu.Lock()
defer w.mu.Unlock()
w.closeCleanFunc = fun
}
func (w *windowImpl) CloseReq() {
if theApp.quitting {
w.Close()
}
if w.closeReqFunc != nil {
w.closeReqFunc(w)
} else {
w.Close()
}
}
func (w *windowImpl) CloseClean() {
if w.closeCleanFunc != nil {
w.closeCleanFunc(w)
}
}
func (w *windowImpl) Close() {
// this is actually the final common pathway for closing here
w.mu.Lock()
w.winClose <- struct{}{} // break out of draw loop
w.CloseClean()
// fmt.Printf("sending close event to window: %v\n", w.Nm)
w.sendWindowEvent(window.Close)
theApp.DeleteWin(w)
w.app.RunOnMain(func() {
if w.winTex != nil {
w.winTex.Delete()
w.winTex = nil
}
if w.drawQuads != nil {
w.drawQuads.Delete()
w.drawQuads = nil
}
if w.fillQuads != nil {
w.fillQuads.Delete()
w.fillQuads = nil
}
w.glw.Destroy()
w.glw = nil // marks as closed for all other calls
})
if theApp.quitting {
theApp.quitCloseCnt <- struct{}{}
}
w.mu.Unlock()
}
func (w *windowImpl) SetMousePos(x, y float64) {
if !w.IsVisible() {
return
}
w.mu.Lock()
defer w.mu.Unlock()
if theApp.Platform() == oswin.MacOS {
w.glw.SetCursorPos(x/float64(w.DevPixRatio), y/float64(w.DevPixRatio))
} else {
w.glw.SetCursorPos(x, y)
}
}
func (w *windowImpl) SetCursorEnabled(enabled, raw bool) {
if enabled {
w.mouseDisabled = false
w.glw.SetInputMode(glfw.CursorMode, glfw.CursorNormal)
} else {
w.mouseDisabled = true
w.glw.SetInputMode(glfw.CursorMode, glfw.CursorDisabled)
if raw && glfw.RawMouseMotionSupported() {
w.glw.SetInputMode(glfw.RawMouseMotion, glfw.True)
}
}
}
/////////////////////////////////////////////////////////
// Window Callbacks
func (w *windowImpl) getScreen() *oswin.Screen {
if w == nil || w.glw == nil {
return theApp.screens[0]
}
w.mu.Lock()
var sc *oswin.Screen
	mon := w.glw.GetMonitor() // this returns nil for windowed (non-fullscreen) windows -- i.e., most windows --
	// which makes it of little use here.
if mon != nil {
if monitorDebug {
log.Printf("glos window: %v getScreen() -- got screen: %v\n", w.Nm, mon.GetName())
}
sc = theApp.ScreenByName(mon.GetName())
if sc == nil {
log.Printf("glos getScreen: could not find screen of name: %v\n", mon.GetName())
sc = theApp.screens[0]
}
} else {
sc = theApp.ScreenByName(w.scrnName)
got := false
if sc == nil || w.DevPixRatio != sc.DevicePixelRatio {
for _, scc := range theApp.screens {
if w.DevPixRatio == scc.DevicePixelRatio | {
sc = scc
got = true
if monitorDebug {
log.Printf("glos window: %v getScreen(): matched pix ratio %v for screen: %v\n", w.Nm, w.DevPixRatio, sc.Name)
}
w.LogDPI = sc.LogicalDPI
break
} | conditional_block |
|
window.go | Tex texture to the window and then
// calls Publish() -- this is the typical update call.
func (w *windowImpl) PublishTex() {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() || w.winTex == nil {
return
}
w.Copy(image.ZP, w.winTex, w.winTex.Bounds(), oswin.Src, nil)
})
w.Publish()
}
// SendEmptyEvent sends an empty, blank event to this window, which just has
// the effect of pushing the system along during cases when the window
// event loop needs to be "pinged" to get things moving along..
func (w *windowImpl) SendEmptyEvent() {
if w.IsClosed() {
return
}
oswin.SendCustomEvent(w, nil)
glfw.PostEmptyEvent() // for good measure
}
// WinTex() returns the current Texture of the same size as the window that
// is typically used to update the window contents.
// Use the various Drawer and SetSubImage methods to update this Texture, and
// then call PublishTex() to update the window.
// This Texture is automatically resized when the window is resized, and
// when that occurs, existing contents are lost -- a full update of the
// Texture at the current size is required at that point.
func (w *windowImpl) WinTex() oswin.Texture {
return w.winTex
}
// SetWinTexSubImage calls SetSubImage on WinTex with given parameters.
// convenience routine that activates the window context and runs on the
// window's thread.
func (w *windowImpl) SetWinTexSubImage(dp image.Point, src image.Image, sr image.Rectangle) error {
if !w.IsVisible() {
return nil
}
var err error
theApp.RunOnMain(func() {
if !w.Activate() || w.winTex == nil {
return
}
err = w.winTex.SetSubImage(dp, src, sr)
})
return err
}
////////////////////////////////////////////////
// Drawer wrappers
func (w *windowImpl) Draw(src2dst mat32.Mat3, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.drawQuads == nil {
w.drawQuads = theApp.drawQuadsBuff()
}
sz := w.Size()
theApp.draw(sz, src2dst, src, sr, op, opts, w.drawQuads, true) // true = dest has botZero
})
}
func (w *windowImpl) DrawUniform(src2dst mat32.Mat3, src color.Color, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.fillQuads == nil {
w.fillQuads = theApp.fillQuadsBuff()
}
sz := w.Size()
theApp.drawUniform(sz, src2dst, src, sr, op, opts, w.fillQuads, true) // true = dest has botZero
})
}
func (w *windowImpl) Copy(dp image.Point, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
drawer.Copy(w, dp, src, sr, op, opts)
}
func (w *windowImpl) Scale(dr image.Rectangle, src oswin.Texture, sr image.Rectangle, op draw.Op, opts *oswin.DrawOptions) {
if !w.IsVisible() {
return
}
drawer.Scale(w, dr, src, sr, op, opts)
}
func (w *windowImpl) Fill(dr image.Rectangle, src color.Color, op draw.Op) {
if !w.IsVisible() {
return
}
theApp.RunOnMain(func() {
if !w.Activate() {
return
}
gpu.TheGPU.RenderToWindow()
gpu.Draw.Viewport(image.Rectangle{Max: w.PxSize})
if w.fillQuads == nil {
w.fillQuads = theApp.fillQuadsBuff()
}
sz := w.Size()
theApp.fillRect(sz, dr, src, op, w.fillQuads, true) // true = dest has botZero
})
}
////////////////////////////////////////////////////////////
// Geom etc
func (w *windowImpl) Screen() *oswin.Screen {
sc := w.getScreen()
return sc
}
func (w *windowImpl) Size() image.Point {
// w.mu.Lock() // this prevents race conditions but also locks up
// defer w.mu.Unlock()
return w.PxSize
}
func (w *windowImpl) WinSize() image.Point {
// w.mu.Lock()
// defer w.mu.Unlock()
return w.WnSize
}
func (w *windowImpl) Position() image.Point {
w.mu.Lock()
defer w.mu.Unlock()
var ps image.Point
ps.X, ps.Y = w.glw.GetPos()
w.Pos = ps
return ps
}
func (w *windowImpl) PhysicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.PhysDPI
}
func (w *windowImpl) LogicalDPI() float32 {
w.mu.Lock()
defer w.mu.Unlock()
return w.LogDPI
}
func (w *windowImpl) SetLogicalDPI(dpi float32) {
w.mu.Lock()
defer w.mu.Unlock()
w.LogDPI = dpi
}
func (w *windowImpl) SetTitle(title string) {
if w.IsClosed() {
return
}
w.Titl = title
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetTitle(title)
})
}
func (w *windowImpl) SetSize(sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
})
}
func (w *windowImpl) SetPixSize(sz image.Point) {
if w.IsClosed() {
return
}
sc := w.getScreen()
sz.X = int(float32(sz.X) / sc.DevicePixelRatio)
sz.Y = int(float32(sz.Y) / sc.DevicePixelRatio)
w.SetSize(sz)
}
func (w *windowImpl) SetPos(pos image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetPos(pos.X, pos.Y)
})
}
func (w *windowImpl) SetGeom(pos image.Point, sz image.Point) {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.SetSize(sz.X, sz.Y)
w.glw.SetPos(pos.X, pos.Y)
})
}
func (w *windowImpl) show() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.Show()
})
}
func (w *windowImpl) Raise() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
if bitflag.HasAtomic(&w.Flag, int(oswin.Minimized)) {
w.glw.Restore()
} else {
w.glw.Focus()
}
})
}
func (w *windowImpl) Minimize() {
if w.IsClosed() {
return
}
// note: anything run on main only doesn't need lock -- implicit lock
w.app.RunOnMain(func() {
if w.glw == nil { // by time we got to main, could be diff
return
}
w.glw.Iconify()
})
}
func (w *windowImpl) SetCloseReqFunc(fun func(win oswin.Window)) | {
w.mu.Lock()
defer w.mu.Unlock()
w.closeReqFunc = fun
} | identifier_body |
|
translate.rs |
pub struct CharSetCase {
pub start: char,
pub end: char
}
pub struct TaggedExpr {
pub name: Option<String>,
pub expr: Box<Expr>,
}
pub enum Expr {
AnyCharExpr,
LiteralExpr(String),
CharSetExpr(bool, Vec<CharSetCase>),
RuleExpr(String),
SequenceExpr(Vec<Expr>),
ChoiceExpr(Vec<Expr>),
OptionalExpr(Box<Expr>),
Repeat(Box<Expr>, /*min*/ uint, /*max*/ Option<uint>, /*sep*/ Option<Box<Expr>>),
PosAssertExpr(Box<Expr>),
NegAssertExpr(Box<Expr>),
ActionExpr(Vec<TaggedExpr>, String),
}
pub fn compile_grammar(ctxt: &rustast::ExtCtxt, grammar: &Grammar) -> rustast::P<rustast::Mod> {
let mut imports = grammar.imports.clone();
imports.push(RustUseGlob("self::ParseResult".to_string()));
let view_items = translate_view_items(ctxt, imports.as_slice());
let items = header_items(ctxt).into_iter()
.chain(grammar.rules.iter().map(|rule|{
compile_rule(ctxt, rule)
}))
.chain(grammar.rules.iter().filter(|rule| rule.exported).map(|rule| {
compile_rule_export(ctxt, rule)
}))
.collect::<Vec<_>>();
rustast::module(view_items, items)
}
pub fn translate_view_items(ctxt: &rustast::ExtCtxt, imports: &[RustUse]) -> Vec<rustast::ViewItem> {
imports.iter().map(| i |{
match *i {
RustUseSimple(ref p) => ctxt.view_use_simple(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path(p.as_slice())),
RustUseGlob(ref p) => ctxt.view_use_glob(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path_vec(p.as_slice())),
RustUseList(ref p, ref v) => ctxt.view_use_list(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path_vec(p.as_slice()),
v.iter().map(|s| rustast::str_to_ident(s.as_slice())).collect::<Vec<_>>().as_slice()
),
}
}).collect()
}
pub fn header_items(ctxt: &rustast::ExtCtxt) -> Vec<rustast::P<rustast::Item>> {
let mut items = Vec::new();
items.push(quote_item!(ctxt,
enum ParseResult<T> {
Matched(uint, T),
Failed,
}
).unwrap());
items.push(quote_item!(ctxt,
struct ParseState {
max_err_pos: uint,
expected: ::std::collections::HashSet<&'static str>,
}
).unwrap());
items.push(quote_item!(ctxt,
impl ParseState {
fn new() -> ParseState {
ParseState{ max_err_pos: 0, expected: ::std::collections::HashSet::new() }
}
fn mark_failure(&mut self, pos: uint, expected: &'static str) -> ParseResult<()> {
if pos > self.max_err_pos {
self.max_err_pos = pos;
self.expected.clear();
}
if pos == self.max_err_pos {
self.expected.insert(expected);
}
Failed
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn slice_eq(input: &str, state: &mut ParseState, pos: uint, m: &'static str) -> ParseResult<()> {
#![inline]
#![allow(dead_code)]
let l = m.len();
if input.len() >= pos + l && input.as_bytes().slice(pos, pos+l) == m.as_bytes() {
Matched(pos+l, ())
} else {
state.mark_failure(pos, m)
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn any_char(input: &str, state: &mut ParseState, pos: uint) -> ParseResult<()> {
#![inline]
#![allow(dead_code)]
if input.len() > pos {
Matched(input.char_range_at(pos).next, ())
} else {
state.mark_failure(pos, "<character>")
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn pos_to_line(input: &str, pos: uint) -> (uint, uint) {
let mut remaining = pos;
let mut lineno: uint = 1;
for line in input.lines() {
let line_length = line.len() + 1;
if remaining < line_length {
return (lineno, remaining + 1);
}
remaining -= line_length;
lineno += 1;
}
return (lineno, remaining + 1);
}
).unwrap());
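	// Added note (not in the original source): the generated pos_to_line helper
	// maps a byte offset to a 1-based (line, column) pair; e.g. for the input
	// "ab\ncd" an offset of 3 lands on 'c' and yields (2, 1).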
items
}
fn compile_rule(ctxt: &rustast::ExtCtxt, rule: &Rule) -> rustast::P<rustast::Item> {
let name = rustast::str_to_ident(format!("parse_{}", rule.name).as_slice());
let ret = rustast::parse_type(rule.ret_type.as_slice());
let body = compile_expr(ctxt, &*rule.expr, rule.ret_type.as_slice() != "()");
(quote_item!(ctxt,
fn $name<'input>(input: &'input str, state: &mut ParseState, pos: uint) -> ParseResult<$ret> {
$body
}
)).unwrap()
}
fn compile_rule_export(ctxt: &rustast::ExtCtxt, rule: &Rule) -> rustast::P<rustast::Item> {
let name = rustast::str_to_ident(rule.name.as_slice());
let ret = rustast::parse_type(rule.ret_type.as_slice());
let parse_fn = rustast::str_to_ident(format!("parse_{}", rule.name).as_slice());
(quote_item!(ctxt,
pub fn $name<'input>(input: &'input str) -> Result<$ret, String> {
let mut state = ParseState::new();
match $parse_fn(input, &mut state, 0) {
Matched(pos, value) => {
if pos == input.len() {
return Ok(value)
}
}
_ => {}
}
let expected = state.expected.to_string().escape_default();
Err(format!("Error at {}: Expected {}", pos_to_line(input, state.max_err_pos), expected))
}
)).unwrap()
}
fn compile_match_and_then(ctxt: &rustast::ExtCtxt, e: &Expr, value_name: Option<&str>, then: rustast::P<rustast::Expr>) -> rustast::P<rustast::Expr> {
let seq_res = compile_expr(ctxt, e, value_name.is_some());
let name_pat = match value_name {
Some(name) => rustast::str_to_ident(name),
None => rustast::str_to_ident("_")
};
quote_expr!(ctxt, {
let seq_res = $seq_res;
match seq_res {
Matched(pos, $name_pat) => { $then }
Failed => Failed,
}
})
}
fn cond_swap<T>(swap: bool, tup: (T, T)) -> (T, T) {
let (a, b) = tup;
if swap {
(b, a)
} else {
(a, b)
}
}
fn format_char_set(cases: &[CharSetCase]) -> String {
let mut r = "[".into_string();
for &CharSetCase{start, end} in cases.iter() {
r.push(start);
if start != end {
r.push('-');
r.push(end);
}
}
r.push(']');
r
}
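// Added example (not in the original source): format_char_set is used to build
// the "expected" string reported on a parse failure; e.g. the cases {'a'-'z'}
// and {'_'} format as "[a-z_]".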
#[allow(unused_imports)] // quote_tokens! imports things
fn compile_expr(ctxt: &rustast::ExtCtxt, e: &Expr, result_used: bool) -> rustast::P<rustast::Expr> {
match *e {
AnyCharExpr => {
quote_expr!(ctxt, any_char(input, state, pos))
}
LiteralExpr(ref s) => {
let sl = s.as_slice();
quote_expr!(ctxt, slice_eq(input, state, pos, $sl))
}
CharSetExpr(invert, ref cases) => {
let expected_set = format_char_set(cases.as_slice());
let expected_str = expected_set.as_slice();
let (in_set, not_in_set) = cond_swap(invert, (
quote_expr!(ctxt, Matched(next, ())),
quote_expr!(ctxt, state.mark_failure(pos, $expected_str)),
));
let m = ctxt.expr_match(DUMMY_SP, quote_expr!(ctxt, ch), vec!(
ctxt.arm(DUMMY_SP, cases.iter().map(|case| {
if case.start == case.end {
ctxt.pat_lit(DUMMY_SP, ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.start)))
} else {
ctxt.pat(DUMMY_SP, rustast::ast::PatRange(
ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.start)),
ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.end))
))
}
}).collect::<Vec<_>>(), in_set),
| pub exported: bool,
} | random_line_split |
|
translate.rs | {
pub name: String,
pub expr: Box<Expr>,
pub ret_type: String,
pub exported: bool,
}
pub struct CharSetCase {
pub start: char,
pub end: char
}
pub struct TaggedExpr {
pub name: Option<String>,
pub expr: Box<Expr>,
}
pub enum Expr {
AnyCharExpr,
LiteralExpr(String),
CharSetExpr(bool, Vec<CharSetCase>),
RuleExpr(String),
SequenceExpr(Vec<Expr>),
ChoiceExpr(Vec<Expr>),
OptionalExpr(Box<Expr>),
Repeat(Box<Expr>, /*min*/ uint, /*max*/ Option<uint>, /*sep*/ Option<Box<Expr>>),
PosAssertExpr(Box<Expr>),
NegAssertExpr(Box<Expr>),
ActionExpr(Vec<TaggedExpr>, String),
}
pub fn compile_grammar(ctxt: &rustast::ExtCtxt, grammar: &Grammar) -> rustast::P<rustast::Mod> {
let mut imports = grammar.imports.clone();
imports.push(RustUseGlob("self::ParseResult".to_string()));
let view_items = translate_view_items(ctxt, imports.as_slice());
let items = header_items(ctxt).into_iter()
.chain(grammar.rules.iter().map(|rule|{
compile_rule(ctxt, rule)
}))
.chain(grammar.rules.iter().filter(|rule| rule.exported).map(|rule| {
compile_rule_export(ctxt, rule)
}))
.collect::<Vec<_>>();
rustast::module(view_items, items)
}
pub fn translate_view_items(ctxt: &rustast::ExtCtxt, imports: &[RustUse]) -> Vec<rustast::ViewItem> {
imports.iter().map(| i |{
match *i {
RustUseSimple(ref p) => ctxt.view_use_simple(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path(p.as_slice())),
RustUseGlob(ref p) => ctxt.view_use_glob(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path_vec(p.as_slice())),
RustUseList(ref p, ref v) => ctxt.view_use_list(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path_vec(p.as_slice()),
v.iter().map(|s| rustast::str_to_ident(s.as_slice())).collect::<Vec<_>>().as_slice()
),
}
}).collect()
}
pub fn header_items(ctxt: &rustast::ExtCtxt) -> Vec<rustast::P<rustast::Item>> {
let mut items = Vec::new();
items.push(quote_item!(ctxt,
enum ParseResult<T> {
Matched(uint, T),
Failed,
}
).unwrap());
items.push(quote_item!(ctxt,
struct ParseState {
max_err_pos: uint,
expected: ::std::collections::HashSet<&'static str>,
}
).unwrap());
items.push(quote_item!(ctxt,
impl ParseState {
fn new() -> ParseState {
ParseState{ max_err_pos: 0, expected: ::std::collections::HashSet::new() }
}
fn mark_failure(&mut self, pos: uint, expected: &'static str) -> ParseResult<()> {
if pos > self.max_err_pos {
self.max_err_pos = pos;
self.expected.clear();
}
if pos == self.max_err_pos {
self.expected.insert(expected);
}
Failed
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn slice_eq(input: &str, state: &mut ParseState, pos: uint, m: &'static str) -> ParseResult<()> {
#![inline]
#![allow(dead_code)]
let l = m.len();
if input.len() >= pos + l && input.as_bytes().slice(pos, pos+l) == m.as_bytes() {
Matched(pos+l, ())
} else {
state.mark_failure(pos, m)
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn any_char(input: &str, state: &mut ParseState, pos: uint) -> ParseResult<()> {
#![inline]
#![allow(dead_code)]
if input.len() > pos {
Matched(input.char_range_at(pos).next, ())
} else {
state.mark_failure(pos, "<character>")
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn pos_to_line(input: &str, pos: uint) -> (uint, uint) {
let mut remaining = pos;
let mut lineno: uint = 1;
for line in input.lines() {
let line_length = line.len() + 1;
if remaining < line_length {
return (lineno, remaining + 1);
}
remaining -= line_length;
lineno += 1;
}
return (lineno, remaining + 1);
}
).unwrap());
items
}
fn compile_rule(ctxt: &rustast::ExtCtxt, rule: &Rule) -> rustast::P<rustast::Item> {
let name = rustast::str_to_ident(format!("parse_{}", rule.name).as_slice());
let ret = rustast::parse_type(rule.ret_type.as_slice());
let body = compile_expr(ctxt, &*rule.expr, rule.ret_type.as_slice() != "()");
(quote_item!(ctxt,
fn $name<'input>(input: &'input str, state: &mut ParseState, pos: uint) -> ParseResult<$ret> {
$body
}
)).unwrap()
}
fn compile_rule_export(ctxt: &rustast::ExtCtxt, rule: &Rule) -> rustast::P<rustast::Item> {
let name = rustast::str_to_ident(rule.name.as_slice());
let ret = rustast::parse_type(rule.ret_type.as_slice());
let parse_fn = rustast::str_to_ident(format!("parse_{}", rule.name).as_slice());
(quote_item!(ctxt,
pub fn $name<'input>(input: &'input str) -> Result<$ret, String> {
let mut state = ParseState::new();
match $parse_fn(input, &mut state, 0) {
Matched(pos, value) => {
if pos == input.len() {
return Ok(value)
}
}
_ => {}
}
let expected = state.expected.to_string().escape_default();
Err(format!("Error at {}: Expected {}", pos_to_line(input, state.max_err_pos), expected))
}
)).unwrap()
}
fn compile_match_and_then(ctxt: &rustast::ExtCtxt, e: &Expr, value_name: Option<&str>, then: rustast::P<rustast::Expr>) -> rustast::P<rustast::Expr> {
let seq_res = compile_expr(ctxt, e, value_name.is_some());
let name_pat = match value_name {
Some(name) => rustast::str_to_ident(name),
None => rustast::str_to_ident("_")
};
quote_expr!(ctxt, {
let seq_res = $seq_res;
match seq_res {
Matched(pos, $name_pat) => { $then }
Failed => Failed,
}
})
}
fn cond_swap<T>(swap: bool, tup: (T, T)) -> (T, T) {
let (a, b) = tup;
if swap {
(b, a)
} else {
(a, b)
}
}
fn format_char_set(cases: &[CharSetCase]) -> String {
let mut r = "[".into_string();
for &CharSetCase{start, end} in cases.iter() {
r.push(start);
if start != end {
r.push('-');
r.push(end);
}
}
r.push(']');
r
}
#[allow(unused_imports)] // quote_tokens! imports things
fn compile_expr(ctxt: &rustast::ExtCtxt, e: &Expr, result_used: bool) -> rustast::P<rustast::Expr> {
match *e {
AnyCharExpr => {
quote_expr!(ctxt, any_char(input, state, pos))
}
LiteralExpr(ref s) => {
let sl = s.as_slice();
quote_expr!(ctxt, slice_eq(input, state, pos, $sl))
}
CharSetExpr(invert, ref cases) => {
let expected_set = format_char_set(cases.as_slice());
let expected_str = expected_set.as_slice();
let (in_set, not_in_set) = cond_swap(invert, (
quote_expr!(ctxt, Matched(next, ())),
quote_expr!(ctxt, state.mark_failure(pos, $expected_str)),
));
let m = ctxt.expr_match(DUMMY_SP, quote_expr!(ctxt, ch), vec!(
ctxt.arm(DUMMY_SP, cases.iter().map(|case| {
if case.start == case.end {
ctxt.pat_lit(DUMMY_SP, ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.start)))
} else {
ctxt.pat(DUMMY_SP, rustast::ast::PatRange(
ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.start)),
ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.end))
| Rule | identifier_name |
|
translate.rs | match *i {
RustUseSimple(ref p) => ctxt.view_use_simple(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path(p.as_slice())),
RustUseGlob(ref p) => ctxt.view_use_glob(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path_vec(p.as_slice())),
RustUseList(ref p, ref v) => ctxt.view_use_list(DUMMY_SP, rustast::ast::Inherited, rustast::parse_path_vec(p.as_slice()),
v.iter().map(|s| rustast::str_to_ident(s.as_slice())).collect::<Vec<_>>().as_slice()
),
}
}).collect()
}
pub fn header_items(ctxt: &rustast::ExtCtxt) -> Vec<rustast::P<rustast::Item>> {
let mut items = Vec::new();
items.push(quote_item!(ctxt,
enum ParseResult<T> {
Matched(uint, T),
Failed,
}
).unwrap());
items.push(quote_item!(ctxt,
struct ParseState {
max_err_pos: uint,
expected: ::std::collections::HashSet<&'static str>,
}
).unwrap());
items.push(quote_item!(ctxt,
impl ParseState {
fn new() -> ParseState {
ParseState{ max_err_pos: 0, expected: ::std::collections::HashSet::new() }
}
fn mark_failure(&mut self, pos: uint, expected: &'static str) -> ParseResult<()> {
if pos > self.max_err_pos {
self.max_err_pos = pos;
self.expected.clear();
}
if pos == self.max_err_pos {
self.expected.insert(expected);
}
Failed
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn slice_eq(input: &str, state: &mut ParseState, pos: uint, m: &'static str) -> ParseResult<()> {
#![inline]
#![allow(dead_code)]
let l = m.len();
if input.len() >= pos + l && input.as_bytes().slice(pos, pos+l) == m.as_bytes() {
Matched(pos+l, ())
} else {
state.mark_failure(pos, m)
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn any_char(input: &str, state: &mut ParseState, pos: uint) -> ParseResult<()> {
#![inline]
#![allow(dead_code)]
if input.len() > pos {
Matched(input.char_range_at(pos).next, ())
} else {
state.mark_failure(pos, "<character>")
}
}
).unwrap());
items.push(quote_item!(ctxt,
fn pos_to_line(input: &str, pos: uint) -> (uint, uint) {
let mut remaining = pos;
let mut lineno: uint = 1;
for line in input.lines() {
let line_length = line.len() + 1;
if remaining < line_length {
return (lineno, remaining + 1);
}
remaining -= line_length;
lineno += 1;
}
return (lineno, remaining + 1);
}
).unwrap());
items
}
fn compile_rule(ctxt: &rustast::ExtCtxt, rule: &Rule) -> rustast::P<rustast::Item> |
fn compile_rule_export(ctxt: &rustast::ExtCtxt, rule: &Rule) -> rustast::P<rustast::Item> {
let name = rustast::str_to_ident(rule.name.as_slice());
let ret = rustast::parse_type(rule.ret_type.as_slice());
let parse_fn = rustast::str_to_ident(format!("parse_{}", rule.name).as_slice());
(quote_item!(ctxt,
pub fn $name<'input>(input: &'input str) -> Result<$ret, String> {
let mut state = ParseState::new();
match $parse_fn(input, &mut state, 0) {
Matched(pos, value) => {
if pos == input.len() {
return Ok(value)
}
}
_ => {}
}
let expected = state.expected.to_string().escape_default();
Err(format!("Error at {}: Expected {}", pos_to_line(input, state.max_err_pos), expected))
}
)).unwrap()
}
fn compile_match_and_then(ctxt: &rustast::ExtCtxt, e: &Expr, value_name: Option<&str>, then: rustast::P<rustast::Expr>) -> rustast::P<rustast::Expr> {
let seq_res = compile_expr(ctxt, e, value_name.is_some());
let name_pat = match value_name {
Some(name) => rustast::str_to_ident(name),
None => rustast::str_to_ident("_")
};
quote_expr!(ctxt, {
let seq_res = $seq_res;
match seq_res {
Matched(pos, $name_pat) => { $then }
Failed => Failed,
}
})
}
fn cond_swap<T>(swap: bool, tup: (T, T)) -> (T, T) {
let (a, b) = tup;
if swap {
(b, a)
} else {
(a, b)
}
}
fn format_char_set(cases: &[CharSetCase]) -> String {
let mut r = "[".into_string();
for &CharSetCase{start, end} in cases.iter() {
r.push(start);
if start != end {
r.push('-');
r.push(end);
}
}
r.push(']');
r
}
#[allow(unused_imports)] // quote_tokens! imports things
fn compile_expr(ctxt: &rustast::ExtCtxt, e: &Expr, result_used: bool) -> rustast::P<rustast::Expr> {
match *e {
AnyCharExpr => {
quote_expr!(ctxt, any_char(input, state, pos))
}
LiteralExpr(ref s) => {
let sl = s.as_slice();
quote_expr!(ctxt, slice_eq(input, state, pos, $sl))
}
CharSetExpr(invert, ref cases) => {
let expected_set = format_char_set(cases.as_slice());
let expected_str = expected_set.as_slice();
let (in_set, not_in_set) = cond_swap(invert, (
quote_expr!(ctxt, Matched(next, ())),
quote_expr!(ctxt, state.mark_failure(pos, $expected_str)),
));
let m = ctxt.expr_match(DUMMY_SP, quote_expr!(ctxt, ch), vec!(
ctxt.arm(DUMMY_SP, cases.iter().map(|case| {
if case.start == case.end {
ctxt.pat_lit(DUMMY_SP, ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.start)))
} else {
ctxt.pat(DUMMY_SP, rustast::ast::PatRange(
ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.start)),
ctxt.expr_lit(DUMMY_SP, rustast::ast::LitChar(case.end))
))
}
}).collect::<Vec<_>>(), in_set),
ctxt.arm(DUMMY_SP, vec!(ctxt.pat_wild(DUMMY_SP)), not_in_set)
));
quote_expr!(ctxt, if input.len() > pos {
let ::std::str::CharRange {ch, next} = input.char_range_at(pos);
$m
} else {
state.mark_failure(pos, $expected_str)
})
}
RuleExpr(ref rule_name) => {
let func = rustast::str_to_ident(format!("parse_{}", *rule_name).as_slice());
quote_expr!(ctxt, $func(input, state, pos))
}
SequenceExpr(ref exprs) => {
fn write_seq(ctxt: &rustast::ExtCtxt, exprs: &[Expr]) -> rustast::P<rustast::Expr> {
if exprs.len() == 1 {
compile_expr(ctxt, &exprs[0], false)
} else {
compile_match_and_then(ctxt, &exprs[0], None, write_seq(ctxt, exprs.tail()))
}
}
if exprs.len() > 0 {
write_seq(ctxt, exprs.as_slice())
} else {
quote_expr!(ctxt, Matched(pos, ()))
}
}
ChoiceExpr(ref exprs) => {
fn write_choice(ctxt: &rustast::ExtCtxt, exprs: &[Expr], result_used: bool) -> rustast::P<rustast::Expr> {
if exprs.len() == 1 {
compile_expr(ctxt, &exprs | {
let name = rustast::str_to_ident(format!("parse_{}", rule.name).as_slice());
let ret = rustast::parse_type(rule.ret_type.as_slice());
let body = compile_expr(ctxt, &*rule.expr, rule.ret_type.as_slice() != "()");
(quote_item!(ctxt,
fn $name<'input>(input: &'input str, state: &mut ParseState, pos: uint) -> ParseResult<$ret> {
$body
}
)).unwrap()
} | identifier_body |
views.py | order_item, order, payment_details
from cart.models import Cart
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import PasswordChangeForm
from django.contrib.auth.models import User, auth
from .forms import productsform, profilesform
from django.contrib import messages
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.mail import send_mail
from django.utils.datastructures import MultiValueDictKeyError
from .models import Feedback
def deleteacc(request):
if request.method == 'POST':
username = request.user.username
Cart.objects.filter(buyer_id=username).delete()
products.objects.filter(seller_id=username).delete()
User.objects.filter(username = username).delete()
profiles.objects.filter(user_name=username).delete()
messages.info(request, "The user is deleted")
return redirect('/')
else:
return redirect('/')
def error_404_view(request, exception):
return render(request,'404.html')
def faqs(request):
list = faq.objects.all()
context={
'list': list
}
return render(request, "faqs.html", context)
def category(request,cat):
product=products.objects.filter(category=cat)
#print(product)
return render(request,"category.html",{'products': product,'cat':cat})
def change_password(request):
if request.method == 'POST':
form = PasswordChangeForm(request.user, request.POST)
if form.is_valid():
user = form.save()
update_session_auth_hash(request, user) # Important!
messages.success(request, 'Your password was successfully updated!')
return redirect('/')
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordChangeForm(request.user)
return render(request, 'change_password.html', {'form': form})
def modify(request, slug):
if request.method == 'POST':
slug = request.POST['slug']
product=products.objects.get(slug=slug)
product.name = request.POST['name']
product.brand = request.POST['brand']
product.price = request.POST['price']
product.description = request.POST['description']
product.stock = request.POST['stock']
product.detail = request.POST['detail']
product.available_offer = request.POST['available_offer']
product.save()
return redirect('/')
user=request.user
product=products.objects.get(slug=slug)
if user.username == product.seller_id:
return render(request, 'modify.html', {'product': product})
else:
return redirect('/')
def editprofile(request):
user = request.user
profile = profiles.objects.get(user_name=user.username)
if request.method == 'POST':
user.first_name = request.POST['firstname']
user.last_name = request.POST['lastname']
user.save()
profile.phone_number = request.POST['phone_number']
profile.region = request.POST['region']
profile.city = request.POST['city']
profile.area = request.POST['area']
profile.locale = request.POST['locale']
profile.gmap = request.POST['gmap']
profile.pan = request.POST['pan']
profile.save()
return redirect('/')
else:
return render(request, 'editprofile.html', {'pro': profile })
def profile(request):
user = request.user
product = products.objects.filter(seller_id=user.username)
profile = profiles.objects.filter(user_name=user.username)
len = 0
for p in product:
len += 1
context={
'products': product ,
'profile':profile,
'len': len
}
return render(request, 'profile.html', context)
def search(request):
q = request.GET['q']
productset = (products.objects.filter(name__icontains=q) | products.objects.filter(description__icontains=q) | products.objects.filter(brand__icontains=q)).order_by('id').reverse()
if not productset.exists():
q += "--No Such Product Found"
return render(request, 'search.html', {"products": productset, "q": q}) | # Create your views here.
def index(request):
user = request.user
username = user.username
category=["Electronic Devices","Electronic Accessories","TV & Home Appliances","Health & Beauty","Babies & Toys","Groceries & Pets",
"Home & Lifestyle","Women's Fashion","Men's Fashion","Watches & Accessories","Sports & Outdoor","Automotive & Motorbike"]
showupload = False
if request.method == 'POST':
emailll = request.POST['email']
message = request.POST['message']
print(emailll)
print(message)
if emailll == "" or message == "":
messages.info(request, "Cannot take empty feedback. Please fillup.")
return redirect('/')
		send_mail(  # send_mail takes these four arguments, in order:
			str(user),  # subject (the requesting user's name)
			message,  # message body
			emailll,  # from email
			['[email protected]'],  # recipient list
		)
m = Feedback(email = emailll , text = message)
m.save()
messages.info(request, "Thanks for your feedback. We just received your email.")
return redirect('/')
if user.is_authenticated:
try:
pro = profiles.objects.get(user_name=user.username)
		except Exception:
messages.info(request, "Invalid Profile! Please Use Another ID to login properly.")
return render(request, 'index.html')
showupload = pro.is_seller
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
c = Cart.objects.filter(buyer_id = username).count()
return render(request, 'index.html', {"products":paged_products,"showupload":showupload,'product_count': product_count,"category":category,'noofitem':c})
else:
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
return render(request, 'index.html', {"products": paged_products,'product_count': product_count,"category":category})
def register(request):
if request.method == 'POST':
form = profilesform(request.POST)
if form.is_valid():
is_seller = form.cleaned_data['is_seller']
else:
messages.info(request,'Invalid Data Sent!')
firstname = request.POST['firstname']
lastname = request.POST['lastname']
username = request.POST['username']
email = request.POST['email']
password1 = request.POST['password1']
password2 = request.POST['password2']
phone_number = request.POST['phone_number']
region = request.POST['region']
city = request.POST['city']
area = request.POST['area']
locale = request.POST['locale']
gmap = request.POST['gmap']
pan = 0
if password1 != password2:
messages.info(request,'Password Mismatch!')
return redirect('register')
elif User.objects.filter(username=username).exists():
messages.info(request,'Username Already In Use!')
return redirect('register')
elif User.objects.filter(email=email).exists():
messages.info(request,'Email Already In Use!')
return redirect('register')
else:
user = User.objects.create_user(username=username, first_name=firstname, last_name=lastname, email=email, password=password1)
user.save()
profile = profiles.objects.create(user_name=username,pan=pan, phone_number=phone_number, region=region, city=city, area=area, locale=locale, is_seller=is_seller, gmap=gmap)
profile.save()
return redirect('/')
else:
form = profilesform()
return render(request, 'register.html', {"form": form})
def upload(request):
showupload = False
form = productsform()
user = request.user
owneruser = user.username
if request.method == 'POST':
form = productsform(request.POST, request.FILES)
if form.is_valid():
#form.seller_id=user.username
name = form.cleaned_data['name']
cat = form.cleaned_data['category']
brand = form.cleaned_data['brand']
price = form.cleaned_data['price']
seller_id = owneruser
image = form.cleaned_data['image']
desc = form.cleaned_data['description']
stock = form.cleaned_data['stock']
keywords = " "
detail = form.cleaned_data['detail']
product = products.objects.create(name= name, category=cat, brand=brand, price=price, seller_id=seller_id, image=image, description=desc, stock=stock, keywords=keywords, detail=detail)
product.save()
return redirect('/')
else:
profile = profiles.objects.filter(user_name=user.username)
if profile[0].pan == 0:
messages.info(request, "Please go to edit info and add PAN before uploading products")
return redirect('/')
for pro in profile:
showupload = pro.is_seller
if showupload:
form = productsform()
return render(request, "upload.html", {"form": form})
else:
messages.info(request,'You are not registered as Seller')
return redirect('/')
def login(request):
if request.method == 'POST':
username = request.POST['username']
password = request.POST['password']
| random_line_split |
|
views.py | order_item, order, payment_details
from cart.models import Cart
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import PasswordChangeForm
from django.contrib.auth.models import User, auth
from .forms import productsform, profilesform
from django.contrib import messages
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.mail import send_mail
from django.utils.datastructures import MultiValueDictKeyError
from .models import Feedback
def deleteacc(request):
if request.method == 'POST':
username = request.user.username
Cart.objects.filter(buyer_id=username).delete()
products.objects.filter(seller_id=username).delete()
User.objects.filter(username = username).delete()
profiles.objects.filter(user_name=username).delete()
messages.info(request, "The user is deleted")
return redirect('/')
else:
return redirect('/')
def error_404_view(request, exception):
return render(request,'404.html')
def faqs(request):
list = faq.objects.all()
context={
'list': list
}
return render(request, "faqs.html", context)
def category(request,cat):
product=products.objects.filter(category=cat)
#print(product)
return render(request,"category.html",{'products': product,'cat':cat})
def change_password(request):
if request.method == 'POST':
form = PasswordChangeForm(request.user, request.POST)
if form.is_valid():
user = form.save()
update_session_auth_hash(request, user) # Important!
messages.success(request, 'Your password was successfully updated!')
return redirect('/')
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordChangeForm(request.user)
return render(request, 'change_password.html', {'form': form})
def modify(request, slug):
if request.method == 'POST':
slug = request.POST['slug']
product=products.objects.get(slug=slug)
product.name = request.POST['name']
product.brand = request.POST['brand']
product.price = request.POST['price']
product.description = request.POST['description']
product.stock = request.POST['stock']
product.detail = request.POST['detail']
product.available_offer = request.POST['available_offer']
product.save()
return redirect('/')
user=request.user
product=products.objects.get(slug=slug)
if user.username == product.seller_id:
return render(request, 'modify.html', {'product': product})
else:
return redirect('/')
def editprofile(request):
user = request.user
profile = profiles.objects.get(user_name=user.username)
if request.method == 'POST':
user.first_name = request.POST['firstname']
user.last_name = request.POST['lastname']
user.save()
profile.phone_number = request.POST['phone_number']
profile.region = request.POST['region']
profile.city = request.POST['city']
profile.area = request.POST['area']
profile.locale = request.POST['locale']
profile.gmap = request.POST['gmap']
profile.pan = request.POST['pan']
profile.save()
return redirect('/')
else:
return render(request, 'editprofile.html', {'pro': profile })
def profile(request):
user = request.user
product = products.objects.filter(seller_id=user.username)
profile = profiles.objects.filter(user_name=user.username)
len = 0
for p in product:
len += 1
context={
'products': product ,
'profile':profile,
'len': len
}
return render(request, 'profile.html', context)
def search(request):
q = request.GET['q']
productset = (products.objects.filter(name__icontains=q) | products.objects.filter(description__icontains=q) | products.objects.filter(brand__icontains=q)).order_by('id').reverse()
if not productset.exists():
q += "--No Such Product Found"
return render(request, 'search.html', {"products": productset, "q": q})
# Create your views here.
def index(request):
user = request.user
username = user.username
category=["Electronic Devices","Electronic Accessories","TV & Home Appliances","Health & Beauty","Babies & Toys","Groceries & Pets",
"Home & Lifestyle","Women's Fashion","Men's Fashion","Watches & Accessories","Sports & Outdoor","Automotive & Motorbike"]
showupload = False
if request.method == 'POST':
emailll = request.POST['email']
message = request.POST['message']
print(emailll)
print(message)
if emailll == "" or message == "":
messages.info(request, "Cannot take empty feedback. Please fillup.")
return redirect('/')
send_mail(# this requires 4 arguments compulsoraly:
user,# subject like the
message,# message,
emailll,# from email
['[email protected]'],# to email
)
m = Feedback(email = emailll , text = message)
m.save()
messages.info(request, "Thanks for your feedback. We just received your email.")
return redirect('/')
if user.is_authenticated:
try:
pro = profiles.objects.get(user_name=user.username)
except:
messages.info(request, "Invalid Profile! Please Use Another ID to login properly.")
return render(request, 'index.html')
showupload = pro.is_seller
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
c = Cart.objects.filter(buyer_id = username).count()
return render(request, 'index.html', {"products":paged_products,"showupload":showupload,'product_count': product_count,"category":category,'noofitem':c})
else:
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
return render(request, 'index.html', {"products": paged_products,'product_count': product_count,"category":category})
def register(request):
if request.method == 'POST':
form = profilesform(request.POST)
if form.is_valid():
is_seller = form.cleaned_data['is_seller']
else:
messages.info(request,'Invalid Data Sent!')
firstname = request.POST['firstname']
lastname = request.POST['lastname']
username = request.POST['username']
email = request.POST['email']
password1 = request.POST['password1']
password2 = request.POST['password2']
phone_number = request.POST['phone_number']
region = request.POST['region']
city = request.POST['city']
area = request.POST['area']
locale = request.POST['locale']
gmap = request.POST['gmap']
pan = 0
if password1 != password2:
messages.info(request,'Password Mismatch!')
return redirect('register')
elif User.objects.filter(username=username).exists():
messages.info(request,'Username Already In Use!')
return redirect('register')
elif User.objects.filter(email=email).exists():
messages.info(request,'Email Already In Use!')
return redirect('register')
else:
user = User.objects.create_user(username=username, first_name=firstname, last_name=lastname, email=email, password=password1)
user.save()
profile = profiles.objects.create(user_name=username,pan=pan, phone_number=phone_number, region=region, city=city, area=area, locale=locale, is_seller=is_seller, gmap=gmap)
profile.save()
return redirect('/')
else:
form = profilesform()
return render(request, 'register.html', {"form": form})
def upload(request):
showupload = False
form = productsform()
user = request.user
owneruser = user.username
if request.method == 'POST':
form = productsform(request.POST, request.FILES)
if form.is_valid():
#form.seller_id=user.username
name = form.cleaned_data['name']
cat = form.cleaned_data['category']
brand = form.cleaned_data['brand']
price = form.cleaned_data['price']
seller_id = owneruser
image = form.cleaned_data['image']
desc = form.cleaned_data['description']
stock = form.cleaned_data['stock']
keywords = " "
detail = form.cleaned_data['detail']
product = products.objects.create(name= name, category=cat, brand=brand, price=price, seller_id=seller_id, image=image, description=desc, stock=stock, keywords=keywords, detail=detail)
product.save()
return redirect('/')
else:
profile = profiles.objects.filter(user_name=user.username)
if profile[0].pan == 0:
messages.info(request, "Please go to edit info and add PAN before uploading products")
return redirect('/')
for pro in profile:
|
if showupload:
form = productsform()
return render(request, "upload.html", {"form": form})
else:
messages.info(request,'You are not registered as Seller')
return redirect('/')
def login(request):
if request.method == 'POST':
username = request.POST['username']
password = request.POST['password']
| showupload = pro.is_seller | conditional_block |
views.py | _item, order, payment_details
from cart.models import Cart
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import PasswordChangeForm
from django.contrib.auth.models import User, auth
from .forms import productsform, profilesform
from django.contrib import messages
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.mail import send_mail
from django.utils.datastructures import MultiValueDictKeyError
from .models import Feedback
def deleteacc(request):
if request.method == 'POST':
username = request.user.username
Cart.objects.filter(buyer_id=username).delete()
products.objects.filter(seller_id=username).delete()
User.objects.filter(username = username).delete()
profiles.objects.filter(user_name=username).delete()
messages.info(request, "The user is deleted")
return redirect('/')
else:
return redirect('/')
def error_404_view(request, exception):
return render(request,'404.html')
def faqs(request):
list = faq.objects.all()
context={
'list': list
}
return render(request, "faqs.html", context)
def category(request,cat):
product=products.objects.filter(category=cat)
#print(product)
return render(request,"category.html",{'products': product,'cat':cat})
def change_password(request):
if request.method == 'POST':
form = PasswordChangeForm(request.user, request.POST)
if form.is_valid():
user = form.save()
update_session_auth_hash(request, user) # Important!
messages.success(request, 'Your password was successfully updated!')
return redirect('/')
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordChangeForm(request.user)
return render(request, 'change_password.html', {'form': form})
def | (request, slug):
if request.method == 'POST':
slug = request.POST['slug']
product=products.objects.get(slug=slug)
product.name = request.POST['name']
product.brand = request.POST['brand']
product.price = request.POST['price']
product.description = request.POST['description']
product.stock = request.POST['stock']
product.detail = request.POST['detail']
product.available_offer = request.POST['available_offer']
product.save()
return redirect('/')
user=request.user
product=products.objects.get(slug=slug)
if user.username == product.seller_id:
return render(request, 'modify.html', {'product': product})
else:
return redirect('/')
def editprofile(request):
user = request.user
profile = profiles.objects.get(user_name=user.username)
if request.method == 'POST':
user.first_name = request.POST['firstname']
user.last_name = request.POST['lastname']
user.save()
profile.phone_number = request.POST['phone_number']
profile.region = request.POST['region']
profile.city = request.POST['city']
profile.area = request.POST['area']
profile.locale = request.POST['locale']
profile.gmap = request.POST['gmap']
profile.pan = request.POST['pan']
profile.save()
return redirect('/')
else:
return render(request, 'editprofile.html', {'pro': profile })
def profile(request):
user = request.user
product = products.objects.filter(seller_id=user.username)
profile = profiles.objects.filter(user_name=user.username)
len = 0
for p in product:
len += 1
context={
'products': product ,
'profile':profile,
'len': len
}
return render(request, 'profile.html', context)
def search(request):
q = request.GET['q']
productset = (products.objects.filter(name__icontains=q) | products.objects.filter(description__icontains=q) | products.objects.filter(brand__icontains=q)).order_by('id').reverse()
if not productset.exists():
q += "--No Such Product Found"
return render(request, 'search.html', {"products": productset, "q": q})
# Create your views here.
def index(request):
user = request.user
username = user.username
category=["Electronic Devices","Electronic Accessories","TV & Home Appliances","Health & Beauty","Babies & Toys","Groceries & Pets",
"Home & Lifestyle","Women's Fashion","Men's Fashion","Watches & Accessories","Sports & Outdoor","Automotive & Motorbike"]
showupload = False
if request.method == 'POST':
emailll = request.POST['email']
message = request.POST['message']
print(emailll)
print(message)
if emailll == "" or message == "":
messages.info(request, "Cannot take empty feedback. Please fillup.")
return redirect('/')
send_mail(# this requires 4 arguments compulsoraly:
user,# subject like the
message,# message,
emailll,# from email
['[email protected]'],# to email
)
m = Feedback(email = emailll , text = message)
m.save()
messages.info(request, "Thanks for your feedback. We just received your email.")
return redirect('/')
if user.is_authenticated:
try:
pro = profiles.objects.get(user_name=user.username)
except:
messages.info(request, "Invalid Profile! Please Use Another ID to login properly.")
return render(request, 'index.html')
showupload = pro.is_seller
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
c = Cart.objects.filter(buyer_id = username).count()
return render(request, 'index.html', {"products":paged_products,"showupload":showupload,'product_count': product_count,"category":category,'noofitem':c})
else:
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
return render(request, 'index.html', {"products": paged_products,'product_count': product_count,"category":category})
def register(request):
if request.method == 'POST':
form = profilesform(request.POST)
if form.is_valid():
is_seller = form.cleaned_data['is_seller']
else:
messages.info(request,'Invalid Data Sent!')
firstname = request.POST['firstname']
lastname = request.POST['lastname']
username = request.POST['username']
email = request.POST['email']
password1 = request.POST['password1']
password2 = request.POST['password2']
phone_number = request.POST['phone_number']
region = request.POST['region']
city = request.POST['city']
area = request.POST['area']
locale = request.POST['locale']
gmap = request.POST['gmap']
pan = 0
if password1 != password2:
messages.info(request,'Password Mismatch!')
return redirect('register')
elif User.objects.filter(username=username).exists():
messages.info(request,'Username Already In Use!')
return redirect('register')
elif User.objects.filter(email=email).exists():
messages.info(request,'Email Already In Use!')
return redirect('register')
else:
user = User.objects.create_user(username=username, first_name=firstname, last_name=lastname, email=email, password=password1)
user.save()
profile = profiles.objects.create(user_name=username,pan=pan, phone_number=phone_number, region=region, city=city, area=area, locale=locale, is_seller=is_seller, gmap=gmap)
profile.save()
return redirect('/')
else:
form = profilesform()
return render(request, 'register.html', {"form": form})
def upload(request):
showupload = False
form = productsform()
user = request.user
owneruser = user.username
if request.method == 'POST':
form = productsform(request.POST, request.FILES)
if form.is_valid():
#form.seller_id=user.username
name = form.cleaned_data['name']
cat = form.cleaned_data['category']
brand = form.cleaned_data['brand']
price = form.cleaned_data['price']
seller_id = owneruser
image = form.cleaned_data['image']
desc = form.cleaned_data['description']
stock = form.cleaned_data['stock']
keywords = " "
detail = form.cleaned_data['detail']
product = products.objects.create(name= name, category=cat, brand=brand, price=price, seller_id=seller_id, image=image, description=desc, stock=stock, keywords=keywords, detail=detail)
product.save()
return redirect('/')
else:
profile = profiles.objects.filter(user_name=user.username)
if profile[0].pan == 0:
messages.info(request, "Please go to edit info and add PAN before uploading products")
return redirect('/')
for pro in profile:
showupload = pro.is_seller
if showupload:
form = productsform()
return render(request, "upload.html", {"form": form})
else:
messages.info(request,'You are not registered as Seller')
return redirect('/')
def login(request):
if request.method == 'POST':
username = request.POST['username']
password = request.POST['password']
| modify | identifier_name |
views.py | from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.mail import send_mail
from django.utils.datastructures import MultiValueDictKeyError
from .models import Feedback
def deleteacc(request):
if request.method == 'POST':
username = request.user.username
Cart.objects.filter(buyer_id=username).delete()
products.objects.filter(seller_id=username).delete()
User.objects.filter(username = username).delete()
profiles.objects.filter(user_name=username).delete()
messages.info(request, "The user is deleted")
return redirect('/')
else:
return redirect('/')
def error_404_view(request, exception):
return render(request,'404.html')
def faqs(request):
list = faq.objects.all()
context={
'list': list
}
return render(request, "faqs.html", context)
def category(request,cat):
product=products.objects.filter(category=cat)
#print(product)
return render(request,"category.html",{'products': product,'cat':cat})
def change_password(request):
if request.method == 'POST':
form = PasswordChangeForm(request.user, request.POST)
if form.is_valid():
user = form.save()
update_session_auth_hash(request, user) # Important!
messages.success(request, 'Your password was successfully updated!')
return redirect('/')
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordChangeForm(request.user)
return render(request, 'change_password.html', {'form': form})
def modify(request, slug):
if request.method == 'POST':
slug = request.POST['slug']
product=products.objects.get(slug=slug)
product.name = request.POST['name']
product.brand = request.POST['brand']
product.price = request.POST['price']
product.description = request.POST['description']
product.stock = request.POST['stock']
product.detail = request.POST['detail']
product.available_offer = request.POST['available_offer']
product.save()
return redirect('/')
user=request.user
product=products.objects.get(slug=slug)
if user.username == product.seller_id:
return render(request, 'modify.html', {'product': product})
else:
return redirect('/')
def editprofile(request):
user = request.user
profile = profiles.objects.get(user_name=user.username)
if request.method == 'POST':
user.first_name = request.POST['firstname']
user.last_name = request.POST['lastname']
user.save()
profile.phone_number = request.POST['phone_number']
profile.region = request.POST['region']
profile.city = request.POST['city']
profile.area = request.POST['area']
profile.locale = request.POST['locale']
profile.gmap = request.POST['gmap']
profile.pan = request.POST['pan']
profile.save()
return redirect('/')
else:
return render(request, 'editprofile.html', {'pro': profile })
def profile(request):
user = request.user
product = products.objects.filter(seller_id=user.username)
profile = profiles.objects.filter(user_name=user.username)
len = 0
for p in product:
len += 1
context={
'products': product ,
'profile':profile,
'len': len
}
return render(request, 'profile.html', context)
def search(request):
q = request.GET['q']
productset = (products.objects.filter(name__icontains=q) | products.objects.filter(description__icontains=q) | products.objects.filter(brand__icontains=q)).order_by('id').reverse()
if not productset.exists():
q += "--No Such Product Found"
return render(request, 'search.html', {"products": productset, "q": q})
# Create your views here.
def index(request):
user = request.user
username = user.username
category=["Electronic Devices","Electronic Accessories","TV & Home Appliances","Health & Beauty","Babies & Toys","Groceries & Pets",
"Home & Lifestyle","Women's Fashion","Men's Fashion","Watches & Accessories","Sports & Outdoor","Automotive & Motorbike"]
showupload = False
if request.method == 'POST':
emailll = request.POST['email']
message = request.POST['message']
print(emailll)
print(message)
if emailll == "" or message == "":
messages.info(request, "Cannot take empty feedback. Please fillup.")
return redirect('/')
send_mail(# this requires 4 arguments compulsoraly:
user,# subject like the
message,# message,
emailll,# from email
['[email protected]'],# to email
)
m = Feedback(email = emailll , text = message)
m.save()
messages.info(request, "Thanks for your feedback. We just received your email.")
return redirect('/')
if user.is_authenticated:
try:
pro = profiles.objects.get(user_name=user.username)
except:
messages.info(request, "Invalid Profile! Please Use Another ID to login properly.")
return render(request, 'index.html')
showupload = pro.is_seller
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
c = Cart.objects.filter(buyer_id = username).count()
return render(request, 'index.html', {"products":paged_products,"showupload":showupload,'product_count': product_count,"category":category,'noofitem':c})
else:
senditem = products.objects.all().order_by('id').reverse()
paginator = Paginator(senditem,12)
page = request.GET.get('page')
paged_products = paginator.get_page(page)
product_count = senditem.count()
return render(request, 'index.html', {"products": paged_products,'product_count': product_count,"category":category})
def register(request):
if request.method == 'POST':
form = profilesform(request.POST)
if form.is_valid():
is_seller = form.cleaned_data['is_seller']
else:
messages.info(request,'Invalid Data Sent!')
firstname = request.POST['firstname']
lastname = request.POST['lastname']
username = request.POST['username']
email = request.POST['email']
password1 = request.POST['password1']
password2 = request.POST['password2']
phone_number = request.POST['phone_number']
region = request.POST['region']
city = request.POST['city']
area = request.POST['area']
locale = request.POST['locale']
gmap = request.POST['gmap']
pan = 0
if password1 != password2:
messages.info(request,'Password Mismatch!')
return redirect('register')
elif User.objects.filter(username=username).exists():
messages.info(request,'Username Already In Use!')
return redirect('register')
elif User.objects.filter(email=email).exists():
messages.info(request,'Email Already In Use!')
return redirect('register')
else:
user = User.objects.create_user(username=username, first_name=firstname, last_name=lastname, email=email, password=password1)
user.save()
profile = profiles.objects.create(user_name=username,pan=pan, phone_number=phone_number, region=region, city=city, area=area, locale=locale, is_seller=is_seller, gmap=gmap)
profile.save()
return redirect('/')
else:
form = profilesform()
return render(request, 'register.html', {"form": form})
def upload(request):
showupload = False
form = productsform()
user = request.user
owneruser = user.username
if request.method == 'POST':
form = productsform(request.POST, request.FILES)
if form.is_valid():
#form.seller_id=user.username
name = form.cleaned_data['name']
cat = form.cleaned_data['category']
brand = form.cleaned_data['brand']
price = form.cleaned_data['price']
seller_id = owneruser
image = form.cleaned_data['image']
desc = form.cleaned_data['description']
stock = form.cleaned_data['stock']
keywords = " "
detail = form.cleaned_data['detail']
product = products.objects.create(name= name, category=cat, brand=brand, price=price, seller_id=seller_id, image=image, description=desc, stock=stock, keywords=keywords, detail=detail)
product.save()
return redirect('/')
else:
profile = profiles.objects.filter(user_name=user.username)
if profile[0].pan == 0:
messages.info(request, "Please go to edit info and add PAN before uploading products")
return redirect('/')
for pro in profile:
showupload = pro.is_seller
if showupload:
form = productsform()
return render(request, "upload.html", {"form": form})
else:
messages.info(request,'You are not registered as Seller')
return redirect('/')
def login(request):
| if request.method == 'POST':
username = request.POST['username']
password = request.POST['password']
user = auth.authenticate(username=username, password=password)
if user is not None:
auth.login(request, user)
return redirect('/')
else:
messages.info(request, 'Wrong Values Entered!')
return redirect('login')
else:
return render(request, 'login.html') | identifier_body |
|
lib.rs | ::Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outfile = format!("{}/{}", outpath, self.exp_param);
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
fn write_csv_simple(&self, outfile:&str)->std::io::Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
}
fn entropy(group_map: &HashMap<i64, HashSet<i64>>, labels:&Vec<i64> ) -> f64{
let n = labels.len() as f64;
let res: f64 = group_map.values().map(|i|{
let p = i.len() as f64 /n;
p * p.ln()
}).sum();
return res * -1 as f64
}
impl ClusterResults{
fn new(barcodes:Vec<i64>, labels: Vec<i64>, exp_name: String) -> ClusterResults{
let barcode_set: HashSet<i64> = HashSet::from_iter(barcodes.clone());
let mut grouped_barcodes:HashMap<i64, HashSet<i64>> = HashMap::new();
let mut old_label = &labels[0];
let mut current_label = &labels[0];// declare out here so we can add the last set back in
let mut current_set: HashSet<i64> = HashSet::new();
for i in 0..barcodes.len(){
current_label = &labels[i];
let current_barcode = &barcodes[i];
if current_label == old_label{
current_set.insert(current_barcode.clone());
}else{// reach a new cluster
let dup_check = grouped_barcodes.insert(old_label.clone(), current_set);
if !dup_check.is_none(){ // HashMap.insert returns None when new key is added
panic!("A duplicate key was added when making a ClusterResults; input data is not sorted by label")
}
let ns: HashSet<i64> = HashSet::new();
current_set = ns;
current_set.insert(current_barcode.clone());
old_label = current_label;
}
}
grouped_barcodes.insert(current_label.clone(), current_set);
let h_tot = entropy(&grouped_barcodes, &labels);
ClusterResults{barcodes, labels, barcode_set, grouped_barcodes, h_tot, exp_name}
}
fn head(&self){
println!("{:?}", &self.barcodes[0..5]);
println!("{:?}", &self.labels[0..5])
}
}
fn stability_k(ref_bc: &HashSet<i64>, query:&ClusterResults) -> f64{
let intersect: HashSet<i64> = ref_bc.intersection(&query.barcode_set).cloned().collect::<HashSet<i64>>();
if intersect.len() == 0{
return 0.0
} else{
let mut new_bc :Vec<i64> = vec![-1; intersect.len()];
let mut new_labels : Vec<i64> = vec![-1; intersect.len()];
let mut j=0;
for i in 0..query.barcodes.len(){
if intersect.contains(&query.barcodes[i]){
new_bc[j] = query.barcodes[i].clone();
new_labels[j] = query.labels[i].clone();
j+=1;
}
}
let new_clu = ClusterResults::new(new_bc, new_labels, String::new());//use an empty string for these guys, as they get deleted later | let mut s = String::new();
gz.read_to_string(&mut s)?;
Ok(s)
}
fn read_cluster_results( file: &str) ->ClusterResults {
let mut handle = fs::File::open(file).expect("Bad file input");
let mut buffer = Vec::new();
handle.read_to_end(&mut buffer).expect("couldnt read file");
let file_string = decode_reader(buffer).expect("bad gzip");
let file_string: Vec<&str> = file_string.lines().collect();
let mut barcodes: Vec<i64> = vec![-1; file_string.len()];
let mut labels: Vec<i64> = vec![-1; file_string.len()];
for i in 0..file_string.len(){
let line_split : Vec<&str> = file_string[i].split(",").collect();
barcodes[i] = String::from(line_split[0]).parse::<i64>().unwrap();
labels[i] = String::from(line_split[1]).parse::<i64>().unwrap();
}
let exp_name = file.split("/").last().unwrap() ;
ClusterResults::new(barcodes,labels, String::from(exp_name))
}
fn calculate_metrics(ref_cluster:&ClusterResults, query_clusters: &Vec<&ClusterResults>) -> ExperimentResults{
let mut stability_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
let mut purity_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
for (i, cluster) in ref_cluster.grouped_barcodes.values().enumerate(){
for (j, experiment) in query_clusters.iter().enumerate() {
let mut stab = stability_k(&cluster, &experiment) / experiment.h_tot ;
if stab.is_nan(){// cant compare a naturally occuring NAN to f64::NAN
stab = 1.0;
}
stability_results[[i, j]]= stab ;
purity_results[[i,j]] = purity_k(&cluster, &experiment.grouped_barcodes)
}
}
let stability_scores = stability_results.rows().into_iter().map(|x| 1.0 - x.mean().unwrap()).collect::<Vec<f64>>();
let purity_scores = purity_results.rows().into_iter().map( |x| {
let mut v = x.to_vec();
v.retain(|x| *x != f64::NAN); // in purity_k f64::NAN is explicitly returned, so this works. Consider changing for conistency
return vmean(v)
} ).collect::<Vec<f64>>();
let cluster_ids: Vec<i64> = ref_cluster.grouped_barcodes.keys().cloned().collect::<Vec<i64>>() ;
let exp_param = ref_cluster.exp_name.clone();
return ExperimentResults{ exp_param,cluster_ids, stability_scores, purity_scores }
}
fn vmean(v:Vec<f64>) -> f64{
return v.iter().sum::<f64>() / v.len() as f64
}
fn purity_k(ref_bc_set: &HashSet<i64>, query_map: &HashMap<i64, HashSet<i64>>) -> f64{
let mut max_overlap = 0;
let mut max_overlap_key:i64 = -100000000;
for query_key in query_map.keys(){
let q_cluster_set = query_map.get(query_key).unwrap();
let overlap = ref_bc_set.intersection(q_cluster_set).count();
if overlap > max_overlap{
max_overlap = overlap;
max_overlap_key = *query_key;
}
}
if max_overlap_key == -100000000{
return f64::NAN;
} else{
return max_overlap as f64 / query_map.get(&max_overlap_key).unwrap().len() as f64
}
}
fn run_pairwise_calculation_threaded(experiment_list:&Vec<&ClusterResults>, nthreads:usize) ->Vec<ExperimentResults>{
let pool = rayon::ThreadPoolBuilder::new().num_threads(nthreads).build().unwrap();
let dummy_array: Vec<usize> = (0..experiment_list.len()).collect();
let res: Vec<ExperimentResults> = pool.install(|| dummy_array.into_par_iter()
.map(|i:usize| {
let ref_clust = experiment_list[i];
let mut query_clusts = experiment_list.clone();
query_clusts.remove(i);
return calculate_metrics(ref_clust, &query_clusts)
})
.collect()
);
return res
}
#[pyfunction]
fn pairwise_metric_calculation_fromdisk(file_glob: &str, nthreads:usize) -> Vec<ExperimentResults> {
let test_clusters_objs:Vec<ClusterResults> = glob(file_glob)
.expect("Failed to read glob pattern")
.map(|x|{let file = String::from(x.unwrap().to_str().expect("Failed | return entropy(&new_clu.grouped_barcodes, &new_clu.labels);
}
}
fn decode_reader(bytes: Vec<u8>) -> std::io::Result<String> {
let mut gz = GzDecoder::new(&bytes[..]); | random_line_split |
lib.rs | Result<()> |
fn write_csv_simple(&self, outfile:&str)->std::io::Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
}
fn entropy(group_map: &HashMap<i64, HashSet<i64>>, labels:&Vec<i64> ) -> f64{
let n = labels.len() as f64;
let res: f64 = group_map.values().map(|i|{
let p = i.len() as f64 /n;
p * p.ln()
}).sum();
return res * -1 as f64
}
impl ClusterResults{
fn new(barcodes:Vec<i64>, labels: Vec<i64>, exp_name: String) -> ClusterResults{
let barcode_set: HashSet<i64> = HashSet::from_iter(barcodes.clone());
let mut grouped_barcodes:HashMap<i64, HashSet<i64>> = HashMap::new();
let mut old_label = &labels[0];
let mut current_label = &labels[0];// declare out here so we can add the last set back in
let mut current_set: HashSet<i64> = HashSet::new();
for i in 0..barcodes.len(){
current_label = &labels[i];
let current_barcode = &barcodes[i];
if current_label == old_label{
current_set.insert(current_barcode.clone());
}else{// reach a new cluster
let dup_check = grouped_barcodes.insert(old_label.clone(), current_set);
if !dup_check.is_none(){ // HashMap.insert returns None when new key is added
panic!("A duplicate key was added when making a ClusterResults; input data is not sorted by label")
}
let ns: HashSet<i64> = HashSet::new();
current_set = ns;
current_set.insert(current_barcode.clone());
old_label = current_label;
}
}
grouped_barcodes.insert(current_label.clone(), current_set);
let h_tot = entropy(&grouped_barcodes, &labels);
ClusterResults{barcodes, labels, barcode_set, grouped_barcodes, h_tot, exp_name}
}
fn head(&self){
println!("{:?}", &self.barcodes[0..5]);
println!("{:?}", &self.labels[0..5])
}
}
fn stability_k(ref_bc: &HashSet<i64>, query:&ClusterResults) -> f64{
let intersect: HashSet<i64> = ref_bc.intersection(&query.barcode_set).cloned().collect::<HashSet<i64>>();
if intersect.len() == 0{
return 0.0
} else{
let mut new_bc :Vec<i64> = vec![-1; intersect.len()];
let mut new_labels : Vec<i64> = vec![-1; intersect.len()];
let mut j=0;
for i in 0..query.barcodes.len(){
if intersect.contains(&query.barcodes[i]){
new_bc[j] = query.barcodes[i].clone();
new_labels[j] = query.labels[i].clone();
j+=1;
}
}
let new_clu = ClusterResults::new(new_bc, new_labels, String::new());//use an empty string for these guys, as they get deleted later
return entropy(&new_clu.grouped_barcodes, &new_clu.labels);
}
}
fn decode_reader(bytes: Vec<u8>) -> std::io::Result<String> {
let mut gz = GzDecoder::new(&bytes[..]);
let mut s = String::new();
gz.read_to_string(&mut s)?;
Ok(s)
}
fn read_cluster_results( file: &str) ->ClusterResults {
let mut handle = fs::File::open(file).expect("Bad file input");
let mut buffer = Vec::new();
handle.read_to_end(&mut buffer).expect("couldnt read file");
let file_string = decode_reader(buffer).expect("bad gzip");
let file_string: Vec<&str> = file_string.lines().collect();
let mut barcodes: Vec<i64> = vec![-1; file_string.len()];
let mut labels: Vec<i64> = vec![-1; file_string.len()];
for i in 0..file_string.len(){
let line_split : Vec<&str> = file_string[i].split(",").collect();
barcodes[i] = String::from(line_split[0]).parse::<i64>().unwrap();
labels[i] = String::from(line_split[1]).parse::<i64>().unwrap();
}
let exp_name = file.split("/").last().unwrap() ;
ClusterResults::new(barcodes,labels, String::from(exp_name))
}
fn calculate_metrics(ref_cluster:&ClusterResults, query_clusters: &Vec<&ClusterResults>) -> ExperimentResults{
let mut stability_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
let mut purity_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
for (i, cluster) in ref_cluster.grouped_barcodes.values().enumerate(){
for (j, experiment) in query_clusters.iter().enumerate() {
let mut stab = stability_k(&cluster, &experiment) / experiment.h_tot ;
if stab.is_nan(){// cant compare a naturally occuring NAN to f64::NAN
stab = 1.0;
}
stability_results[[i, j]]= stab ;
purity_results[[i,j]] = purity_k(&cluster, &experiment.grouped_barcodes)
}
}
let stability_scores = stability_results.rows().into_iter().map(|x| 1.0 - x.mean().unwrap()).collect::<Vec<f64>>();
let purity_scores = purity_results.rows().into_iter().map( |x| {
let mut v = x.to_vec();
v.retain(|x| *x != f64::NAN); // in purity_k f64::NAN is explicitly returned, so this works. Consider changing for conistency
return vmean(v)
} ).collect::<Vec<f64>>();
let cluster_ids: Vec<i64> = ref_cluster.grouped_barcodes.keys().cloned().collect::<Vec<i64>>() ;
let exp_param = ref_cluster.exp_name.clone();
return ExperimentResults{ exp_param,cluster_ids, stability_scores, purity_scores }
}
fn vmean(v:Vec<f64>) -> f64{
return v.iter().sum::<f64>() / v.len() as f64
}
fn purity_k(ref_bc_set: &HashSet<i64>, query_map: &HashMap<i64, HashSet<i64>>) -> f64{
let mut max_overlap = 0;
let mut max_overlap_key:i64 = -100000000;
for query_key in query_map.keys(){
let q_cluster_set = query_map.get(query_key).unwrap();
let overlap = ref_bc_set.intersection(q_cluster_set).count();
if overlap > max_overlap{
max_overlap = overlap;
max_overlap_key = *query_key;
}
}
if max_overlap_key == -100000000{
return f64::NAN;
} else{
return max_overlap as f64 / query_map.get(&max_overlap_key).unwrap().len() as f64
}
}
fn run_pairwise_calculation_threaded(experiment_list:&Vec<&ClusterResults>, nthreads:usize) ->Vec<ExperimentResults>{
let pool = rayon::ThreadPoolBuilder::new().num_threads(nthreads).build().unwrap();
let dummy_array: Vec<usize> = (0..experiment_list.len()).collect();
let res: Vec<ExperimentResults> = pool.install(|| dummy_array.into_par_iter()
.map(|i:usize| {
let ref_clust = experiment_list[i];
let mut query_clusts = experiment_list.clone();
query_clusts.remove(i);
return calculate_metrics(ref_clust, &query_clusts)
})
.collect()
);
return res
}
#[pyfunction]
fn pairwise_metric_calculation_fromdisk(file_glob: &str, nthreads:usize) -> Vec<ExperimentResults> {
let test_clusters_objs:Vec<ClusterResults> = glob(file_glob)
.expect("Failed to read glob pattern")
.map(|x|{let file = String::from(x.unwrap().to_str().expect("Failed | {
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outfile = format!("{}/{}", outpath, self.exp_param);
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
} | identifier_body |
lib.rs | Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outfile = format!("{}/{}", outpath, self.exp_param);
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
fn write_csv_simple(&self, outfile:&str)->std::io::Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
}
fn entropy(group_map: &HashMap<i64, HashSet<i64>>, labels:&Vec<i64> ) -> f64{
let n = labels.len() as f64;
let res: f64 = group_map.values().map(|i|{
let p = i.len() as f64 /n;
p * p.ln()
}).sum();
return res * -1 as f64
}
impl ClusterResults{
fn new(barcodes:Vec<i64>, labels: Vec<i64>, exp_name: String) -> ClusterResults{
let barcode_set: HashSet<i64> = HashSet::from_iter(barcodes.clone());
let mut grouped_barcodes:HashMap<i64, HashSet<i64>> = HashMap::new();
let mut old_label = &labels[0];
let mut current_label = &labels[0];// declare out here so we can add the last set back in
let mut current_set: HashSet<i64> = HashSet::new();
for i in 0..barcodes.len(){
current_label = &labels[i];
let current_barcode = &barcodes[i];
if current_label == old_label{
current_set.insert(current_barcode.clone());
}else{// reach a new cluster
let dup_check = grouped_barcodes.insert(old_label.clone(), current_set);
if !dup_check.is_none(){ // HashMap.insert returns None when new key is added
panic!("A duplicate key was added when making a ClusterResults; input data is not sorted by label")
}
let ns: HashSet<i64> = HashSet::new();
current_set = ns;
current_set.insert(current_barcode.clone());
old_label = current_label;
}
}
grouped_barcodes.insert(current_label.clone(), current_set);
let h_tot = entropy(&grouped_barcodes, &labels);
ClusterResults{barcodes, labels, barcode_set, grouped_barcodes, h_tot, exp_name}
}
fn head(&self){
println!("{:?}", &self.barcodes[0..5]);
println!("{:?}", &self.labels[0..5])
}
}
fn stability_k(ref_bc: &HashSet<i64>, query:&ClusterResults) -> f64{
let intersect: HashSet<i64> = ref_bc.intersection(&query.barcode_set).cloned().collect::<HashSet<i64>>();
if intersect.len() == 0{
return 0.0
} else{
let mut new_bc :Vec<i64> = vec![-1; intersect.len()];
let mut new_labels : Vec<i64> = vec![-1; intersect.len()];
let mut j=0;
for i in 0..query.barcodes.len(){
if intersect.contains(&query.barcodes[i]){
new_bc[j] = query.barcodes[i].clone();
new_labels[j] = query.labels[i].clone();
j+=1;
}
}
let new_clu = ClusterResults::new(new_bc, new_labels, String::new());//use an empty string for these guys, as they get deleted later
return entropy(&new_clu.grouped_barcodes, &new_clu.labels);
}
}
fn decode_reader(bytes: Vec<u8>) -> std::io::Result<String> {
let mut gz = GzDecoder::new(&bytes[..]);
let mut s = String::new();
gz.read_to_string(&mut s)?;
Ok(s)
}
fn read_cluster_results( file: &str) ->ClusterResults {
let mut handle = fs::File::open(file).expect("Bad file input");
let mut buffer = Vec::new();
handle.read_to_end(&mut buffer).expect("couldnt read file");
let file_string = decode_reader(buffer).expect("bad gzip");
let file_string: Vec<&str> = file_string.lines().collect();
let mut barcodes: Vec<i64> = vec![-1; file_string.len()];
let mut labels: Vec<i64> = vec![-1; file_string.len()];
for i in 0..file_string.len(){
let line_split : Vec<&str> = file_string[i].split(",").collect();
barcodes[i] = String::from(line_split[0]).parse::<i64>().unwrap();
labels[i] = String::from(line_split[1]).parse::<i64>().unwrap();
}
let exp_name = file.split("/").last().unwrap() ;
ClusterResults::new(barcodes,labels, String::from(exp_name))
}
fn calculate_metrics(ref_cluster:&ClusterResults, query_clusters: &Vec<&ClusterResults>) -> ExperimentResults{
let mut stability_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
let mut purity_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
for (i, cluster) in ref_cluster.grouped_barcodes.values().enumerate(){
for (j, experiment) in query_clusters.iter().enumerate() {
let mut stab = stability_k(&cluster, &experiment) / experiment.h_tot ;
if stab.is_nan(){// cant compare a naturally occuring NAN to f64::NAN
stab = 1.0;
}
stability_results[[i, j]]= stab ;
purity_results[[i,j]] = purity_k(&cluster, &experiment.grouped_barcodes)
}
}
let stability_scores = stability_results.rows().into_iter().map(|x| 1.0 - x.mean().unwrap()).collect::<Vec<f64>>();
let purity_scores = purity_results.rows().into_iter().map( |x| {
let mut v = x.to_vec();
v.retain(|x| *x != f64::NAN); // in purity_k f64::NAN is explicitly returned, so this works. Consider changing for conistency
return vmean(v)
} ).collect::<Vec<f64>>();
let cluster_ids: Vec<i64> = ref_cluster.grouped_barcodes.keys().cloned().collect::<Vec<i64>>() ;
let exp_param = ref_cluster.exp_name.clone();
return ExperimentResults{ exp_param,cluster_ids, stability_scores, purity_scores }
}
fn | (v:Vec<f64>) -> f64{
return v.iter().sum::<f64>() / v.len() as f64
}
fn purity_k(ref_bc_set: &HashSet<i64>, query_map: &HashMap<i64, HashSet<i64>>) -> f64{
let mut max_overlap = 0;
let mut max_overlap_key:i64 = -100000000;
for query_key in query_map.keys(){
let q_cluster_set = query_map.get(query_key).unwrap();
let overlap = ref_bc_set.intersection(q_cluster_set).count();
if overlap > max_overlap{
max_overlap = overlap;
max_overlap_key = *query_key;
}
}
if max_overlap_key == -100000000{
return f64::NAN;
} else{
return max_overlap as f64 / query_map.get(&max_overlap_key).unwrap().len() as f64
}
}
fn run_pairwise_calculation_threaded(experiment_list:&Vec<&ClusterResults>, nthreads:usize) ->Vec<ExperimentResults>{
let pool = rayon::ThreadPoolBuilder::new().num_threads(nthreads).build().unwrap();
let dummy_array: Vec<usize> = (0..experiment_list.len()).collect();
let res: Vec<ExperimentResults> = pool.install(|| dummy_array.into_par_iter()
.map(|i:usize| {
let ref_clust = experiment_list[i];
let mut query_clusts = experiment_list.clone();
query_clusts.remove(i);
return calculate_metrics(ref_clust, &query_clusts)
})
.collect()
);
return res
}
#[pyfunction]
fn pairwise_metric_calculation_fromdisk(file_glob: &str, nthreads:usize) -> Vec<ExperimentResults> {
let test_clusters_objs:Vec<ClusterResults> = glob(file_glob)
.expect("Failed to read glob pattern")
.map(|x|{let file = String::from(x.unwrap().to_str().expect("Failed | vmean | identifier_name |
lib.rs | Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outfile = format!("{}/{}", outpath, self.exp_param);
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
fn write_csv_simple(&self, outfile:&str)->std::io::Result<()>{
let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()];
for i in 0..self.cluster_ids.len(){
lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i])
}
let outstring = lines.join("");
fs::write(outfile, outstring).unwrap();
Ok(())
}
}
fn entropy(group_map: &HashMap<i64, HashSet<i64>>, labels:&Vec<i64> ) -> f64{
let n = labels.len() as f64;
let res: f64 = group_map.values().map(|i|{
let p = i.len() as f64 /n;
p * p.ln()
}).sum();
return res * -1 as f64
}
impl ClusterResults{
fn new(barcodes:Vec<i64>, labels: Vec<i64>, exp_name: String) -> ClusterResults{
let barcode_set: HashSet<i64> = HashSet::from_iter(barcodes.clone());
let mut grouped_barcodes:HashMap<i64, HashSet<i64>> = HashMap::new();
let mut old_label = &labels[0];
let mut current_label = &labels[0];// declare out here so we can add the last set back in
let mut current_set: HashSet<i64> = HashSet::new();
for i in 0..barcodes.len(){
current_label = &labels[i];
let current_barcode = &barcodes[i];
if current_label == old_label{
current_set.insert(current_barcode.clone());
}else{// reach a new cluster
let dup_check = grouped_barcodes.insert(old_label.clone(), current_set);
if !dup_check.is_none() |
let ns: HashSet<i64> = HashSet::new();
current_set = ns;
current_set.insert(current_barcode.clone());
old_label = current_label;
}
}
grouped_barcodes.insert(current_label.clone(), current_set);
let h_tot = entropy(&grouped_barcodes, &labels);
ClusterResults{barcodes, labels, barcode_set, grouped_barcodes, h_tot, exp_name}
}
fn head(&self){
println!("{:?}", &self.barcodes[0..5]);
println!("{:?}", &self.labels[0..5])
}
}
fn stability_k(ref_bc: &HashSet<i64>, query:&ClusterResults) -> f64{
let intersect: HashSet<i64> = ref_bc.intersection(&query.barcode_set).cloned().collect::<HashSet<i64>>();
if intersect.len() == 0{
return 0.0
} else{
let mut new_bc :Vec<i64> = vec![-1; intersect.len()];
let mut new_labels : Vec<i64> = vec![-1; intersect.len()];
let mut j=0;
for i in 0..query.barcodes.len(){
if intersect.contains(&query.barcodes[i]){
new_bc[j] = query.barcodes[i].clone();
new_labels[j] = query.labels[i].clone();
j+=1;
}
}
let new_clu = ClusterResults::new(new_bc, new_labels, String::new());//use an empty string for these guys, as they get deleted later
return entropy(&new_clu.grouped_barcodes, &new_clu.labels);
}
}
fn decode_reader(bytes: Vec<u8>) -> std::io::Result<String> {
let mut gz = GzDecoder::new(&bytes[..]);
let mut s = String::new();
gz.read_to_string(&mut s)?;
Ok(s)
}
fn read_cluster_results( file: &str) ->ClusterResults {
let mut handle = fs::File::open(file).expect("Bad file input");
let mut buffer = Vec::new();
handle.read_to_end(&mut buffer).expect("couldnt read file");
let file_string = decode_reader(buffer).expect("bad gzip");
let file_string: Vec<&str> = file_string.lines().collect();
let mut barcodes: Vec<i64> = vec![-1; file_string.len()];
let mut labels: Vec<i64> = vec![-1; file_string.len()];
for i in 0..file_string.len(){
let line_split : Vec<&str> = file_string[i].split(",").collect();
barcodes[i] = String::from(line_split[0]).parse::<i64>().unwrap();
labels[i] = String::from(line_split[1]).parse::<i64>().unwrap();
}
let exp_name = file.split("/").last().unwrap() ;
ClusterResults::new(barcodes,labels, String::from(exp_name))
}
fn calculate_metrics(ref_cluster:&ClusterResults, query_clusters: &Vec<&ClusterResults>) -> ExperimentResults{
let mut stability_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
let mut purity_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() ));
for (i, cluster) in ref_cluster.grouped_barcodes.values().enumerate(){
for (j, experiment) in query_clusters.iter().enumerate() {
let mut stab = stability_k(&cluster, &experiment) / experiment.h_tot ;
if stab.is_nan(){// cant compare a naturally occuring NAN to f64::NAN
stab = 1.0;
}
stability_results[[i, j]]= stab ;
purity_results[[i,j]] = purity_k(&cluster, &experiment.grouped_barcodes)
}
}
let stability_scores = stability_results.rows().into_iter().map(|x| 1.0 - x.mean().unwrap()).collect::<Vec<f64>>();
let purity_scores = purity_results.rows().into_iter().map( |x| {
let mut v = x.to_vec();
v.retain(|x| *x != f64::NAN); // in purity_k f64::NAN is explicitly returned, so this works. Consider changing for conistency
return vmean(v)
} ).collect::<Vec<f64>>();
let cluster_ids: Vec<i64> = ref_cluster.grouped_barcodes.keys().cloned().collect::<Vec<i64>>() ;
let exp_param = ref_cluster.exp_name.clone();
return ExperimentResults{ exp_param,cluster_ids, stability_scores, purity_scores }
}
fn vmean(v:Vec<f64>) -> f64{
return v.iter().sum::<f64>() / v.len() as f64
}
fn purity_k(ref_bc_set: &HashSet<i64>, query_map: &HashMap<i64, HashSet<i64>>) -> f64{
let mut max_overlap = 0;
let mut max_overlap_key:i64 = -100000000;
for query_key in query_map.keys(){
let q_cluster_set = query_map.get(query_key).unwrap();
let overlap = ref_bc_set.intersection(q_cluster_set).count();
if overlap > max_overlap{
max_overlap = overlap;
max_overlap_key = *query_key;
}
}
if max_overlap_key == -100000000{
return f64::NAN;
} else{
return max_overlap as f64 / query_map.get(&max_overlap_key).unwrap().len() as f64
}
}
fn run_pairwise_calculation_threaded(experiment_list:&Vec<&ClusterResults>, nthreads:usize) ->Vec<ExperimentResults>{
let pool = rayon::ThreadPoolBuilder::new().num_threads(nthreads).build().unwrap();
let dummy_array: Vec<usize> = (0..experiment_list.len()).collect();
let res: Vec<ExperimentResults> = pool.install(|| dummy_array.into_par_iter()
.map(|i:usize| {
let ref_clust = experiment_list[i];
let mut query_clusts = experiment_list.clone();
query_clusts.remove(i);
return calculate_metrics(ref_clust, &query_clusts)
})
.collect()
);
return res
}
#[pyfunction]
fn pairwise_metric_calculation_fromdisk(file_glob: &str, nthreads:usize) -> Vec<ExperimentResults> {
let test_clusters_objs:Vec<ClusterResults> = glob(file_glob)
.expect("Failed to read glob pattern")
.map(|x|{let file = String::from(x.unwrap().to_str().expect(" | { // HashMap.insert returns None when new key is added
panic!("A duplicate key was added when making a ClusterResults; input data is not sorted by label")
} | conditional_block |
vt-3.rs | info per framebuffer
let pipeline_color_blend_attachment_infos = [vk::PipelineColorBlendAttachmentState {
blend_enable: vk::FALSE,
// not used because we disabled blending
src_color_blend_factor: vk::BlendFactor::ONE,
dst_color_blend_factor: vk::BlendFactor::ZERO,
color_blend_op: vk::BlendOp::ADD,
src_alpha_blend_factor: vk::BlendFactor::ONE,
dst_alpha_blend_factor: vk::BlendFactor::ZERO,
alpha_blend_op: vk::BlendOp::ADD,
        // the write mask is used even though blending is disabled: it selects
        // which color channels get written to the attachment
        color_write_mask: vk::ColorComponentFlags::R
            | vk::ColorComponentFlags::G
            | vk::ColorComponentFlags::B
            | vk::ColorComponentFlags::A,
}];
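    // For comparison (not what this code does): standard alpha blending would set
    //   blend_enable: vk::TRUE,
    //   src_color_blend_factor: vk::BlendFactor::SRC_ALPHA,
    //   dst_color_blend_factor: vk::BlendFactor::ONE_MINUS_SRC_ALPHA,
    //   color_blend_op: vk::BlendOp::ADD,
    // which computes out.rgb = src.a * src.rgb + (1 - src.a) * dst.rgb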
// color blending settings for the whole pipleine
let pipeline_color_blend_info = vk::PipelineColorBlendStateCreateInfo {
s_type: vk::StructureType::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineColorBlendStateCreateFlags::empty(),
logic_op_enable: vk::FALSE,
logic_op: vk::LogicOp::COPY, // optional
attachment_count: pipeline_color_blend_attachment_infos.len() as u32,
p_attachments: pipeline_color_blend_attachment_infos.as_ptr(),
blend_constants: [0.0, 0.0, 0.0, 0.0], // optional
};
// we don't use any shader uniforms so we leave it empty
let pipeline_layout_info = vk::PipelineLayoutCreateInfo {
s_type: vk::StructureType::PIPELINE_LAYOUT_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineLayoutCreateFlags::empty(),
set_layout_count: 0,
p_set_layouts: ptr::null(),
push_constant_range_count: 0,
p_push_constant_ranges: ptr::null(),
};
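    // If shader uniforms are added later, this is where descriptor set layouts and
    // push constant ranges would be plugged in, e.g. (hypothetical variables):
    //   set_layout_count: 1,
    //   p_set_layouts: &descriptor_set_layout,
    //   push_constant_range_count: 1,
    //   p_push_constant_ranges: &push_constant_range,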
let pipeline_layout = unsafe {
device
.create_pipeline_layout(&pipeline_layout_info, None)
.expect("Couldn't create pipeline layout!")
};
// render pass
let render_pass = create_render_pass(&device);
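    // create_render_pass is assumed to be defined elsewhere in this file; it should
    // describe a color attachment matching the swapchain format and a single
    // subpass that the pipeline created below is compatible with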
// pipeline
let pipeline_infos = [vk::GraphicsPipelineCreateInfo {
s_type: vk::StructureType::GRAPHICS_PIPELINE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineCreateFlags::empty(),
stage_count: shader_stages.len() as u32,
p_stages: shader_stages.as_ptr(),
p_vertex_input_state: &pipeline_vertex_input_info,
p_input_assembly_state: &pipeline_input_assembly_info,
p_tessellation_state: ptr::null(),
p_viewport_state: &viewport_state,
p_rasterization_state: &pipeline_rasterization_info,
p_multisample_state: &pipeline_multisample_info,
p_depth_stencil_state: ptr::null(),
p_color_blend_state: &pipeline_color_blend_info,
p_dynamic_state: ptr::null(),
layout: pipeline_layout,
render_pass,
subpass: 0,
base_pipeline_handle: vk::Pipeline::null(),
base_pipeline_index: 0,
}];
let pipeline = unsafe {
device.create_graphics_pipelines(vk::PipelineCache::null(), &pipeline_infos, None)
}
.expect("Couldn't create graphics pipeline")[0];
dbg![pipeline];
// shader modules only need to live long enough to create the pipeline
unsafe {
device.destroy_shader_module(frag_module, None);
device.destroy_shader_module(vert_module, None);
}
loop {
let mut exit = false;
events_loop.poll_events(|ev| match ev {
Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => exit = true,
_ => {}
});
if exit {
break;
}
}
// destroy objects
unsafe {
device.destroy_pipeline(pipeline, None);
device.destroy_pipeline_layout(pipeline_layout, None);
swapchain_creator.destroy_swapchain(swapchain, None);
device.destroy_render_pass(render_pass, None);
device.destroy_device(None);
surface_loader.destroy_surface(surface, None);
debug_utils_loader.destroy_debug_utils_messenger(debug_utils_messenger, None);
instance.destroy_instance(None);
}
}
fn create_shader_module<D: DeviceV1_0>(device: &D, code: Vec<u8>) -> vk::ShaderModule {
use ash::util::read_spv;
use std::io::Cursor;
let readable_code = read_spv(&mut Cursor::new(&code)).expect("Couldn't read SPV");
let shader_module_create_info = vk::ShaderModuleCreateInfo::builder().code(&readable_code);
unsafe {
device
.create_shader_module(&shader_module_create_info, None)
.expect("Couldn't create shader module")
}
}
fn extension_names() -> Vec<*const i8> {
// these are instance extensions
vec![
Surface::name().as_ptr(),
XlibSurface::name().as_ptr(),
DebugUtils::name().as_ptr(),
]
}
unsafe extern "system" fn vulkan_debug_utils_callback(
message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,
message_type: vk::DebugUtilsMessageTypeFlagsEXT,
p_callback_data: *const vk::DebugUtilsMessengerCallbackDataEXT,
_p_user_data: *mut c_void,
) -> vk::Bool32 {
let severity = match message_severity {
vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => "[Verbose]",
vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => "[Warning]",
vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => "[Error]",
vk::DebugUtilsMessageSeverityFlagsEXT::INFO => "[Info]",
_ => "[Unknown]",
};
let types = match message_type {
vk::DebugUtilsMessageTypeFlagsEXT::GENERAL => "[General]",
vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE => "[Performance]",
vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION => "[Validation]",
_ => "[Unknown]",
};
let message = CStr::from_ptr((*p_callback_data).p_message);
eprintln!("[Debug]{}{}{:?}", severity, types, message);
vk::FALSE
}
fn is_phys_dev_suitable(instance: &ash::Instance, phys_dev: &vk::PhysicalDevice) -> bool {
// gets a list of extensions supported by this device as vulkan strings,
// which don't implement PartialEq
let extension_properties = unsafe { instance.enumerate_device_extension_properties(*phys_dev) }
.expect("Couldn't enumerate device extension properties!");
// Now convert them into rust strings
let available_extension_names: Vec<String> = extension_properties
.iter()
.map(|ext| vk_to_string(&ext.extension_name))
.collect();
// make sure all required device extensions are supported by this device
get_device_extensions().iter().for_each(|name| {
available_extension_names
.iter()
.find(|ext| ext == name)
.expect(&format!("Couldn't find extension {}", name));
});
true
}
fn check_device_swapchain_caps(
surface_loader: &Surface,
physical_device: vk::PhysicalDevice,
surface: vk::SurfaceKHR,
) -> vk::Extent2D {
// returns the current dimensions of the swapchain
let capabilities = unsafe {
surface_loader.get_physical_device_surface_capabilities(physical_device, surface)
}
.expect("Couldn't get physical device surface capabilities");
let formats =
unsafe { surface_loader.get_physical_device_surface_formats(physical_device, surface) }
.expect("Couldn't get physical device surface formats");
let present_modes = unsafe {
surface_loader.get_physical_device_surface_present_modes(physical_device, surface)
}
.expect("Couldn't get physical device surface present modes");
    // we request 3 swapchain images so rendering can continue on one image
    // while another is being presented (triple buffering)
    // note: a max_image_count of 0 means there is no upper limit
    assert!(
        capabilities.min_image_count <= 3
            && (capabilities.max_image_count == 0 || capabilities.max_image_count >= 3)
    );
formats
.iter()
.find(|fmt| fmt.format == vk::Format::B8G8R8A8_UNORM)
.expect("Swapchain doesn't support B8G8R8A8_UNORM!");
assert!(present_modes.contains(&vk::PresentModeKHR::IMMEDIATE));
capabilities.current_extent
}
// many of these functions are ripped from https://github.com/bwasty/vulkan-tutorial-rs
// only works on linux
unsafe fn create_surface<E: EntryV1_0, I: InstanceV1_0>(
entry: &E,
instance: &I,
window: &winit::Window,
) -> Result<vk::SurfaceKHR, vk::Result> | {
use winit::os::unix::WindowExt;
let x11_display = window.get_xlib_display().unwrap();
let x11_window = window.get_xlib_window().unwrap();
let x11_create_info = vk::XlibSurfaceCreateInfoKHR {
s_type: vk::StructureType::XLIB_SURFACE_CREATE_INFO_KHR,
p_next: ptr::null(),
flags: Default::default(),
window: x11_window as vk::Window,
dpy: x11_display as *mut vk::Display,
};
let xlib_surface_loader = XlibSurface::new(entry, instance);
xlib_surface_loader.create_xlib_surface(&x11_create_info, None)
} | identifier_body |
|
vt-3.rs | support! It's possible that a separate queue with surface support exists, but the current implementation is not capable of finding one.");
}
// get logical device
let device_queue_create_info = vk::DeviceQueueCreateInfo {
s_type: vk::StructureType::DEVICE_QUEUE_CREATE_INFO,
p_next: ptr::null(), | queue_family_index,
queue_count: 1,
p_queue_priorities: [1.0].as_ptr(),
};
let device_extensions_raw = get_device_extensions_raw();
let device_create_info = vk::DeviceCreateInfo {
s_type: vk::StructureType::DEVICE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::DeviceCreateFlags::empty(),
queue_create_info_count: 1,
p_queue_create_infos: [device_queue_create_info].as_ptr(),
// not used by Vulkan anymore
enabled_layer_count: 0,
pp_enabled_layer_names: ptr::null(),
        // these are the device extensions we want enabled
enabled_extension_count: device_extensions_raw.len() as u32,
pp_enabled_extension_names: device_extensions_raw.as_ptr(),
p_enabled_features: &vk::PhysicalDeviceFeatures::builder().build(),
};
let device = unsafe {
instance
.create_device(physical_device, &device_create_info, None)
.expect("Couldn't create device")
};
// get queue (0 = take first queue)
let queue = unsafe { device.get_device_queue(queue_family_index, 0) };
// check device swapchain capabilties (not just that it has the extension,
// also formats and stuff like that)
// also returns what dimensions the swapchain should initially be created at
let starting_dims = check_device_swapchain_caps(&surface_loader, physical_device, surface);
// create swapchain
let sc_format = vk::SurfaceFormatKHR {
format: SWAPCHAIN_FORMAT,
color_space: vk::ColorSpaceKHR::default(),
};
let sc_present_mode = vk::PresentModeKHR::IMMEDIATE;
let swapchain_create_info = vk::SwapchainCreateInfoKHR {
s_type: vk::StructureType::SWAPCHAIN_CREATE_INFO_KHR,
p_next: ptr::null(),
flags: vk::SwapchainCreateFlagsKHR::empty(),
surface: surface,
min_image_count: 3,
image_format: SWAPCHAIN_FORMAT,
image_color_space: vk::ColorSpaceKHR::SRGB_NONLINEAR,
image_extent: starting_dims,
image_array_layers: 1,
image_usage: vk::ImageUsageFlags::COLOR_ATTACHMENT,
image_sharing_mode: vk::SharingMode::EXCLUSIVE,
queue_family_index_count: 0,
p_queue_family_indices: ptr::null(),
pre_transform: vk::SurfaceTransformFlagsKHR::IDENTITY,
composite_alpha: vk::CompositeAlphaFlagsKHR::OPAQUE,
present_mode: vk::PresentModeKHR::IMMEDIATE,
clipped: vk::TRUE,
old_swapchain: vk::SwapchainKHR::null(),
};
let swapchain_creator = Swapchain::new(&instance, &device);
let swapchain = unsafe { swapchain_creator.create_swapchain(&swapchain_create_info, None) }
.expect("Couldn't create swapchain");
let images = unsafe { swapchain_creator.get_swapchain_images(swapchain) }
.expect("Couldn't get swapchain images");
let image_views: Vec<_> = images
.iter()
.map(|image| {
let iv_info = vk::ImageViewCreateInfo {
s_type: vk::StructureType::IMAGE_VIEW_CREATE_INFO,
p_next: ptr::null(),
flags: vk::ImageViewCreateFlags::empty(),
image: *image,
view_type: vk::ImageViewType::TYPE_2D,
format: SWAPCHAIN_FORMAT,
components: vk::ComponentMapping {
r: vk::ComponentSwizzle::IDENTITY,
g: vk::ComponentSwizzle::IDENTITY,
b: vk::ComponentSwizzle::IDENTITY,
a: vk::ComponentSwizzle::IDENTITY,
},
subresource_range: vk::ImageSubresourceRange {
aspect_mask: vk::ImageAspectFlags::COLOR,
base_mip_level: 0,
level_count: 1,
base_array_layer: 0,
layer_count: 1,
},
            };
            // create the image view for this swapchain image
            unsafe {
                device
                    .create_image_view(&iv_info, None)
                    .expect("Couldn't create image view")
            }
        })
.collect();
// shaders
let frag_code = read_shader_code(&relative_path("shaders/vt-3/triangle.frag.spv"));
let vert_code = read_shader_code(&relative_path("shaders/vt-3/triangle.vert.spv"));
let frag_module = create_shader_module(&device, frag_code);
let vert_module = create_shader_module(&device, vert_code);
let entry_point = CString::new("main").unwrap();
let vert_stage_info = vk::PipelineShaderStageCreateInfo {
s_type: vk::StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineShaderStageCreateFlags::empty(),
stage: vk::ShaderStageFlags::VERTEX,
module: vert_module,
p_name: entry_point.as_ptr(),
p_specialization_info: &vk::SpecializationInfo::default(),
};
let frag_stage_info = vk::PipelineShaderStageCreateInfo {
s_type: vk::StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineShaderStageCreateFlags::empty(),
stage: vk::ShaderStageFlags::FRAGMENT,
module: frag_module,
p_name: entry_point.as_ptr(),
p_specialization_info: &vk::SpecializationInfo::default(),
};
let shader_stages = [vert_stage_info, frag_stage_info];
// fixed-function pipeline settings
// a.k.a vertex format
// we don't really have a format since they are hard-coded into the vertex
// shader for now
let pipeline_vertex_input_info = vk::PipelineVertexInputStateCreateInfo::default();
let pipeline_input_assembly_info = vk::PipelineInputAssemblyStateCreateInfo {
s_type: vk::StructureType::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineInputAssemblyStateCreateFlags::empty(),
topology: vk::PrimitiveTopology::TRIANGLE_LIST,
primitive_restart_enable: vk::FALSE,
};
let viewports = [vk::Viewport {
x: 0.0,
y: 0.0,
width: starting_dims.width as f32,
height: starting_dims.height as f32,
min_depth: 0.0,
max_depth: 1.0,
}];
let scissors = [vk::Rect2D {
offset: vk::Offset2D { x: 0, y: 0 },
extent: starting_dims,
}];
let viewport_state = vk::PipelineViewportStateCreateInfo {
s_type: vk::StructureType::PIPELINE_VIEWPORT_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineViewportStateCreateFlags::empty(),
viewport_count: viewports.len() as u32,
p_viewports: viewports.as_ptr(),
scissor_count: scissors.len() as u32,
p_scissors: scissors.as_ptr(),
};
let pipeline_rasterization_info = vk::PipelineRasterizationStateCreateInfo {
s_type: vk::StructureType::PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineRasterizationStateCreateFlags::empty(),
depth_clamp_enable: vk::FALSE,
rasterizer_discard_enable: vk::FALSE,
polygon_mode: vk::PolygonMode::FILL,
cull_mode: vk::CullModeFlags::BACK,
front_face: vk::FrontFace::CLOCKWISE,
depth_bias_enable: vk::FALSE,
depth_bias_constant_factor: 0.0,
depth_bias_clamp: 0.0,
depth_bias_slope_factor: 0.0,
line_width: 1.0,
};
let pipeline_multisample_info = vk::PipelineMultisampleStateCreateInfo {
s_type: vk::StructureType::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineMultisampleStateCreateFlags::empty(),
rasterization_samples: vk::SampleCountFlags::TYPE_1,
sample_shading_enable: vk::FALSE,
min_sample_shading: 1.0,
p_sample_mask: ptr::null(),
alpha_to_coverage_enable: vk::FALSE,
alpha_to_one_enable: vk::FALSE,
};
// color blending info per framebuffer
let pipeline_color_blend_attachment_infos = [vk::PipelineColorBlendAttachmentState {
blend_enable: vk::FALSE,
// not used because we disabled blending
src_color_blend_factor: vk::BlendFactor::ONE,
dst_color_blend_factor: vk::BlendFactor::ZERO,
color_blend_op: vk::BlendOp::ADD,
src_alpha_blend_factor: vk::BlendFactor::ONE,
dst_alpha_blend_factor: vk::BlendFactor::ZERO,
alpha_blend_op: vk::BlendOp::ADD,
        // is used (even with blending disabled); the component flags are
        // bitflags, so they must be combined with `|`
        color_write_mask: vk::ColorComponentFlags::R
            | vk::ColorComponentFlags::G
            | vk::ColorComponentFlags::B
            | vk::ColorComponentFlags::A,
| flags: vk::DeviceQueueCreateFlags::empty(), | random_line_split |
vt-3.rs | })
.collect();
// shaders
let frag_code = read_shader_code(&relative_path("shaders/vt-3/triangle.frag.spv"));
let vert_code = read_shader_code(&relative_path("shaders/vt-3/triangle.vert.spv"));
let frag_module = create_shader_module(&device, frag_code);
let vert_module = create_shader_module(&device, vert_code);
let entry_point = CString::new("main").unwrap();
let vert_stage_info = vk::PipelineShaderStageCreateInfo {
s_type: vk::StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineShaderStageCreateFlags::empty(),
stage: vk::ShaderStageFlags::VERTEX,
module: vert_module,
p_name: entry_point.as_ptr(),
p_specialization_info: &vk::SpecializationInfo::default(),
};
let frag_stage_info = vk::PipelineShaderStageCreateInfo {
s_type: vk::StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineShaderStageCreateFlags::empty(),
stage: vk::ShaderStageFlags::FRAGMENT,
module: frag_module,
p_name: entry_point.as_ptr(),
p_specialization_info: &vk::SpecializationInfo::default(),
};
let shader_stages = [vert_stage_info, frag_stage_info];
// fixed-function pipeline settings
// a.k.a vertex format
// we don't really have a format since they are hard-coded into the vertex
// shader for now
let pipeline_vertex_input_info = vk::PipelineVertexInputStateCreateInfo::default();
let pipeline_input_assembly_info = vk::PipelineInputAssemblyStateCreateInfo {
s_type: vk::StructureType::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineInputAssemblyStateCreateFlags::empty(),
topology: vk::PrimitiveTopology::TRIANGLE_LIST,
primitive_restart_enable: vk::FALSE,
};
let viewports = [vk::Viewport {
x: 0.0,
y: 0.0,
width: starting_dims.width as f32,
height: starting_dims.height as f32,
min_depth: 0.0,
max_depth: 1.0,
}];
let scissors = [vk::Rect2D {
offset: vk::Offset2D { x: 0, y: 0 },
extent: starting_dims,
}];
let viewport_state = vk::PipelineViewportStateCreateInfo {
s_type: vk::StructureType::PIPELINE_VIEWPORT_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineViewportStateCreateFlags::empty(),
viewport_count: viewports.len() as u32,
p_viewports: viewports.as_ptr(),
scissor_count: scissors.len() as u32,
p_scissors: scissors.as_ptr(),
};
let pipeline_rasterization_info = vk::PipelineRasterizationStateCreateInfo {
s_type: vk::StructureType::PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineRasterizationStateCreateFlags::empty(),
depth_clamp_enable: vk::FALSE,
rasterizer_discard_enable: vk::FALSE,
polygon_mode: vk::PolygonMode::FILL,
cull_mode: vk::CullModeFlags::BACK,
front_face: vk::FrontFace::CLOCKWISE,
depth_bias_enable: vk::FALSE,
depth_bias_constant_factor: 0.0,
depth_bias_clamp: 0.0,
depth_bias_slope_factor: 0.0,
line_width: 1.0,
};
let pipeline_multisample_info = vk::PipelineMultisampleStateCreateInfo {
s_type: vk::StructureType::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineMultisampleStateCreateFlags::empty(),
rasterization_samples: vk::SampleCountFlags::TYPE_1,
sample_shading_enable: vk::FALSE,
min_sample_shading: 1.0,
p_sample_mask: ptr::null(),
alpha_to_coverage_enable: vk::FALSE,
alpha_to_one_enable: vk::FALSE,
};
// color blending info per framebuffer
let pipeline_color_blend_attachment_infos = [vk::PipelineColorBlendAttachmentState {
blend_enable: vk::FALSE,
// not used because we disabled blending
src_color_blend_factor: vk::BlendFactor::ONE,
dst_color_blend_factor: vk::BlendFactor::ZERO,
color_blend_op: vk::BlendOp::ADD,
src_alpha_blend_factor: vk::BlendFactor::ONE,
dst_alpha_blend_factor: vk::BlendFactor::ZERO,
alpha_blend_op: vk::BlendOp::ADD,
        // is used (even with blending disabled); the component flags are
        // bitflags, so they must be combined with `|`
        color_write_mask: vk::ColorComponentFlags::R
            | vk::ColorComponentFlags::G
            | vk::ColorComponentFlags::B
            | vk::ColorComponentFlags::A,
}];
    // color blending settings for the whole pipeline
let pipeline_color_blend_info = vk::PipelineColorBlendStateCreateInfo {
s_type: vk::StructureType::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineColorBlendStateCreateFlags::empty(),
logic_op_enable: vk::FALSE,
logic_op: vk::LogicOp::COPY, // optional
attachment_count: pipeline_color_blend_attachment_infos.len() as u32,
p_attachments: pipeline_color_blend_attachment_infos.as_ptr(),
blend_constants: [0.0, 0.0, 0.0, 0.0], // optional
};
// we don't use any shader uniforms so we leave it empty
let pipeline_layout_info = vk::PipelineLayoutCreateInfo {
s_type: vk::StructureType::PIPELINE_LAYOUT_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineLayoutCreateFlags::empty(),
set_layout_count: 0,
p_set_layouts: ptr::null(),
push_constant_range_count: 0,
p_push_constant_ranges: ptr::null(),
};
let pipeline_layout = unsafe {
device
.create_pipeline_layout(&pipeline_layout_info, None)
.expect("Couldn't create pipeline layout!")
};
// render pass
let render_pass = create_render_pass(&device);
// pipeline
let pipeline_infos = [vk::GraphicsPipelineCreateInfo {
s_type: vk::StructureType::GRAPHICS_PIPELINE_CREATE_INFO,
p_next: ptr::null(),
flags: vk::PipelineCreateFlags::empty(),
stage_count: shader_stages.len() as u32,
p_stages: shader_stages.as_ptr(),
p_vertex_input_state: &pipeline_vertex_input_info,
p_input_assembly_state: &pipeline_input_assembly_info,
p_tessellation_state: ptr::null(),
p_viewport_state: &viewport_state,
p_rasterization_state: &pipeline_rasterization_info,
p_multisample_state: &pipeline_multisample_info,
p_depth_stencil_state: ptr::null(),
p_color_blend_state: &pipeline_color_blend_info,
p_dynamic_state: ptr::null(),
layout: pipeline_layout,
render_pass,
subpass: 0,
base_pipeline_handle: vk::Pipeline::null(),
base_pipeline_index: 0,
}];
let pipeline = unsafe {
device.create_graphics_pipelines(vk::PipelineCache::null(), &pipeline_infos, None)
}
.expect("Couldn't create graphics pipeline")[0];
dbg![pipeline];
// shader modules only need to live long enough to create the pipeline
unsafe {
device.destroy_shader_module(frag_module, None);
device.destroy_shader_module(vert_module, None);
}
loop {
let mut exit = false;
events_loop.poll_events(|ev| match ev {
Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => exit = true,
_ => {}
});
if exit {
break;
}
}
// destroy objects
unsafe {
device.destroy_pipeline(pipeline, None);
device.destroy_pipeline_layout(pipeline_layout, None);
swapchain_creator.destroy_swapchain(swapchain, None);
device.destroy_render_pass(render_pass, None);
device.destroy_device(None);
surface_loader.destroy_surface(surface, None);
debug_utils_loader.destroy_debug_utils_messenger(debug_utils_messenger, None);
instance.destroy_instance(None);
}
}
fn create_shader_module<D: DeviceV1_0>(device: &D, code: Vec<u8>) -> vk::ShaderModule {
use ash::util::read_spv;
use std::io::Cursor;
let readable_code = read_spv(&mut Cursor::new(&code)).expect("Couldn't read SPV");
let shader_module_create_info = vk::ShaderModuleCreateInfo::builder().code(&readable_code);
unsafe {
device
.create_shader_module(&shader_module_create_info, None)
.expect("Couldn't create shader module")
}
}
fn extension_names() -> Vec<*const i8> {
// these are instance extensions
vec![
Surface::name().as_ptr(),
XlibSurface::name().as_ptr(),
DebugUtils::name().as_ptr(),
]
}
unsafe extern "system" fn | vulkan_debug_utils_callback | identifier_name |
|
hydrophoneApi.go | confirm/send/team/invite
send.Handle("/team/invite", varsHandler(a.SendTeamInvite)).Methods("POST")
// POST /confirm/send/team/monitoring/{teamid}/{userid}
send.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.SendMonitoringTeamInvite)).Methods("POST")
// POST /confirm/send/team/role/:userid - add or remove admin role to userid
send.Handle("/team/role/{userid}", varsHandler(a.UpdateTeamRole)).Methods("PUT")
// DELETE /confirm/send/team/leave/:teamid/:userid - delete member
send.Handle("/team/leave/{teamid}/{userid}", varsHandler(a.DeleteTeamMember)).Methods("DELETE")
// POST /confirm/send/inform/:userid
send.Handle("/inform/{userid}", varsHandler(a.sendSignUpInformation)).Methods("POST")
send.Handle("/pin-reset/{userid}", varsHandler(a.SendPinReset)).Methods("POST")
// PUT /confirm/accept/forgot/
// PUT /confirm/accept/invite/:userid/:invited_by
accept := rtr.PathPrefix("/accept").Subrouter()
accept.Handle("/forgot", varsHandler(a.acceptPassword)).Methods("PUT")
accept.Handle("/invite/{userid}/{invitedby}", varsHandler(a.AcceptInvite)).Methods("PUT")
// PUT /confirm/accept/team/invite
accept.Handle("/team/invite", varsHandler(a.AcceptTeamNotifs)).Methods("PUT")
// PUT /confirm/accept/team/monitoring/{teamid}/{userid}
accept.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.AcceptMonitoringInvite)).Methods("PUT")
// GET /confirm/invite/:userid
rtr.Handle("/invite/{userid}", varsHandler(a.GetSentInvitations)).Methods("GET")
// GET /confirm/invitations/:userid
rtr.Handle("/invitations/{userid}", varsHandler(a.GetReceivedInvitations)).Methods("GET")
// GET /confirm/teams/:teamId/patients/:patientId/invite
rtr.Handle("/teams/{teamId}/patients/{patientId}/invite", varsHandler(a.GetPatientTeamPendingInvite)).Methods("GET")
// PUT /confirm/dismiss/invite/:userid/:invited_by
dismiss := rtr.PathPrefix("/dismiss").Subrouter()
dismiss.Handle("/invite/{userid}/{invitedby}",
varsHandler(a.DismissInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/invite/{teamid}
dismiss.Handle("/team/invite/{teamid}", varsHandler(a.DismissTeamInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/monitoring/{teamid}/{userid}
dismiss.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.DismissMonitoringInvite)).Methods("PUT")
rtr.Handle("/cancel/invite", varsHandler(a.CancelAnyInvite)).Methods("POST")
if a.Config.EnableTestRoutes {
rtr.Handle("/cancel/all/{email}", varsHandler(a.CancelAllInvites)).Methods("POST")
}
// PUT /confirm/:userid/invited/:invited_address
rtr.Handle("/{userid}/invited/{invited_address}", varsHandler(a.CancelInvite)).Methods("PUT")
// POST /confirm/notifications/:topic_label
rtr.Handle("/notifications/{topic}", varsHandler(a.CreateNotification)).Methods("POST")
rtr.Use(muxMiddleware.NativeTraceSessionMiddleware)
}
func (h varsHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
vars := mux.Vars(req)
h(res, req, vars)
}
func getSessionToken(req *http.Request) string {
// time:= c.Params.ByName("time")
sessionToken := req.Header.Get(token.TP_SESSION_TOKEN)
if sessionToken != "" {
return sessionToken
}
sessionToken = strings.Trim(req.Header.Get("Authorization"), " ")
if sessionToken != "" && strings.HasPrefix(sessionToken, "Bearer ") {
tokenParts := strings.Split(sessionToken, " ")
sessionToken = tokenParts[1]
}
return sessionToken
}
// @Summary Get the api status
// @Description Get the api status
// @ID hydrophone-api-getstatus
// @Accept json
// @Produce json
// @Success 200 {string} string "OK"
// @Failure 500 {string} string "error description"
// @Router /status [get]
func (a *Api) GetStatus(res http.ResponseWriter, req *http.Request) {
var s status.ApiStatus
if err := a.Store.Ping(); err != nil {
log.Printf("Error getting status [%v]", err)
s = status.NewApiStatus(http.StatusInternalServerError, err.Error())
} else {
s = status.NewApiStatus(http.StatusOK, "OK")
}
a.sendModelAsResWithStatus(res, s, s.Status.Code)
return
}
//Save this confirmation or
//write an error if it all goes wrong
func (a *Api) addOrUpdateConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) bool {
if err := a.Store.UpsertConfirmation(ctx, conf); err != nil {
log.Printf("Error saving the confirmation [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_SAVING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return false
}
return true
}
//Find this confirmation
//write error if it fails
func (a *Api) findExistingConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) (*models.Confirmation, error) {
if found, err := a.Store.FindConfirmation(ctx, conf); err != nil {
log.Printf("findExistingConfirmation: [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
return nil, statusErr
} else {
return found, nil
}
}
//Add the creator's profile to the confirmation
//write error if it fails
func (a *Api) addProfile(ctx context.Context, conf *models.Confirmation) error {
if conf.CreatorId != "" {
doc, err := a.seagull.GetCollections(ctx, conf.CreatorId, []string{"profile"}, a.sl.TokenProvide())
if err != nil {
log.Printf("error getting the creators profile [%v] ", err)
return err
}
if doc.Profile == nil {
err := errors.New("profile is empty")
log.Printf("error getting the creators profile [%v] ", err)
return err
}
conf.Creator.Profile = &models.Profile{FullName: doc.Profile.FullName}
conf.Creator.UserId = conf.CreatorId
}
return nil
}
func (a *Api) getUserLanguage(userid string, req *http.Request, res http.ResponseWriter) string {
// let's get the invitee user preferences
if seagulDoc, err := a.seagull.GetCollections(req.Context(), userid, []string{"preferences"}, a.sl.TokenProvide()); err != nil {
a.logger.Errorf("Preferences not availlable for user %s. Email will be sent using default language. Error: [%s]", userid, err)
} else if seagulDoc.Preferences != nil && seagulDoc.Preferences.DisplayLanguageCode != "" {
return seagulDoc.Preferences.DisplayLanguageCode
}
return GetUserChosenLanguage(req)
}
//Find these confirmations
//write error if fails or write no-content if it doesn't exist
func (a *Api) checkFoundConfirmations(ctx context.Context, res http.ResponseWriter, results []*models.Confirmation, err error) []*models.Confirmation {
if err != nil {
log.Println("Error finding confirmations ", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return nil
} else if results == nil || len(results) == 0 {
statusErr := &status.StatusError{status.NewStatus(http.StatusNotFound, STATUS_NOT_FOUND)}
//log.Println("No confirmations were found ", statusErr.Error())
a.sendModelAsResWithStatus(res, statusErr, http.StatusNotFound)
return nil
} else {
for i := range results {
if err = a.addProfile(ctx, results[i]); err != nil {
//report and move on
log.Println("Error getting profile", err.Error())
}
}
return results
}
}
//Generate a notification from the given confirmation, write the error if it fails
func (a *Api) createAndSendNotification(req *http.Request, conf *models.Confirmation, content map[string]string, lang string) bool | {
log.Printf("trying notification with template '%s' to %s with language '%s'", conf.TemplateName, conf.Email, lang)
// Get the template name based on the requested communication type
templateName := conf.TemplateName
if templateName == models.TemplateNameUndefined {
switch conf.Type {
case models.TypePasswordReset:
templateName = models.TemplateNamePasswordReset
case models.TypePatientPasswordReset:
templateName = models.TemplateNamePatientPasswordReset
case models.TypePatientPasswordInfo:
templateName = models.TemplateNamePatientPasswordInfo
case models.TypeCareteamInvite:
templateName = models.TemplateNameCareteamInvite
case models.TypeMedicalTeamInvite:
templateName = models.TemplateNameMedicalteamInvite
case models.TypeMedicalTeamPatientInvite:
templateName = models.TemplateNameMedicalteamPatientInvite
case models.TypeMedicalTeamMonitoringInvite: | identifier_body |
|
hydrophoneApi.go | sl,
perms: perms,
auth: auth,
seagull: seagull,
medicalData: medicalData,
templates: templates,
LanguageBundle: nil,
logger: logger,
}
}
func (a *Api) getWebURL(req *http.Request) string {
if a.Config.WebURL == "" {
host := req.Header.Get("Host")
return a.Config.Protocol + "://" + host
}
return a.Config.WebURL
}
func (a *Api) SetHandlers(prefix string, rtr *mux.Router) {
rtr.HandleFunc("/status", a.GetStatus).Methods("GET")
rtr.Handle("/sanity_check/{userid}", varsHandler(a.sendSanityCheckEmail)).Methods("POST")
// POST /confirm/send/forgot/:useremail
// POST /confirm/send/invite/:userid
send := rtr.PathPrefix("/send").Subrouter()
send.Handle("/forgot/{useremail}", varsHandler(a.passwordReset)).Methods("POST")
send.Handle("/invite/{userid}", varsHandler(a.SendInvite)).Methods("POST")
// POST /confirm/send/team/invite
send.Handle("/team/invite", varsHandler(a.SendTeamInvite)).Methods("POST")
// POST /confirm/send/team/monitoring/{teamid}/{userid}
send.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.SendMonitoringTeamInvite)).Methods("POST")
// POST /confirm/send/team/role/:userid - add or remove admin role to userid
send.Handle("/team/role/{userid}", varsHandler(a.UpdateTeamRole)).Methods("PUT")
// DELETE /confirm/send/team/leave/:teamid/:userid - delete member
send.Handle("/team/leave/{teamid}/{userid}", varsHandler(a.DeleteTeamMember)).Methods("DELETE")
// POST /confirm/send/inform/:userid
send.Handle("/inform/{userid}", varsHandler(a.sendSignUpInformation)).Methods("POST")
send.Handle("/pin-reset/{userid}", varsHandler(a.SendPinReset)).Methods("POST")
// PUT /confirm/accept/forgot/
// PUT /confirm/accept/invite/:userid/:invited_by
accept := rtr.PathPrefix("/accept").Subrouter()
accept.Handle("/forgot", varsHandler(a.acceptPassword)).Methods("PUT")
accept.Handle("/invite/{userid}/{invitedby}", varsHandler(a.AcceptInvite)).Methods("PUT")
// PUT /confirm/accept/team/invite
accept.Handle("/team/invite", varsHandler(a.AcceptTeamNotifs)).Methods("PUT")
// PUT /confirm/accept/team/monitoring/{teamid}/{userid}
accept.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.AcceptMonitoringInvite)).Methods("PUT")
// GET /confirm/invite/:userid
rtr.Handle("/invite/{userid}", varsHandler(a.GetSentInvitations)).Methods("GET")
// GET /confirm/invitations/:userid
rtr.Handle("/invitations/{userid}", varsHandler(a.GetReceivedInvitations)).Methods("GET")
// GET /confirm/teams/:teamId/patients/:patientId/invite
rtr.Handle("/teams/{teamId}/patients/{patientId}/invite", varsHandler(a.GetPatientTeamPendingInvite)).Methods("GET")
// PUT /confirm/dismiss/invite/:userid/:invited_by
dismiss := rtr.PathPrefix("/dismiss").Subrouter()
dismiss.Handle("/invite/{userid}/{invitedby}",
varsHandler(a.DismissInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/invite/{teamid}
dismiss.Handle("/team/invite/{teamid}", varsHandler(a.DismissTeamInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/monitoring/{teamid}/{userid}
dismiss.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.DismissMonitoringInvite)).Methods("PUT")
rtr.Handle("/cancel/invite", varsHandler(a.CancelAnyInvite)).Methods("POST")
if a.Config.EnableTestRoutes {
rtr.Handle("/cancel/all/{email}", varsHandler(a.CancelAllInvites)).Methods("POST")
}
// PUT /confirm/:userid/invited/:invited_address
rtr.Handle("/{userid}/invited/{invited_address}", varsHandler(a.CancelInvite)).Methods("PUT")
// POST /confirm/notifications/:topic_label
rtr.Handle("/notifications/{topic}", varsHandler(a.CreateNotification)).Methods("POST")
rtr.Use(muxMiddleware.NativeTraceSessionMiddleware)
}
func (h varsHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
vars := mux.Vars(req)
h(res, req, vars)
}
func getSessionToken(req *http.Request) string {
// time:= c.Params.ByName("time")
sessionToken := req.Header.Get(token.TP_SESSION_TOKEN)
if sessionToken != "" {
return sessionToken
}
sessionToken = strings.Trim(req.Header.Get("Authorization"), " ")
if sessionToken != "" && strings.HasPrefix(sessionToken, "Bearer ") {
tokenParts := strings.Split(sessionToken, " ")
sessionToken = tokenParts[1]
}
return sessionToken
}
// @Summary Get the api status
// @Description Get the api status
// @ID hydrophone-api-getstatus
// @Accept json
// @Produce json
// @Success 200 {string} string "OK"
// @Failure 500 {string} string "error description"
// @Router /status [get]
func (a *Api) GetStatus(res http.ResponseWriter, req *http.Request) {
var s status.ApiStatus
if err := a.Store.Ping(); err != nil {
log.Printf("Error getting status [%v]", err)
s = status.NewApiStatus(http.StatusInternalServerError, err.Error())
} else {
s = status.NewApiStatus(http.StatusOK, "OK")
}
a.sendModelAsResWithStatus(res, s, s.Status.Code)
return
}
//Save this confirmation or
//write an error if it all goes wrong
func (a *Api) addOrUpdateConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) bool {
if err := a.Store.UpsertConfirmation(ctx, conf); err != nil {
log.Printf("Error saving the confirmation [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_SAVING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return false
}
return true
}
//Find this confirmation
//write error if it fails
func (a *Api) findExistingConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) (*models.Confirmation, error) {
if found, err := a.Store.FindConfirmation(ctx, conf); err != nil {
log.Printf("findExistingConfirmation: [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
return nil, statusErr
} else {
return found, nil
}
}
//Add the creator's profile to the confirmation
//write error if it fails
func (a *Api) addProfile(ctx context.Context, conf *models.Confirmation) error {
if conf.CreatorId != "" {
doc, err := a.seagull.GetCollections(ctx, conf.CreatorId, []string{"profile"}, a.sl.TokenProvide())
if err != nil {
log.Printf("error getting the creators profile [%v] ", err)
return err
}
if doc.Profile == nil {
err := errors.New("profile is empty")
log.Printf("error getting the creators profile [%v] ", err)
return err
}
conf.Creator.Profile = &models.Profile{FullName: doc.Profile.FullName}
conf.Creator.UserId = conf.CreatorId
}
return nil
}
func (a *Api) getUserLanguage(userid string, req *http.Request, res http.ResponseWriter) string {
// let's get the invitee user preferences
if seagulDoc, err := a.seagull.GetCollections(req.Context(), userid, []string{"preferences"}, a.sl.TokenProvide()); err != nil {
a.logger.Errorf("Preferences not availlable for user %s. Email will be sent using default language. Error: [%s]", userid, err)
} else if seagulDoc.Preferences != nil && seagulDoc.Preferences.DisplayLanguageCode != "" {
return seagulDoc.Preferences.DisplayLanguageCode
}
return GetUserChosenLanguage(req)
}
//Find these confirmations
//write error if fails or write no-content if it doesn't exist
func (a *Api) checkFoundConfirmations(ctx context.Context, res http.ResponseWriter, results []*models.Confirmation, err error) []*models.Confirmation {
if err != nil {
log.Println("Error finding confirmations ", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return nil
} else if results == nil || len(results) == 0 {
statusErr := &status.StatusError{status.NewStatus(http.StatusNotFound, STATUS_NOT_FOUND)}
//log.Println("No confirmations were found ", statusErr.Error())
a.sendModelAsResWithStatus(res, statusErr, http.StatusNotFound)
return nil
} else {
for i := range results | {
if err = a.addProfile(ctx, results[i]); err != nil {
//report and move on
log.Println("Error getting profile", err.Error())
}
} | conditional_block |
|
hydrophoneApi.go | func (a *Api) SetHandlers(prefix string, rtr *mux.Router) {
rtr.HandleFunc("/status", a.GetStatus).Methods("GET")
rtr.Handle("/sanity_check/{userid}", varsHandler(a.sendSanityCheckEmail)).Methods("POST")
// POST /confirm/send/forgot/:useremail
// POST /confirm/send/invite/:userid
send := rtr.PathPrefix("/send").Subrouter()
send.Handle("/forgot/{useremail}", varsHandler(a.passwordReset)).Methods("POST")
send.Handle("/invite/{userid}", varsHandler(a.SendInvite)).Methods("POST")
// POST /confirm/send/team/invite
send.Handle("/team/invite", varsHandler(a.SendTeamInvite)).Methods("POST")
// POST /confirm/send/team/monitoring/{teamid}/{userid}
send.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.SendMonitoringTeamInvite)).Methods("POST")
// POST /confirm/send/team/role/:userid - add or remove admin role to userid
send.Handle("/team/role/{userid}", varsHandler(a.UpdateTeamRole)).Methods("PUT")
// DELETE /confirm/send/team/leave/:teamid/:userid - delete member
send.Handle("/team/leave/{teamid}/{userid}", varsHandler(a.DeleteTeamMember)).Methods("DELETE")
// POST /confirm/send/inform/:userid
send.Handle("/inform/{userid}", varsHandler(a.sendSignUpInformation)).Methods("POST")
send.Handle("/pin-reset/{userid}", varsHandler(a.SendPinReset)).Methods("POST")
// PUT /confirm/accept/forgot/
// PUT /confirm/accept/invite/:userid/:invited_by
accept := rtr.PathPrefix("/accept").Subrouter()
accept.Handle("/forgot", varsHandler(a.acceptPassword)).Methods("PUT")
accept.Handle("/invite/{userid}/{invitedby}", varsHandler(a.AcceptInvite)).Methods("PUT")
// PUT /confirm/accept/team/invite
accept.Handle("/team/invite", varsHandler(a.AcceptTeamNotifs)).Methods("PUT")
// PUT /confirm/accept/team/monitoring/{teamid}/{userid}
accept.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.AcceptMonitoringInvite)).Methods("PUT")
// GET /confirm/invite/:userid
rtr.Handle("/invite/{userid}", varsHandler(a.GetSentInvitations)).Methods("GET")
// GET /confirm/invitations/:userid
rtr.Handle("/invitations/{userid}", varsHandler(a.GetReceivedInvitations)).Methods("GET")
// GET /confirm/teams/:teamId/patients/:patientId/invite
rtr.Handle("/teams/{teamId}/patients/{patientId}/invite", varsHandler(a.GetPatientTeamPendingInvite)).Methods("GET")
// PUT /confirm/dismiss/invite/:userid/:invited_by
dismiss := rtr.PathPrefix("/dismiss").Subrouter()
dismiss.Handle("/invite/{userid}/{invitedby}",
varsHandler(a.DismissInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/invite/{teamid}
dismiss.Handle("/team/invite/{teamid}", varsHandler(a.DismissTeamInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/monitoring/{teamid}/{userid}
dismiss.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.DismissMonitoringInvite)).Methods("PUT")
rtr.Handle("/cancel/invite", varsHandler(a.CancelAnyInvite)).Methods("POST")
if a.Config.EnableTestRoutes {
rtr.Handle("/cancel/all/{email}", varsHandler(a.CancelAllInvites)).Methods("POST")
}
// PUT /confirm/:userid/invited/:invited_address
rtr.Handle("/{userid}/invited/{invited_address}", varsHandler(a.CancelInvite)).Methods("PUT")
// POST /confirm/notifications/:topic_label
rtr.Handle("/notifications/{topic}", varsHandler(a.CreateNotification)).Methods("POST")
rtr.Use(muxMiddleware.NativeTraceSessionMiddleware)
}
func (h varsHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
vars := mux.Vars(req)
h(res, req, vars)
}
func getSessionToken(req *http.Request) string {
// time:= c.Params.ByName("time")
sessionToken := req.Header.Get(token.TP_SESSION_TOKEN)
if sessionToken != "" {
return sessionToken
}
sessionToken = strings.Trim(req.Header.Get("Authorization"), " ")
if sessionToken != "" && strings.HasPrefix(sessionToken, "Bearer ") {
tokenParts := strings.Split(sessionToken, " ")
sessionToken = tokenParts[1]
}
return sessionToken
}
// @Summary Get the api status
// @Description Get the api status
// @ID hydrophone-api-getstatus
// @Accept json
// @Produce json
// @Success 200 {string} string "OK"
// @Failure 500 {string} string "error description"
// @Router /status [get]
func (a *Api) GetStatus(res http.ResponseWriter, req *http.Request) {
var s status.ApiStatus
if err := a.Store.Ping(); err != nil {
log.Printf("Error getting status [%v]", err)
s = status.NewApiStatus(http.StatusInternalServerError, err.Error())
} else {
s = status.NewApiStatus(http.StatusOK, "OK")
}
a.sendModelAsResWithStatus(res, s, s.Status.Code)
return
}
//Save this confirmation or
//write an error if it all goes wrong
func (a *Api) addOrUpdateConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) bool {
if err := a.Store.UpsertConfirmation(ctx, conf); err != nil {
log.Printf("Error saving the confirmation [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_SAVING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return false
}
return true
}
//Find this confirmation
//write error if it fails
func (a *Api) findExistingConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) (*models.Confirmation, error) {
if found, err := a.Store.FindConfirmation(ctx, conf); err != nil {
log.Printf("findExistingConfirmation: [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
return nil, statusErr
} else {
return found, nil
}
}
//Add the creator's profile to the confirmation
//write error if it fails
func (a *Api) addProfile(ctx context.Context, conf *models.Confirmation) error {
if conf.CreatorId != "" {
doc, err := a.seagull.GetCollections(ctx, conf.CreatorId, []string{"profile"}, a.sl.TokenProvide())
if err != nil {
log.Printf("error getting the creators profile [%v] ", err)
return err
}
if doc.Profile == nil {
err := errors.New("profile is empty")
log.Printf("error getting the creators profile [%v] ", err)
return err
}
conf.Creator.Profile = &models.Profile{FullName: doc.Profile.FullName}
conf.Creator.UserId = conf.CreatorId
}
return nil
}
func (a *Api) getUserLanguage(userid string, req *http.Request, res http.ResponseWriter) string {
// let's get the invitee user preferences
if seagulDoc, err := a.seagull.GetCollections(req.Context(), userid, []string{"preferences"}, a.sl.TokenProvide()); err != nil {
a.logger.Errorf("Preferences not availlable for user %s. Email will be sent using default language. Error: [%s]", userid, err)
} else if seagulDoc.Preferences != nil && seagulDoc.Preferences.DisplayLanguageCode != "" {
return seagulDoc.Preferences.DisplayLanguageCode
}
return GetUserChosenLanguage(req)
}
//Find these confirmations
//write error if fails or write no-content if it doesn't exist
func (a *Api) checkFoundConfirmations(ctx context.Context, res http.ResponseWriter, results []*models.Confirmation, err error) []*models.Confirmation {
if err != nil {
log.Println("Error finding confirmations ", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return nil
} else if results == nil || len(results) == 0 {
statusErr := &status.StatusError{status.NewStatus(http.StatusNotFound, STATUS_NOT_FOUND)}
//log.Println("No confirmations were found ", statusErr.Error())
a.sendModelAsResWithStatus(res, statusErr, http.StatusNotFound)
return nil
} else {
for i := range results {
if err = a.addProfile(ctx, results[i]); err != nil {
//report and move on
log.Println("Error getting profile", err.Error())
}
}
return results
}
}
//Generate a notification from the given confirmation, write the error if it fails
func (a *Api) createAndSendNotification(req *http.Request, conf *models.Confirmation, content map[string]string, lang string) bool { | log.Printf("trying notification with template '%s' to %s with language '%s'", conf.TemplateName, conf.Email, lang)
// Get the template name based on the requested communication type
templateName := conf.TemplateName
if templateName == models.TemplateNameUndefined { | random_line_split |
|
hydrophoneApi.go | ,
templates: templates,
LanguageBundle: nil,
logger: logger,
}
}
func (a *Api) getWebURL(req *http.Request) string {
if a.Config.WebURL == "" {
host := req.Header.Get("Host")
return a.Config.Protocol + "://" + host
}
return a.Config.WebURL
}
func (a *Api) SetHandlers(prefix string, rtr *mux.Router) {
rtr.HandleFunc("/status", a.GetStatus).Methods("GET")
rtr.Handle("/sanity_check/{userid}", varsHandler(a.sendSanityCheckEmail)).Methods("POST")
// POST /confirm/send/forgot/:useremail
// POST /confirm/send/invite/:userid
send := rtr.PathPrefix("/send").Subrouter()
send.Handle("/forgot/{useremail}", varsHandler(a.passwordReset)).Methods("POST")
send.Handle("/invite/{userid}", varsHandler(a.SendInvite)).Methods("POST")
// POST /confirm/send/team/invite
send.Handle("/team/invite", varsHandler(a.SendTeamInvite)).Methods("POST")
// POST /confirm/send/team/monitoring/{teamid}/{userid}
send.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.SendMonitoringTeamInvite)).Methods("POST")
// POST /confirm/send/team/role/:userid - add or remove admin role to userid
send.Handle("/team/role/{userid}", varsHandler(a.UpdateTeamRole)).Methods("PUT")
// DELETE /confirm/send/team/leave/:teamid/:userid - delete member
send.Handle("/team/leave/{teamid}/{userid}", varsHandler(a.DeleteTeamMember)).Methods("DELETE")
// POST /confirm/send/inform/:userid
send.Handle("/inform/{userid}", varsHandler(a.sendSignUpInformation)).Methods("POST")
send.Handle("/pin-reset/{userid}", varsHandler(a.SendPinReset)).Methods("POST")
// PUT /confirm/accept/forgot/
// PUT /confirm/accept/invite/:userid/:invited_by
accept := rtr.PathPrefix("/accept").Subrouter()
accept.Handle("/forgot", varsHandler(a.acceptPassword)).Methods("PUT")
accept.Handle("/invite/{userid}/{invitedby}", varsHandler(a.AcceptInvite)).Methods("PUT")
// PUT /confirm/accept/team/invite
accept.Handle("/team/invite", varsHandler(a.AcceptTeamNotifs)).Methods("PUT")
// PUT /confirm/accept/team/monitoring/{teamid}/{userid}
accept.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.AcceptMonitoringInvite)).Methods("PUT")
// GET /confirm/invite/:userid
rtr.Handle("/invite/{userid}", varsHandler(a.GetSentInvitations)).Methods("GET")
// GET /confirm/invitations/:userid
rtr.Handle("/invitations/{userid}", varsHandler(a.GetReceivedInvitations)).Methods("GET")
// GET /confirm/teams/:teamId/patients/:patientId/invite
rtr.Handle("/teams/{teamId}/patients/{patientId}/invite", varsHandler(a.GetPatientTeamPendingInvite)).Methods("GET")
// PUT /confirm/dismiss/invite/:userid/:invited_by
dismiss := rtr.PathPrefix("/dismiss").Subrouter()
dismiss.Handle("/invite/{userid}/{invitedby}",
varsHandler(a.DismissInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/invite/{teamid}
dismiss.Handle("/team/invite/{teamid}", varsHandler(a.DismissTeamInvite)).Methods("PUT")
// PUT /confirm/dismiss/team/monitoring/{teamid}/{userid}
dismiss.Handle("/team/monitoring/{teamid}/{userid}", varsHandler(a.DismissMonitoringInvite)).Methods("PUT")
rtr.Handle("/cancel/invite", varsHandler(a.CancelAnyInvite)).Methods("POST")
if a.Config.EnableTestRoutes {
rtr.Handle("/cancel/all/{email}", varsHandler(a.CancelAllInvites)).Methods("POST")
}
// PUT /confirm/:userid/invited/:invited_address
rtr.Handle("/{userid}/invited/{invited_address}", varsHandler(a.CancelInvite)).Methods("PUT")
// POST /confirm/notifications/:topic_label
rtr.Handle("/notifications/{topic}", varsHandler(a.CreateNotification)).Methods("POST")
rtr.Use(muxMiddleware.NativeTraceSessionMiddleware)
}
func (h varsHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
vars := mux.Vars(req)
h(res, req, vars)
}
func getSessionToken(req *http.Request) string {
// time:= c.Params.ByName("time")
sessionToken := req.Header.Get(token.TP_SESSION_TOKEN)
if sessionToken != "" {
return sessionToken
}
sessionToken = strings.Trim(req.Header.Get("Authorization"), " ")
if sessionToken != "" && strings.HasPrefix(sessionToken, "Bearer ") {
tokenParts := strings.Split(sessionToken, " ")
sessionToken = tokenParts[1]
}
return sessionToken
}
// @Summary Get the api status
// @Description Get the api status
// @ID hydrophone-api-getstatus
// @Accept json
// @Produce json
// @Success 200 {string} string "OK"
// @Failure 500 {string} string "error description"
// @Router /status [get]
func (a *Api) GetStatus(res http.ResponseWriter, req *http.Request) {
var s status.ApiStatus
if err := a.Store.Ping(); err != nil {
log.Printf("Error getting status [%v]", err)
s = status.NewApiStatus(http.StatusInternalServerError, err.Error())
} else {
s = status.NewApiStatus(http.StatusOK, "OK")
}
a.sendModelAsResWithStatus(res, s, s.Status.Code)
return
}
//Save this confirmation or
//write an error if it all goes wrong
func (a *Api) addOrUpdateConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) bool {
if err := a.Store.UpsertConfirmation(ctx, conf); err != nil {
log.Printf("Error saving the confirmation [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_SAVING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return false
}
return true
}
//Find this confirmation
//write error if it fails
func (a *Api) findExistingConfirmation(ctx context.Context, conf *models.Confirmation, res http.ResponseWriter) (*models.Confirmation, error) {
if found, err := a.Store.FindConfirmation(ctx, conf); err != nil {
log.Printf("findExistingConfirmation: [%v]", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
return nil, statusErr
} else {
return found, nil
}
}
//Add the creator's profile to the confirmation
//write error if it fails
func (a *Api) addProfile(ctx context.Context, conf *models.Confirmation) error {
if conf.CreatorId != "" {
doc, err := a.seagull.GetCollections(ctx, conf.CreatorId, []string{"profile"}, a.sl.TokenProvide())
if err != nil {
log.Printf("error getting the creators profile [%v] ", err)
return err
}
if doc.Profile == nil {
err := errors.New("profile is empty")
log.Printf("error getting the creators profile [%v] ", err)
return err
}
conf.Creator.Profile = &models.Profile{FullName: doc.Profile.FullName}
conf.Creator.UserId = conf.CreatorId
}
return nil
}
func (a *Api) getUserLanguage(userid string, req *http.Request, res http.ResponseWriter) string {
// let's get the invitee user preferences
if seagulDoc, err := a.seagull.GetCollections(req.Context(), userid, []string{"preferences"}, a.sl.TokenProvide()); err != nil {
a.logger.Errorf("Preferences not availlable for user %s. Email will be sent using default language. Error: [%s]", userid, err)
} else if seagulDoc.Preferences != nil && seagulDoc.Preferences.DisplayLanguageCode != "" {
return seagulDoc.Preferences.DisplayLanguageCode
}
return GetUserChosenLanguage(req)
}
//Find these confirmations
//write error if fails or write no-content if it doesn't exist
func (a *Api) checkFoundConfirmations(ctx context.Context, res http.ResponseWriter, results []*models.Confirmation, err error) []*models.Confirmation {
if err != nil {
log.Println("Error finding confirmations ", err)
statusErr := &status.StatusError{status.NewStatus(http.StatusInternalServerError, STATUS_ERR_FINDING_CONFIRMATION)}
a.sendModelAsResWithStatus(res, statusErr, http.StatusInternalServerError)
return nil
} else if results == nil || len(results) == 0 {
statusErr := &status.StatusError{status.NewStatus(http.StatusNotFound, STATUS_NOT_FOUND)}
//log.Println("No confirmations were found ", statusErr.Error())
a.sendModelAsResWithStatus(res, statusErr, http.StatusNotFound)
return nil
} else {
for i := range results {
if err = a.addProfile(ctx, results[i]); err != nil {
//report and move on
log.Println("Error getting profile", err.Error())
}
}
return results
}
}
//Generate a notification from the given confirmation, write the error if it fails
func (a *Api) | createAndSendNotification | identifier_name |
|
mod.rs | not
//! guaranteed. Because the substreams are independent, there is no guarantee on the ordering
//! of the message delivery either.
//! 2. A DirectSend call negotiates which protocol to speak using [`protocol-select`]. This
//! allows simple versioning of message delivery and negotiation of which message types are
//! supported. In the future, we can potentially support multiple backwards-incompatible
//! versions of any messages.
//! 3. The actual structure of the wire messages is left for higher layers to specify. The
//! DirectSend protocol is only concerned with shipping around opaque blobs. Current xpeer
//! DirectSend clients (consensus, mempool) mostly send protobuf enums around over a single
//! DirectSend protocol, e.g., `/xpeer/consensus/direct_send/0.1.0`.
//!
//! ## Wire Protocol (dialer):
//!
//! To send a message to a remote peer, the dialer
//!
//! 1. Requests a new outbound substream from the muxer.
//! 2. Negotiates the substream using [`protocol-select`] to the protocol they
//! wish to speak, e.g., `/xpeer/mempool/direct_send/0.1.0`.
//! 3. Sends the serialized message on the newly negotiated substream.
//! 4. Drops the substream.
//!
//! ## Wire Protocol (listener):
//!
//! To receive a message from remote peers, the listener
//!
//! 1. Polls for new inbound substreams on the muxer.
//! 2. Negotiates inbound substreams using [`protocol-select`]. The negotiation
//! must only succeed if the requested protocol is actually supported.
//! 3. Awaits the serialized message on the newly negotiated substream.
//! 4. Drops the substream.
//!
//! Note: negotiated substreams are currently framed with the
//! [multiformats unsigned varint length-prefix](https://github.com/multiformats/unsigned-varint)
//!
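//! ## Example (sketch)
//!
//! A rough sketch of the dialer-side send path, mirroring what
//! `start_message_queue_handler` below does. `peer_mgr_reqs_tx`, `peer_id`,
//! `protocol` and `mdata` are assumed to already be in scope, and error
//! handling is omitted.
//!
//! ```ignore
//! // 1./2. open an outbound substream negotiated to the requested protocol
//! let raw_substream = peer_mgr_reqs_tx.open_substream(peer_id, protocol).await?;
//! // frame it with the unsigned-varint length prefix
//! let mut substream =
//!     Framed::new(raw_substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
//! // 3. send the serialized message
//! substream.send(mdata).await?;
//! // 4. drop the substream; the listener mirrors these steps with `substream.next().await`
//! drop(substream);
//! ```
//!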
//! [muxers]: ../../../netcore/multiplexing/index.html
//! [substream negotiation]: ../../../netcore/negotiate/index.html
//! [`protocol-select`]: ../../../netcore/negotiate/index.html
use crate::{
counters,
error::NetworkError,
peer_manager::{PeerManagerNotification, PeerManagerRequestSender},
ProtocolId,
};
use bytes::Bytes;
use channel;
use futures::{
compat::Sink01CompatExt,
future::{FutureExt, TryFutureExt},
io::{AsyncRead, AsyncReadExt, AsyncWrite},
sink::SinkExt,
stream::StreamExt,
};
use logger::prelude::*;
use std::{
collections::{hash_map::Entry, HashMap},
fmt::Debug,
};
use tokio::{codec::Framed, runtime::TaskExecutor};
use types::PeerId;
use unsigned_varint::codec::UviBytes;
#[cfg(test)]
mod test;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DirectSendRequest {
/// A request to send out a message.
SendMessage(PeerId, Message),
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DirectSendNotification {
/// A notification that a DirectSend message is received.
RecvMessage(PeerId, Message),
}
#[derive(Clone, Eq, PartialEq)]
pub struct Message {
/// Message type.
pub protocol: ProtocolId,
/// Serialized message data.
pub mdata: Bytes,
}
impl Debug for Message {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mdata_str = if self.mdata.len() <= 10 {
format!("{:?}", self.mdata)
} else {
format!("{:?}...", self.mdata.slice_to(10))
};
write!(
f,
"Message {{ protocol: {:?}, mdata: {} }}",
self.protocol, mdata_str
)
}
}
/// The DirectSend actor.
pub struct DirectSend<TSubstream> {
/// A handle to a tokio executor.
executor: TaskExecutor,
/// Channel to receive requests from other upstream actors.
ds_requests_rx: channel::Receiver<DirectSendRequest>,
    /// Channels to send notifications to upstream actors.
ds_notifs_tx: channel::Sender<DirectSendNotification>,
/// Channel to receive notifications from PeerManager.
peer_mgr_notifs_rx: channel::Receiver<PeerManagerNotification<TSubstream>>,
/// Channel to send requests to PeerManager.
peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
/// Outbound message queues for each (PeerId, ProtocolId) pair.
message_queues: HashMap<(PeerId, ProtocolId), channel::Sender<Bytes>>,
}
impl<TSubstream> DirectSend<TSubstream>
where
TSubstream: AsyncRead + AsyncWrite + Send + Unpin + Debug + 'static,
{
pub fn | (
executor: TaskExecutor,
ds_requests_rx: channel::Receiver<DirectSendRequest>,
ds_notifs_tx: channel::Sender<DirectSendNotification>,
peer_mgr_notifs_rx: channel::Receiver<PeerManagerNotification<TSubstream>>,
peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
) -> Self {
Self {
executor,
ds_requests_rx,
ds_notifs_tx,
peer_mgr_notifs_rx,
peer_mgr_reqs_tx,
message_queues: HashMap::new(),
}
}
pub async fn start(mut self) {
loop {
futures::select! {
req = self.ds_requests_rx.select_next_some() => {
self.handle_direct_send_request(req).await;
}
notif = self.peer_mgr_notifs_rx.select_next_some() => {
self.handle_peer_mgr_notification(notif);
}
complete => {
crit!("Direct send actor terminated");
break;
}
}
}
}
// Handle PeerManagerNotification, which can only be NewInboundSubstream for now.
fn handle_peer_mgr_notification(&self, notif: PeerManagerNotification<TSubstream>) {
trace!("PeerManagerNotification::{:?}", notif);
match notif {
PeerManagerNotification::NewInboundSubstream(peer_id, substream) => {
self.executor.spawn(
Self::handle_inbound_substream(
peer_id,
substream.protocol,
substream.substream,
self.ds_notifs_tx.clone(),
)
.boxed()
.unit_error()
.compat(),
);
}
_ => unreachable!("Unexpected PeerManagerNotification"),
}
}
// Handle a new inbound substream. Keep forwarding the messages to the NetworkProvider.
async fn handle_inbound_substream(
peer_id: PeerId,
protocol: ProtocolId,
substream: TSubstream,
mut ds_notifs_tx: channel::Sender<DirectSendNotification>,
) {
let mut substream =
Framed::new(substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
while let Some(item) = substream.next().await {
match item {
Ok(data) => {
let notif = DirectSendNotification::RecvMessage(
peer_id,
Message {
protocol: protocol.clone(),
mdata: data.freeze(),
},
);
ds_notifs_tx
.send(notif)
.await
.expect("DirectSendNotification send error");
}
Err(e) => {
warn!(
"DirectSend substream with peer {} receives error {}",
peer_id.short_str(),
e
);
break;
}
}
}
warn!(
"DirectSend inbound substream with peer {} closed",
peer_id.short_str()
);
}
// Create a new message queue and spawn a task to forward the messages from the queue to the
// corresponding substream.
async fn start_message_queue_handler(
executor: TaskExecutor,
mut peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
peer_id: PeerId,
protocol: ProtocolId,
) -> Result<channel::Sender<Bytes>, NetworkError> {
// Create a channel for the (PeerId, ProtocolId) pair.
let (msg_tx, msg_rx) = channel::new::<Bytes>(
1024,
&counters::OP_COUNTERS.peer_gauge(
&counters::PENDING_DIRECT_SEND_OUTBOUND_MESSAGES,
&peer_id.short_str(),
),
);
// Open a new substream for the (PeerId, ProtocolId) pair
let raw_substream = peer_mgr_reqs_tx.open_substream(peer_id, protocol).await?;
let substream =
Framed::new(raw_substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
// Spawn a task to forward the messages from the queue to the substream.
let f_substream = async move {
if let Err(e) = msg_rx.map(Ok).forward(substream).await {
warn!(
"Forward messages to peer {} error {:?}",
peer_id.short_str(),
e
);
}
// The messages in queue will be dropped
counters::DIRECT_SEND_MESSAGES_DROPPED.inc_by(
counters::OP_COUNTERS
.peer_gauge(
&counters::PENDING_DIRECT_SEND_OUTBOUND_MESSAGES,
&peer_id.short_str(),
)
.get(),
);
};
executor.spawn(f_substream.boxed().unit_error().compat());
Ok(msg_tx)
}
// Try to send a message to the message queue.
async fn try_send | new | identifier_name |
mod.rs | //! that only the dialer sends a message to the listener, but no messages or acknowledgements
//! are sent back in the other direction. So the message delivery is best effort and not
//! guaranteed. Because the substreams are independent, there is no guarantee on the ordering
//! of the message delivery either.
//! 2. A DirectSend call negotiates which protocol to speak using [`protocol-select`]. This
//! allows simple versioning of message delivery and negotiation of which message types are
//! supported. In the future, we can potentially support multiple backwards-incompatible
//! versions of any messages.
//! 3. The actual structure of the wire messages is left for higher layers to specify. The
//! DirectSend protocol is only concerned with shipping around opaque blobs. Current xpeer
//! DirectSend clients (consensus, mempool) mostly send protobuf enums around over a single
//! DirectSend protocol, e.g., `/xpeer/consensus/direct_send/0.1.0`.
//!
//! ## Wire Protocol (dialer):
//!
//! To send a message to a remote peer, the dialer
//!
//! 1. Requests a new outbound substream from the muxer.
//! 2. Negotiates the substream using [`protocol-select`] to the protocol they
//! wish to speak, e.g., `/xpeer/mempool/direct_send/0.1.0`.
//! 3. Sends the serialized message on the newly negotiated substream.
//! 4. Drops the substream.
//!
//! ## Wire Protocol (listener):
//!
//! To receive a message from remote peers, the listener
//!
//! 1. Polls for new inbound substreams on the muxer.
//! 2. Negotiates inbound substreams using [`protocol-select`]. The negotiation
//! must only succeed if the requested protocol is actually supported.
//! 3. Awaits the serialized message on the newly negotiated substream.
//! 4. Drops the substream.
//!
//! Note: negotiated substreams are currently framed with the
//! [multiformats unsigned varint length-prefix](https://github.com/multiformats/unsigned-varint).
//!
//! [muxers]: ../../../netcore/multiplexing/index.html
//! [substream negotiation]: ../../../netcore/negotiate/index.html
//! [`protocol-select`]: ../../../netcore/negotiate/index.html
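//!
//! As a rough usage sketch of the upstream side (the `ds_requests_tx`, `peer_id`
//! and `protocol_id` names are assumptions, not items defined in this module), an
//! actor holding the request channel would enqueue a message like this:
//!
//! ```ignore
//! let msg = Message {
//!     protocol: protocol_id,                        // e.g. a mempool protocol id
//!     mdata: Bytes::from_static(b"opaque payload"), // serialized by the caller
//! };
//! ds_requests_tx
//!     .send(DirectSendRequest::SendMessage(peer_id, msg))
//!     .await?;
//! ```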
use crate::{
counters,
error::NetworkError,
peer_manager::{PeerManagerNotification, PeerManagerRequestSender},
ProtocolId,
};
use bytes::Bytes;
use channel;
use futures::{
compat::Sink01CompatExt,
future::{FutureExt, TryFutureExt},
io::{AsyncRead, AsyncReadExt, AsyncWrite},
sink::SinkExt,
stream::StreamExt,
};
use logger::prelude::*;
use std::{
collections::{hash_map::Entry, HashMap},
fmt::Debug,
};
use tokio::{codec::Framed, runtime::TaskExecutor};
use types::PeerId;
use unsigned_varint::codec::UviBytes;
#[cfg(test)]
mod test;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DirectSendRequest {
/// A request to send out a message.
SendMessage(PeerId, Message),
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DirectSendNotification {
/// A notification that a DirectSend message is received.
RecvMessage(PeerId, Message),
}
#[derive(Clone, Eq, PartialEq)]
pub struct Message {
/// Message type.
pub protocol: ProtocolId,
/// Serialized message data.
pub mdata: Bytes,
}
impl Debug for Message {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mdata_str = if self.mdata.len() <= 10 {
format!("{:?}", self.mdata)
} else {
format!("{:?}...", self.mdata.slice_to(10))
};
write!(
f,
"Message {{ protocol: {:?}, mdata: {} }}",
self.protocol, mdata_str
)
}
}
/// The DirectSend actor.
pub struct DirectSend<TSubstream> {
/// A handle to a tokio executor.
executor: TaskExecutor,
/// Channel to receive requests from other upstream actors.
ds_requests_rx: channel::Receiver<DirectSendRequest>,
/// Channel to send notifications to upstream actors.
ds_notifs_tx: channel::Sender<DirectSendNotification>,
/// Channel to receive notifications from PeerManager.
peer_mgr_notifs_rx: channel::Receiver<PeerManagerNotification<TSubstream>>,
/// Channel to send requests to PeerManager.
peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
/// Outbound message queues for each (PeerId, ProtocolId) pair.
message_queues: HashMap<(PeerId, ProtocolId), channel::Sender<Bytes>>,
}
impl<TSubstream> DirectSend<TSubstream>
where
TSubstream: AsyncRead + AsyncWrite + Send + Unpin + Debug + 'static,
{
pub fn new(
executor: TaskExecutor,
ds_requests_rx: channel::Receiver<DirectSendRequest>,
ds_notifs_tx: channel::Sender<DirectSendNotification>,
peer_mgr_notifs_rx: channel::Receiver<PeerManagerNotification<TSubstream>>,
peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
) -> Self {
Self {
executor,
ds_requests_rx,
ds_notifs_tx,
peer_mgr_notifs_rx,
peer_mgr_reqs_tx,
message_queues: HashMap::new(),
}
}
pub async fn start(mut self) {
loop {
futures::select! {
req = self.ds_requests_rx.select_next_some() => {
self.handle_direct_send_request(req).await;
}
notif = self.peer_mgr_notifs_rx.select_next_some() => {
self.handle_peer_mgr_notification(notif);
}
complete => {
crit!("Direct send actor terminated");
break;
}
}
}
}
// Handle PeerManagerNotification, which can only be NewInboundSubstream for now.
fn handle_peer_mgr_notification(&self, notif: PeerManagerNotification<TSubstream>) {
trace!("PeerManagerNotification::{:?}", notif);
match notif {
PeerManagerNotification::NewInboundSubstream(peer_id, substream) => {
self.executor.spawn(
Self::handle_inbound_substream(
peer_id,
substream.protocol,
substream.substream,
self.ds_notifs_tx.clone(),
)
.boxed()
.unit_error()
.compat(),
);
}
_ => unreachable!("Unexpected PeerManagerNotification"),
}
}
// Handle a new inbound substream. Keep forwarding the messages to the NetworkProvider.
async fn handle_inbound_substream(
peer_id: PeerId,
protocol: ProtocolId,
substream: TSubstream,
mut ds_notifs_tx: channel::Sender<DirectSendNotification>,
) {
let mut substream =
Framed::new(substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
while let Some(item) = substream.next().await {
match item {
Ok(data) => {
let notif = DirectSendNotification::RecvMessage(
peer_id,
Message {
protocol: protocol.clone(),
mdata: data.freeze(),
},
);
ds_notifs_tx
.send(notif)
.await
.expect("DirectSendNotification send error");
}
Err(e) => {
warn!(
"DirectSend substream with peer {} receives error {}",
peer_id.short_str(),
e
);
break;
}
}
}
warn!(
"DirectSend inbound substream with peer {} closed",
peer_id.short_str()
);
}
// Create a new message queue and spawn a task to forward the messages from the queue to the
// corresponding substream.
async fn start_message_queue_handler(
executor: TaskExecutor,
mut peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
peer_id: PeerId,
protocol: ProtocolId,
) -> Result<channel::Sender<Bytes>, NetworkError> {
// Create a channel for the (PeerId, ProtocolId) pair.
let (msg_tx, msg_rx) = channel::new::<Bytes>(
1024,
&counters::OP_COUNTERS.peer_gauge(
&counters::PENDING_DIRECT_SEND_OUTBOUND_MESSAGES,
&peer_id.short_str(),
),
);
// Open a new substream for the (PeerId, ProtocolId) pair
let raw_substream = peer_mgr_reqs_tx.open_substream(peer_id, protocol).await?;
let substream =
Framed::new(raw_substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
// Spawn a task to forward the messages from the queue to the substream.
let f_substream = async move {
if let Err(e) = msg_rx.map(Ok).forward(substream).await {
warn!(
"Forward messages to peer {} error {:?}",
peer_id.short_str(),
e
);
}
// The messages in queue will be dropped
counters::DIRECT_SEND_MESSAGES_DROPPED.inc_by(
counters::OP_COUNTERS
.peer_gauge(
&counters::PENDING_DIRECT_SEND_OUTBOUND_MESSAGES,
&peer_id.short_str(),
)
.get(),
);
};
| random_line_split |
||
mod.rs | not
//! guaranteed. Because the substreams are independent, there is no guarantee on the ordering
//! of the message delivery either.
//! 2. A DirectSend call negotiates which protocol to speak using [`protocol-select`]. This
//! allows simple versioning of message delivery and negotiation of which message types are
//! supported. In the future, we can potentially support multiple backwards-incompatible
//! versions of any messages.
//! 3. The actual structure of the wire messages is left for higher layers to specify. The
//! DirectSend protocol is only concerned with shipping around opaque blobs. Current xpeer
//! DirectSend clients (consensus, mempool) mostly send protobuf enums around over a single
//! DirectSend protocol, e.g., `/xpeer/consensus/direct_send/0.1.0`.
//!
//! ## Wire Protocol (dialer):
//!
//! To send a message to a remote peer, the dialer
//!
//! 1. Requests a new outbound substream from the muxer.
//! 2. Negotiates the substream using [`protocol-select`] to the protocol they
//! wish to speak, e.g., `/xpeer/mempool/direct_send/0.1.0`.
//! 3. Sends the serialized message on the newly negotiated substream.
//! 4. Drops the substream.
//!
//! ## Wire Protocol (listener):
//!
//! To receive a message from remote peers, the listener
//!
//! 1. Polls for new inbound substreams on the muxer.
//! 2. Negotiates inbound substreams using [`protocol-select`]. The negotiation
//! must only succeed if the requested protocol is actually supported.
//! 3. Awaits the serialized message on the newly negotiated substream.
//! 4. Drops the substream.
//!
//! Note: negotiated substreams are currently framed with the
//! [multiformats unsigned varint length-prefix](https://github.com/multiformats/unsigned-varint).
//!
//! [muxers]: ../../../netcore/multiplexing/index.html
//! [substream negotiation]: ../../../netcore/negotiate/index.html
//! [`protocol-select`]: ../../../netcore/negotiate/index.html
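//!
//! For illustration only, decoding one such frame on the receiving side amounts to
//! splitting off the varint length and taking that many bytes; `buf` is an assumed
//! byte slice holding data already read from the substream:
//!
//! ```ignore
//! // Returns the decoded length and the remainder of the buffer.
//! let (len, rest) = unsigned_varint::decode::usize(buf)?;
//! let mdata = &rest[..len]; // the opaque message body handed to upstream actors
//! ```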
use crate::{
counters,
error::NetworkError,
peer_manager::{PeerManagerNotification, PeerManagerRequestSender},
ProtocolId,
};
use bytes::Bytes;
use channel;
use futures::{
compat::Sink01CompatExt,
future::{FutureExt, TryFutureExt},
io::{AsyncRead, AsyncReadExt, AsyncWrite},
sink::SinkExt,
stream::StreamExt,
};
use logger::prelude::*;
use std::{
collections::{hash_map::Entry, HashMap},
fmt::Debug,
};
use tokio::{codec::Framed, runtime::TaskExecutor};
use types::PeerId;
use unsigned_varint::codec::UviBytes;
#[cfg(test)]
mod test;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DirectSendRequest {
/// A request to send out a message.
SendMessage(PeerId, Message),
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DirectSendNotification {
/// A notification that a DirectSend message is received.
RecvMessage(PeerId, Message),
}
#[derive(Clone, Eq, PartialEq)]
pub struct Message {
/// Message type.
pub protocol: ProtocolId,
/// Serialized message data.
pub mdata: Bytes,
}
impl Debug for Message {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mdata_str = if self.mdata.len() <= 10 {
format!("{:?}", self.mdata)
} else | ;
write!(
f,
"Message {{ protocol: {:?}, mdata: {} }}",
self.protocol, mdata_str
)
}
}
/// The DirectSend actor.
pub struct DirectSend<TSubstream> {
/// A handle to a tokio executor.
executor: TaskExecutor,
/// Channel to receive requests from other upstream actors.
ds_requests_rx: channel::Receiver<DirectSendRequest>,
/// Channel to send notifications to upstream actors.
ds_notifs_tx: channel::Sender<DirectSendNotification>,
/// Channel to receive notifications from PeerManager.
peer_mgr_notifs_rx: channel::Receiver<PeerManagerNotification<TSubstream>>,
/// Channel to send requests to PeerManager.
peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
/// Outbound message queues for each (PeerId, ProtocolId) pair.
message_queues: HashMap<(PeerId, ProtocolId), channel::Sender<Bytes>>,
}
impl<TSubstream> DirectSend<TSubstream>
where
TSubstream: AsyncRead + AsyncWrite + Send + Unpin + Debug + 'static,
{
pub fn new(
executor: TaskExecutor,
ds_requests_rx: channel::Receiver<DirectSendRequest>,
ds_notifs_tx: channel::Sender<DirectSendNotification>,
peer_mgr_notifs_rx: channel::Receiver<PeerManagerNotification<TSubstream>>,
peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
) -> Self {
Self {
executor,
ds_requests_rx,
ds_notifs_tx,
peer_mgr_notifs_rx,
peer_mgr_reqs_tx,
message_queues: HashMap::new(),
}
}
pub async fn start(mut self) {
loop {
futures::select! {
req = self.ds_requests_rx.select_next_some() => {
self.handle_direct_send_request(req).await;
}
notif = self.peer_mgr_notifs_rx.select_next_some() => {
self.handle_peer_mgr_notification(notif);
}
complete => {
crit!("Direct send actor terminated");
break;
}
}
}
}
// Handle PeerManagerNotification, which can only be NewInboundSubstream for now.
fn handle_peer_mgr_notification(&self, notif: PeerManagerNotification<TSubstream>) {
trace!("PeerManagerNotification::{:?}", notif);
match notif {
PeerManagerNotification::NewInboundSubstream(peer_id, substream) => {
self.executor.spawn(
Self::handle_inbound_substream(
peer_id,
substream.protocol,
substream.substream,
self.ds_notifs_tx.clone(),
)
.boxed()
.unit_error()
.compat(),
);
}
_ => unreachable!("Unexpected PeerManagerNotification"),
}
}
// Handle a new inbound substream. Keep forwarding the messages to the NetworkProvider.
async fn handle_inbound_substream(
peer_id: PeerId,
protocol: ProtocolId,
substream: TSubstream,
mut ds_notifs_tx: channel::Sender<DirectSendNotification>,
) {
let mut substream =
Framed::new(substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
while let Some(item) = substream.next().await {
match item {
Ok(data) => {
let notif = DirectSendNotification::RecvMessage(
peer_id,
Message {
protocol: protocol.clone(),
mdata: data.freeze(),
},
);
ds_notifs_tx
.send(notif)
.await
.expect("DirectSendNotification send error");
}
Err(e) => {
warn!(
"DirectSend substream with peer {} receives error {}",
peer_id.short_str(),
e
);
break;
}
}
}
warn!(
"DirectSend inbound substream with peer {} closed",
peer_id.short_str()
);
}
// Create a new message queue and spawn a task to forward the messages from the queue to the
// corresponding substream.
async fn start_message_queue_handler(
executor: TaskExecutor,
mut peer_mgr_reqs_tx: PeerManagerRequestSender<TSubstream>,
peer_id: PeerId,
protocol: ProtocolId,
) -> Result<channel::Sender<Bytes>, NetworkError> {
// Create a channel for the (PeerId, ProtocolId) pair.
let (msg_tx, msg_rx) = channel::new::<Bytes>(
1024,
&counters::OP_COUNTERS.peer_gauge(
&counters::PENDING_DIRECT_SEND_OUTBOUND_MESSAGES,
&peer_id.short_str(),
),
);
// Open a new substream for the (PeerId, ProtocolId) pair
let raw_substream = peer_mgr_reqs_tx.open_substream(peer_id, protocol).await?;
let substream =
Framed::new(raw_substream.compat(), UviBytes::<Bytes>::default()).sink_compat();
// Spawn a task to forward the messages from the queue to the substream.
let f_substream = async move {
if let Err(e) = msg_rx.map(Ok).forward(substream).await {
warn!(
"Forward messages to peer {} error {:?}",
peer_id.short_str(),
e
);
}
// The messages in queue will be dropped
counters::DIRECT_SEND_MESSAGES_DROPPED.inc_by(
counters::OP_COUNTERS
.peer_gauge(
&counters::PENDING_DIRECT_SEND_OUTBOUND_MESSAGES,
&peer_id.short_str(),
)
.get(),
);
};
executor.spawn(f_substream.boxed().unit_error().compat());
Ok(msg_tx)
}
// Try to send a message to the message queue.
async fn try | {
format!("{:?}...", self.mdata.slice_to(10))
} | conditional_block |
app.go | " " +
time.Now().Format("Jan 02 15:04:05.000") + " " +
value + "\n")
}
}
func App() {
getFilenameFromArgs := func(args []string) string {
if len(args) > 1 {
return args[1]
} else {
return "gttp-tmp.json"
}
}
// log file
os.Remove("application.log")
if logOn {
logFile, _ = os.OpenFile("application.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600)
defer logFile.Close()
}
appDataService = services.NewApplicationDataService(getFilenameFromArgs(os.Args), log)
ctx = models.NewAppCtx(
getRootPrmt,
getMDR,
updateMDR,
getConfig,
updateConfig,
getOutput,
updateContext,
PrintOut,
refresh,
switchPage)
app = tview.NewApplication()
rootPrmt = drawMainComponents(app)
// Fixme: To delete
mapFocusPrmtToShortutText[requestResponseView.ResponsePrmt] = utils.ResultShortcutsText
mapFocusPrmtToShortutText[expertModeView.TitlePrmt] = utils.ExpertModeShortcutsText
mapFocusPrmtToShortutText[settingsView.TitlePrmt] = utils.SettingsShortcutsText
refresh("all")
app.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
log("Shortcut: "+event.Name()+" - "+time.Now().Format(time.RFC850), "info")
// disable all shortcuts (except for 'app.Stop()') if it's the root modal page which has focus
if page, _ := rootPrmt.GetFrontPage(); page == "modal" && event.Key() != tcell.KeyCtrlQ {
return event
}
switch event.Key() {
case tcell.KeyCtrlA:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(requestResponseView.LogBuffer, log)
}
case tcell.KeyCtrlC:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(responseData, log)
}
if prmt := app.GetFocus(); prmt != nil {
if input, er := app.GetFocus().(*tview.InputField); er {
utils.WriteToClipboard(input.GetText(), log)
}
}
// Disable "Ctrl+C" exit application default shortcut
return nil
case tcell.KeyCtrlD:
makeRequestController.Remove()
case tcell.KeyCtrlE:
executeRequest()
case tcell.KeyCtrlF:
focusPrimitive(makeRequestController.View.FormPrmt, nil)
case tcell.KeyCtrlH:
switchPage("ExpertRequestView")
case tcell.KeyCtrlJ:
focusPrimitive(treeAPICpnt.RootPrmt, nil)
case tcell.KeyCtrlN:
makeRequestController.New()
case tcell.KeyCtrlO:
switchPage("SettingsView")
case tcell.KeyCtrlQ:
app.Stop()
case tcell.KeyCtrlR:
displayRequestResponseViewPage(requestResponseView.RequestPrmt)
case tcell.KeyCtrlS:
makeRequestController.Save()
case tcell.KeyCtrlW:
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
case tcell.KeyEsc:
focusPrimitive(logEventTextPrmt, nil)
}
return event
})
if err := app.SetRoot(rootPrmt, true).Run(); err != nil {
panic(err)
}
}
func drawMainComponents(app *tview.Application) *tview.Pages {
drawLeftPanel := func() tview.Primitive {
treeAPICpnt = components.NewTreeCpnt(app, ctx)
tree := treeAPICpnt.Make(func(it models.MakeRequestData) {
refreshMDRView(it)
}, func(page string) {
pages.SwitchToPage(page)
})
flex := utils.MakeTitlePrmt(utils.TreePrmtTitle)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 1, 1)
flex.SetBackgroundColor(utils.BackColor)
flex.AddItem(tree, 0, 1, false)
return flex
}
drawRightPanel := func() tview.Primitive {
makeRequestExportModeView := func() tview.Primitive {
expertModeView = views.NewRequestExpertModeView(app, ctx)
expertModeView.InitView()
return expertModeView.ParentPrmt
}
makeSettingsView := func() tview.Primitive {
settingsView = views.NewSettingsView(app, ctx)
settingsView.InitView()
return settingsView.ParentPrmt
}
// build request response view
requestResponseView = views.NewRequestResponseView(app, ctx)
requestResponseView.InitView()
focusPrmts = append(focusPrmts, requestResponseView.ResponsePrmt)
focusPrmts = append(focusPrmts, requestResponseView.RequestPrmt)
// build "make/execute request" controller
makeRequestController = controllers.NewMakeRequestController(
app,
appDataService,
ctx,
actions.NewMakeRequestAction(requestResponseView.Display, requestResponseView.Logger))
flex := tview.NewFlex().SetDirection(tview.FlexRow)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 0, 0)
pages = tview.NewPages()
pages.SetBorder(false).SetBorderPadding(0, 1, 0, 0)
pages.AddPage("RequestResponseViewPage", requestResponseView.ParentPrmt, true, false)
pages.AddPage("RequestExpertModeViewPage", makeRequestExportModeView(), true, false)
pages.AddPage("SettingsViewPage", makeSettingsView(), true, true)
flex.AddItem(makeRequestController.Draw(), 9, 0, false)
flex.AddItem(pages, 0, 1, false)
return flex
}
logEventTextPrmt = tview.NewTextView()
logEventTextPrmt.SetBackgroundColor(utils.BackGrayColor)
logEventTextPrmt.SetTextAlign(tview.AlignLeft).SetDynamicColors(true)
shortcutInfoTextPrmt = tview.NewTextView()
shortcutInfoTextPrmt.SetBackgroundColor(utils.BackColor)
shortcutInfoTextPrmt.
SetTextAlign(tview.AlignRight).
SetDynamicColors(true).
SetText(utils.MainShortcutsText)
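// Grid layout: row 0 (height 1) is the log bar, row 1 (flexible) holds the main
// panels, row 2 (height 2) shows the shortcut help; the left tree panel spans the
// first two columns and the request/response pages fill the wide right column.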
grid := tview.NewGrid().
SetRows(1, 0, 2).
SetColumns(0, 10, -4).
SetBorders(false).
AddItem(logEventTextPrmt, 0, 0, 1, 3, 0, 0, false).
AddItem(drawLeftPanel(), 1, 0, 1, 2, 0, 0, false).
AddItem(drawRightPanel(), 1, 2, 1, 1, 0, 0, false).
AddItem(shortcutInfoTextPrmt, 2, 0, 1, 3, 0, 0, false)
return tview.NewPages().AddPage("root", grid, true, true)
}
func displayRequestResponseViewPage(focusOn *tview.TextView) {
pages.SwitchToPage("RequestResponseViewPage")
focusPrimitive(focusOn, focusOn.Box)
}
func executeRequest() {
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
requestResponseView.ResetLogBuffer()
log("", "info")
makeRequestController.Execute()
}
func focusPrimitive(prmt tview.Primitive, box *tview.Box) {
app.SetFocus(prmt)
// Set the border to false on all focusable primitives
for v := range focusPrmts {
focusPrmts[v].SetBorder(false)
}
if box != nil {
box.SetBorder(true)
}
// Display the right shortcuts text
if text, exists := mapFocusPrmtToShortutText[prmt]; exists {
shortcutInfoTextPrmt.SetText(text)
} else {
shortcutInfoTextPrmt.SetText(utils.MainShortcutsText)
}
}
func updateMDR(value models.MakeRequestData) {
makeRequestData = value
}
func refreshMDRView(makeRequestData models.MakeRequestData) {
updateMDR(makeRequestData)
for _, value := range ctx.AddListenerMRD {
value(makeRequestData)
}
}
func getMDR() models.MakeRequestData {
return makeRequestData
}
func getConfig() models.Config {
return output.Config
}
func updateConfig(value models.Config) {
output = appDataService.Load()
// update
output.Config = value
appDataService.Save(output)
// refresh views
refreshingConfig()
refreshingTreeAPICpn()
}
func updateContext(value models.Context) {
output = appDataService.Load()
// update
output.Context = value
appDataService.Save(output)
refreshingContext()
}
func refreshingConfig() {
for key, value := range ctx.AddListenerConfig {
ctx.PrintTrace("App.refreshingConfig." + key)
value(output.Config)
}
}
func refreshingContext() {
for key, value := range ctx.AddContextListener {
ctx.PrintTrace("App.refreshingContext." + key)
value(output.Context)
}
}
func refreshingTreeAPICpn() {
treeAPICpnt.Refresh()
}
func | getOutput | identifier_name |
|
app.go | Level()) + " " +
time.Now().Format("Jan 02 15:04:05.000") + " " +
value + "\n")
}
}
func App() {
getFilenameFromArgs := func(args []string) string {
if len(args) > 1 {
return args[1]
} else {
return "gttp-tmp.json"
}
}
// log file
os.Remove("application.log")
if logOn {
logFile, _ = os.OpenFile("application.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600)
defer logFile.Close()
}
appDataService = services.NewApplicationDataService(getFilenameFromArgs(os.Args), log)
ctx = models.NewAppCtx(
getRootPrmt,
getMDR,
updateMDR,
getConfig,
updateConfig,
getOutput,
updateContext,
PrintOut,
refresh,
switchPage)
app = tview.NewApplication()
rootPrmt = drawMainComponents(app)
// Fixme: To delete
mapFocusPrmtToShortutText[requestResponseView.ResponsePrmt] = utils.ResultShortcutsText
mapFocusPrmtToShortutText[expertModeView.TitlePrmt] = utils.ExpertModeShortcutsText
mapFocusPrmtToShortutText[settingsView.TitlePrmt] = utils.SettingsShortcutsText
refresh("all")
app.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
log("Shortcut: "+event.Name()+" - "+time.Now().Format(time.RFC850), "info")
// disable all shortcuts (except for 'app.Stop()') if it's the root modal page which has focus
if page, _ := rootPrmt.GetFrontPage(); page == "modal" && event.Key() != tcell.KeyCtrlQ {
return event
}
switch event.Key() {
case tcell.KeyCtrlA:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(requestResponseView.LogBuffer, log)
}
case tcell.KeyCtrlC:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(responseData, log)
}
if prmt := app.GetFocus(); prmt != nil {
if input, er := app.GetFocus().(*tview.InputField); er {
utils.WriteToClipboard(input.GetText(), log)
}
}
// Disable "Ctrl+C" exit application default shortcut
return nil
case tcell.KeyCtrlD:
makeRequestController.Remove()
case tcell.KeyCtrlE:
executeRequest()
case tcell.KeyCtrlF:
focusPrimitive(makeRequestController.View.FormPrmt, nil)
case tcell.KeyCtrlH:
switchPage("ExpertRequestView")
case tcell.KeyCtrlJ:
focusPrimitive(treeAPICpnt.RootPrmt, nil)
case tcell.KeyCtrlN:
makeRequestController.New()
case tcell.KeyCtrlO:
switchPage("SettingsView")
case tcell.KeyCtrlQ:
app.Stop()
case tcell.KeyCtrlR:
displayRequestResponseViewPage(requestResponseView.RequestPrmt)
case tcell.KeyCtrlS:
makeRequestController.Save()
case tcell.KeyCtrlW:
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
case tcell.KeyEsc:
focusPrimitive(logEventTextPrmt, nil)
}
return event
})
if err := app.SetRoot(rootPrmt, true).Run(); err != nil {
panic(err)
}
}
func drawMainComponents(app *tview.Application) *tview.Pages {
drawLeftPanel := func() tview.Primitive {
treeAPICpnt = components.NewTreeCpnt(app, ctx)
tree := treeAPICpnt.Make(func(it models.MakeRequestData) {
refreshMDRView(it)
}, func(page string) {
pages.SwitchToPage(page)
})
flex := utils.MakeTitlePrmt(utils.TreePrmtTitle)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 1, 1)
flex.SetBackgroundColor(utils.BackColor)
flex.AddItem(tree, 0, 1, false)
return flex
}
drawRightPanel := func() tview.Primitive {
makeRequestExportModeView := func() tview.Primitive {
expertModeView = views.NewRequestExpertModeView(app, ctx)
expertModeView.InitView()
return expertModeView.ParentPrmt
}
makeSettingsView := func() tview.Primitive {
settingsView = views.NewSettingsView(app, ctx)
settingsView.InitView()
return settingsView.ParentPrmt
}
// build request response view
requestResponseView = views.NewRequestResponseView(app, ctx)
requestResponseView.InitView()
focusPrmts = append(focusPrmts, requestResponseView.ResponsePrmt)
focusPrmts = append(focusPrmts, requestResponseView.RequestPrmt)
// build "make/execute request" controller
makeRequestController = controllers.NewMakeRequestController(
app,
appDataService,
ctx,
actions.NewMakeRequestAction(requestResponseView.Display, requestResponseView.Logger))
flex := tview.NewFlex().SetDirection(tview.FlexRow)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 0, 0)
pages = tview.NewPages()
pages.SetBorder(false).SetBorderPadding(0, 1, 0, 0)
pages.AddPage("RequestResponseViewPage", requestResponseView.ParentPrmt, true, false)
pages.AddPage("RequestExpertModeViewPage", makeRequestExportModeView(), true, false)
pages.AddPage("SettingsViewPage", makeSettingsView(), true, true)
flex.AddItem(makeRequestController.Draw(), 9, 0, false)
flex.AddItem(pages, 0, 1, false)
return flex
}
logEventTextPrmt = tview.NewTextView()
logEventTextPrmt.SetBackgroundColor(utils.BackGrayColor)
logEventTextPrmt.SetTextAlign(tview.AlignLeft).SetDynamicColors(true)
shortcutInfoTextPrmt = tview.NewTextView()
shortcutInfoTextPrmt.SetBackgroundColor(utils.BackColor)
shortcutInfoTextPrmt.
SetTextAlign(tview.AlignRight).
SetDynamicColors(true).
SetText(utils.MainShortcutsText)
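// Grid layout: row 0 (height 1) is the log bar, row 1 (flexible) holds the main
// panels, row 2 (height 2) shows the shortcut help; the left tree panel spans the
// first two columns and the request/response pages fill the wide right column.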
grid := tview.NewGrid().
SetRows(1, 0, 2).
SetColumns(0, 10, -4).
SetBorders(false).
AddItem(logEventTextPrmt, 0, 0, 1, 3, 0, 0, false).
AddItem(drawLeftPanel(), 1, 0, 1, 2, 0, 0, false).
AddItem(drawRightPanel(), 1, 2, 1, 1, 0, 0, false).
AddItem(shortcutInfoTextPrmt, 2, 0, 1, 3, 0, 0, false)
return tview.NewPages().AddPage("root", grid, true, true)
}
func displayRequestResponseViewPage(focusOn *tview.TextView) {
pages.SwitchToPage("RequestResponseViewPage")
focusPrimitive(focusOn, focusOn.Box)
}
func executeRequest() {
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
requestResponseView.ResetLogBuffer()
log("", "info")
makeRequestController.Execute()
}
func focusPrimitive(prmt tview.Primitive, box *tview.Box) {
app.SetFocus(prmt)
// Set the border to false on all focusable primitives
for v := range focusPrmts {
focusPrmts[v].SetBorder(false)
}
if box != nil {
box.SetBorder(true)
}
// Display the right shortcuts text
if text, exists := mapFocusPrmtToShortutText[prmt]; exists {
shortcutInfoTextPrmt.SetText(text)
} else {
shortcutInfoTextPrmt.SetText(utils.MainShortcutsText)
}
}
func updateMDR(value models.MakeRequestData) {
makeRequestData = value
}
func refreshMDRView(makeRequestData models.MakeRequestData) {
updateMDR(makeRequestData)
for _, value := range ctx.AddListenerMRD {
value(makeRequestData)
}
}
func getMDR() models.MakeRequestData {
return makeRequestData
}
func getConfig() models.Config {
return output.Config
}
func updateConfig(value models.Config) {
output = appDataService.Load()
// update
output.Config = value
appDataService.Save(output)
// refresh views
refreshingConfig()
refreshingTreeAPICpn()
}
func updateContext(value models.Context) {
output = appDataService.Load()
// update
output.Context = value
appDataService.Save(output)
refreshingContext()
}
func refreshingConfig() {
for key, value := range ctx.AddListenerConfig {
ctx.PrintTrace("App.refreshingConfig." + key)
value(output.Config)
}
}
func refreshingContext() {
for key, value := range ctx.AddContextListener {
ctx.PrintTrace("App.refreshingContext." + key)
value(output.Context)
}
}
func refreshingTreeAPICpn() | {
treeAPICpnt.Refresh()
} | identifier_body |
|
app.go | .Contains(logLevels, level) {
logFile.WriteString(
strings.ToUpper(getLevel()) + " " +
time.Now().Format("Jan 02 15:04:05.000") + " " +
value + "\n")
}
}
func App() {
getFilenameFromArgs := func(args []string) string {
if len(args) > 1 {
return args[1]
} else {
return "gttp-tmp.json"
}
}
// log file
os.Remove("application.log")
if logOn {
logFile, _ = os.OpenFile("application.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600)
defer logFile.Close()
}
appDataService = services.NewApplicationDataService(getFilenameFromArgs(os.Args), log)
ctx = models.NewAppCtx(
getRootPrmt,
getMDR,
updateMDR,
getConfig,
updateConfig,
getOutput,
updateContext,
PrintOut,
refresh,
switchPage)
app = tview.NewApplication()
rootPrmt = drawMainComponents(app)
// Fixme: To delete
mapFocusPrmtToShortutText[requestResponseView.ResponsePrmt] = utils.ResultShortcutsText
mapFocusPrmtToShortutText[expertModeView.TitlePrmt] = utils.ExpertModeShortcutsText
mapFocusPrmtToShortutText[settingsView.TitlePrmt] = utils.SettingsShortcutsText
refresh("all")
app.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
log("Shortcut: "+event.Name()+" - "+time.Now().Format(time.RFC850), "info")
// disable all shortcuts (except for 'app.Stop()') if it's the root modal page which has focus
if page, _ := rootPrmt.GetFrontPage(); page == "modal" && event.Key() != tcell.KeyCtrlQ {
return event
}
switch event.Key() {
case tcell.KeyCtrlA:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(requestResponseView.LogBuffer, log)
}
case tcell.KeyCtrlC:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(responseData, log)
}
if prmt := app.GetFocus(); prmt != nil {
if input, er := app.GetFocus().(*tview.InputField); er {
utils.WriteToClipboard(input.GetText(), log)
}
}
// Disable "Ctrl+C" exit application default shortcut
return nil
case tcell.KeyCtrlD:
makeRequestController.Remove()
case tcell.KeyCtrlE:
executeRequest()
case tcell.KeyCtrlF:
focusPrimitive(makeRequestController.View.FormPrmt, nil)
case tcell.KeyCtrlH:
switchPage("ExpertRequestView")
case tcell.KeyCtrlJ:
focusPrimitive(treeAPICpnt.RootPrmt, nil)
case tcell.KeyCtrlN:
makeRequestController.New()
case tcell.KeyCtrlO:
switchPage("SettingsView")
case tcell.KeyCtrlQ:
app.Stop()
case tcell.KeyCtrlR:
displayRequestResponseViewPage(requestResponseView.RequestPrmt)
case tcell.KeyCtrlS:
makeRequestController.Save()
case tcell.KeyCtrlW:
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
case tcell.KeyEsc:
focusPrimitive(logEventTextPrmt, nil)
}
return event
})
if err := app.SetRoot(rootPrmt, true).Run(); err != nil {
panic(err)
}
}
func drawMainComponents(app *tview.Application) *tview.Pages {
drawLeftPanel := func() tview.Primitive {
treeAPICpnt = components.NewTreeCpnt(app, ctx)
tree := treeAPICpnt.Make(func(it models.MakeRequestData) {
refreshMDRView(it)
}, func(page string) {
pages.SwitchToPage(page)
})
flex := utils.MakeTitlePrmt(utils.TreePrmtTitle)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 1, 1)
flex.SetBackgroundColor(utils.BackColor)
flex.AddItem(tree, 0, 1, false)
return flex
}
drawRightPanel := func() tview.Primitive {
makeRequestExportModeView := func() tview.Primitive {
expertModeView = views.NewRequestExpertModeView(app, ctx)
expertModeView.InitView()
return expertModeView.ParentPrmt
}
makeSettingsView := func() tview.Primitive {
settingsView = views.NewSettingsView(app, ctx)
settingsView.InitView()
return settingsView.ParentPrmt
}
// build request response view
requestResponseView = views.NewRequestResponseView(app, ctx)
requestResponseView.InitView()
focusPrmts = append(focusPrmts, requestResponseView.ResponsePrmt)
focusPrmts = append(focusPrmts, requestResponseView.RequestPrmt)
// build "make/execute request" controller
makeRequestController = controllers.NewMakeRequestController(
app,
appDataService,
ctx,
actions.NewMakeRequestAction(requestResponseView.Display, requestResponseView.Logger))
flex := tview.NewFlex().SetDirection(tview.FlexRow)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 0, 0)
pages = tview.NewPages()
pages.SetBorder(false).SetBorderPadding(0, 1, 0, 0)
pages.AddPage("RequestResponseViewPage", requestResponseView.ParentPrmt, true, false)
pages.AddPage("RequestExpertModeViewPage", makeRequestExportModeView(), true, false)
pages.AddPage("SettingsViewPage", makeSettingsView(), true, true)
flex.AddItem(makeRequestController.Draw(), 9, 0, false)
flex.AddItem(pages, 0, 1, false)
return flex
}
logEventTextPrmt = tview.NewTextView()
logEventTextPrmt.SetBackgroundColor(utils.BackGrayColor)
logEventTextPrmt.SetTextAlign(tview.AlignLeft).SetDynamicColors(true)
shortcutInfoTextPrmt = tview.NewTextView()
shortcutInfoTextPrmt.SetBackgroundColor(utils.BackColor)
shortcutInfoTextPrmt.
SetTextAlign(tview.AlignRight).
SetDynamicColors(true).
SetText(utils.MainShortcutsText)
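// Grid layout: row 0 (height 1) is the log bar, row 1 (flexible) holds the main
// panels, row 2 (height 2) shows the shortcut help; the left tree panel spans the
// first two columns and the request/response pages fill the wide right column.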
grid := tview.NewGrid().
SetRows(1, 0, 2).
SetColumns(0, 10, -4).
SetBorders(false).
AddItem(logEventTextPrmt, 0, 0, 1, 3, 0, 0, false).
AddItem(drawLeftPanel(), 1, 0, 1, 2, 0, 0, false).
AddItem(drawRightPanel(), 1, 2, 1, 1, 0, 0, false).
AddItem(shortcutInfoTextPrmt, 2, 0, 1, 3, 0, 0, false)
return tview.NewPages().AddPage("root", grid, true, true)
}
func displayRequestResponseViewPage(focusOn *tview.TextView) {
pages.SwitchToPage("RequestResponseViewPage")
focusPrimitive(focusOn, focusOn.Box)
}
func executeRequest() {
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
requestResponseView.ResetLogBuffer()
log("", "info")
makeRequestController.Execute()
}
func focusPrimitive(prmt tview.Primitive, box *tview.Box) {
app.SetFocus(prmt)
// Set the border to false on all focusable primitives
for v := range focusPrmts {
focusPrmts[v].SetBorder(false)
}
if box != nil {
box.SetBorder(true)
}
// Display the right shortcuts text
if text, exists := mapFocusPrmtToShortutText[prmt]; exists {
shortcutInfoTextPrmt.SetText(text)
} else {
shortcutInfoTextPrmt.SetText(utils.MainShortcutsText)
}
}
func updateMDR(value models.MakeRequestData) {
makeRequestData = value
}
func refreshMDRView(makeRequestData models.MakeRequestData) {
updateMDR(makeRequestData)
for _, value := range ctx.AddListenerMRD {
value(makeRequestData)
}
}
func getMDR() models.MakeRequestData {
return makeRequestData
}
func getConfig() models.Config {
return output.Config
}
func updateConfig(value models.Config) {
output = appDataService.Load()
// update
output.Config = value
appDataService.Save(output)
// refresh views
refreshingConfig()
refreshingTreeAPICpn()
}
func updateContext(value models.Context) {
output = appDataService.Load()
// update
output.Context = value
appDataService.Save(output)
refreshingContext()
}
func refreshingConfig() {
for key, value := range ctx.AddListenerConfig {
ctx.PrintTrace("App.refreshingConfig." + key)
value(output.Config)
}
}
func refreshingContext() {
for key, value := range ctx.AddContextListener { | ctx.PrintTrace("App.refreshingContext." + key)
value(output.Context)
}
}
| random_line_split |
|
app.go | Fixme: To delete
mapFocusPrmtToShortutText[requestResponseView.ResponsePrmt] = utils.ResultShortcutsText
mapFocusPrmtToShortutText[expertModeView.TitlePrmt] = utils.ExpertModeShortcutsText
mapFocusPrmtToShortutText[settingsView.TitlePrmt] = utils.SettingsShortcutsText
refresh("all")
app.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
log("Shortcut: "+event.Name()+" - "+time.Now().Format(time.RFC850), "info")
// disable all shortcuts (except for 'app.Stop()') if it's the root modal page which has focus
if page, _ := rootPrmt.GetFrontPage(); page == "modal" && event.Key() != tcell.KeyCtrlQ {
return event
}
switch event.Key() {
case tcell.KeyCtrlA:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(requestResponseView.LogBuffer, log)
}
case tcell.KeyCtrlC:
if requestResponseView.ResponsePrmt.HasFocus() {
utils.WriteToClipboard(responseData, log)
}
if prmt := app.GetFocus(); prmt != nil {
if input, er := app.GetFocus().(*tview.InputField); er {
utils.WriteToClipboard(input.GetText(), log)
}
}
// Disable "Ctrl+C" exit application default shortcut
return nil
case tcell.KeyCtrlD:
makeRequestController.Remove()
case tcell.KeyCtrlE:
executeRequest()
case tcell.KeyCtrlF:
focusPrimitive(makeRequestController.View.FormPrmt, nil)
case tcell.KeyCtrlH:
switchPage("ExpertRequestView")
case tcell.KeyCtrlJ:
focusPrimitive(treeAPICpnt.RootPrmt, nil)
case tcell.KeyCtrlN:
makeRequestController.New()
case tcell.KeyCtrlO:
switchPage("SettingsView")
case tcell.KeyCtrlQ:
app.Stop()
case tcell.KeyCtrlR:
displayRequestResponseViewPage(requestResponseView.RequestPrmt)
case tcell.KeyCtrlS:
makeRequestController.Save()
case tcell.KeyCtrlW:
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
case tcell.KeyEsc:
focusPrimitive(logEventTextPrmt, nil)
}
return event
})
if err := app.SetRoot(rootPrmt, true).Run(); err != nil {
panic(err)
}
}
func drawMainComponents(app *tview.Application) *tview.Pages {
drawLeftPanel := func() tview.Primitive {
treeAPICpnt = components.NewTreeCpnt(app, ctx)
tree := treeAPICpnt.Make(func(it models.MakeRequestData) {
refreshMDRView(it)
}, func(page string) {
pages.SwitchToPage(page)
})
flex := utils.MakeTitlePrmt(utils.TreePrmtTitle)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 1, 1)
flex.SetBackgroundColor(utils.BackColor)
flex.AddItem(tree, 0, 1, false)
return flex
}
drawRightPanel := func() tview.Primitive {
makeRequestExportModeView := func() tview.Primitive {
expertModeView = views.NewRequestExpertModeView(app, ctx)
expertModeView.InitView()
return expertModeView.ParentPrmt
}
makeSettingsView := func() tview.Primitive {
settingsView = views.NewSettingsView(app, ctx)
settingsView.InitView()
return settingsView.ParentPrmt
}
// build request response view
requestResponseView = views.NewRequestResponseView(app, ctx)
requestResponseView.InitView()
focusPrmts = append(focusPrmts, requestResponseView.ResponsePrmt)
focusPrmts = append(focusPrmts, requestResponseView.RequestPrmt)
// build "make/execute request" controller
makeRequestController = controllers.NewMakeRequestController(
app,
appDataService,
ctx,
actions.NewMakeRequestAction(requestResponseView.Display, requestResponseView.Logger))
flex := tview.NewFlex().SetDirection(tview.FlexRow)
flex.SetBorder(false)
flex.SetBorderPadding(1, 0, 0, 0)
pages = tview.NewPages()
pages.SetBorder(false).SetBorderPadding(0, 1, 0, 0)
pages.AddPage("RequestResponseViewPage", requestResponseView.ParentPrmt, true, false)
pages.AddPage("RequestExpertModeViewPage", makeRequestExportModeView(), true, false)
pages.AddPage("SettingsViewPage", makeSettingsView(), true, true)
flex.AddItem(makeRequestController.Draw(), 9, 0, false)
flex.AddItem(pages, 0, 1, false)
return flex
}
logEventTextPrmt = tview.NewTextView()
logEventTextPrmt.SetBackgroundColor(utils.BackGrayColor)
logEventTextPrmt.SetTextAlign(tview.AlignLeft).SetDynamicColors(true)
shortcutInfoTextPrmt = tview.NewTextView()
shortcutInfoTextPrmt.SetBackgroundColor(utils.BackColor)
shortcutInfoTextPrmt.
SetTextAlign(tview.AlignRight).
SetDynamicColors(true).
SetText(utils.MainShortcutsText)
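// Grid layout: row 0 (height 1) is the log bar, row 1 (flexible) holds the main
// panels, row 2 (height 2) shows the shortcut help; the left tree panel spans the
// first two columns and the request/response pages fill the wide right column.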
grid := tview.NewGrid().
SetRows(1, 0, 2).
SetColumns(0, 10, -4).
SetBorders(false).
AddItem(logEventTextPrmt, 0, 0, 1, 3, 0, 0, false).
AddItem(drawLeftPanel(), 1, 0, 1, 2, 0, 0, false).
AddItem(drawRightPanel(), 1, 2, 1, 1, 0, 0, false).
AddItem(shortcutInfoTextPrmt, 2, 0, 1, 3, 0, 0, false)
return tview.NewPages().AddPage("root", grid, true, true)
}
func displayRequestResponseViewPage(focusOn *tview.TextView) {
pages.SwitchToPage("RequestResponseViewPage")
focusPrimitive(focusOn, focusOn.Box)
}
func executeRequest() {
displayRequestResponseViewPage(requestResponseView.ResponsePrmt)
requestResponseView.ResetLogBuffer()
log("", "info")
makeRequestController.Execute()
}
func focusPrimitive(prmt tview.Primitive, box *tview.Box) {
app.SetFocus(prmt)
// Set the border to false on all focusable primitives
for v := range focusPrmts {
focusPrmts[v].SetBorder(false)
}
if box != nil {
box.SetBorder(true)
}
// Display the right shortcuts text
if text, exists := mapFocusPrmtToShortutText[prmt]; exists {
shortcutInfoTextPrmt.SetText(text)
} else {
shortcutInfoTextPrmt.SetText(utils.MainShortcutsText)
}
}
func updateMDR(value models.MakeRequestData) {
makeRequestData = value
}
func refreshMDRView(makeRequestData models.MakeRequestData) {
updateMDR(makeRequestData)
for _, value := range ctx.AddListenerMRD {
value(makeRequestData)
}
}
func getMDR() models.MakeRequestData {
return makeRequestData
}
func getConfig() models.Config {
return output.Config
}
func updateConfig(value models.Config) {
output = appDataService.Load()
// update
output.Config = value
appDataService.Save(output)
// refresh views
refreshingConfig()
refreshingTreeAPICpn()
}
func updateContext(value models.Context) {
output = appDataService.Load()
// update
output.Context = value
appDataService.Save(output)
refreshingContext()
}
func refreshingConfig() {
for key, value := range ctx.AddListenerConfig {
ctx.PrintTrace("App.refreshingConfig." + key)
value(output.Config)
}
}
func refreshingContext() {
for key, value := range ctx.AddContextListener {
ctx.PrintTrace("App.refreshingContext." + key)
value(output.Context)
}
}
func refreshingTreeAPICpn() {
treeAPICpnt.Refresh()
}
func getOutput() models.Output {
return output
}
// Log displays UI message to user.
func log(message string, status string) {
if message != "" {
logEventTextPrmt.SetText(utils.FormatLog(message, status))
}
}
func switchPage(page string) {
switch page {
case "ExpertRequestView":
pages.SwitchToPage("RequestExpertModeViewPage")
focusPrimitive(expertModeView.TitlePrmt, nil)
case "SettingsView":
pages.SwitchToPage("SettingsViewPage")
focusPrimitive(settingsView.TitlePrmt, nil)
}
}
func refresh(value string) {
output = appDataService.Load()
refreshMRDAllViews := func() {
for _, value := range ctx.AddListenerMRD {
value(makeRequestData)
}
}
if value == "all" {
refreshingTreeAPICpn()
refreshingConfig()
refreshingContext()
refreshMRDAllViews()
} else {
if strings.Contains(value, "tree") | {
refreshingTreeAPICpn()
} | conditional_block |
|
test_1.rs | // dem granular
pub use dem_rust::basic_equations::*;
pub use dem_rust::dem_3d::*;
pub use dem_rust::wall::*;
pub use dem_rust::prelude::*;
// // rigid body imports
// pub use dem_rust::rb::rb_2d::Rigidbody2D;
// for reading data from file (comma separated)
use crate::read_xy_pairs;
// external crate imports
// use gnuplot::*;
// use simple_shapes::prelude::*;
// -------------------------------------------------
#[derive(Deserialize, Debug)]
pub struct | {
flag_tf: f64,
flag_dt: f64,
flag_plots: bool,
}
pub fn main(args: &[String]) {
// --------------------------------------
// GET THE COMMAND LINE VARIABLES
// --------------------------------------
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.argv(args).deserialize())
.unwrap_or_else(|e| e.exit());
// println!("{:?}", args);
// --------------------------------------
// GET THE COMMAND LINE VARIABLES ENDS
// --------------------------------------
// --------------------------------------
// CREATE PARTICLE ARRAYS
// --------------------------------------
let x = vec![0.];
let y = vec![50. * 1e-2];
let z = vec![0.; x.len()];
let radius = vec![10. * 1e-2; x.len()];
let mut sand = DEM3DParticleArray::from_xyz_rad(&x, &y, &z, &radius);
let rho = 2600.;
sand.rho = vec![rho; x.len()];
sand.m = vec![rho * PI * radius[0] * radius[0]; x.len()];
sand.m_inv = vec![1. / sand.m[0]; x.len()];
let inertia = 4. * (2. * radius[0]) * (2. * radius[0]) / 10.;
sand.mi = vec![inertia; x.len()];
sand.mi_inv = vec![1. / sand.mi[0]; x.len()];
let stiffness = 5. * 1e6;
sand.kn = vec![stiffness; x.len()];
sand.kt = vec![stiffness; x.len()];
// set some dummy Young's modulus for linear DEM case; change these values if
// you are updating this example for nonlinear DEM
let yng = 1.;
let nu = 0.2;
let shr = yng / (2. * (1. + nu));
sand.young_mod = vec![yng; x.len()];
sand.poisson = vec![nu; x.len()];
sand.shear_mod = vec![shr; x.len()];
// this is a nice step for debugging
sand.validate_particle_array();
// -------------------------
// create an infinite wall
// -------------------------
let x = vec![0.];
let y = vec![0.];
let z = vec![0.; x.len()];
let wall_points = vec![[Vector3::new(-1., 0., 0.), Vector3::new(1., 0., 0.)]];
let nx = vec![0.];
let ny = vec![1.];
let nz = vec![0.; x.len()];
let mut wall =
WallDEMParticleArray::from_xyz_wall_points_normals(&x, &y, &z, &wall_points, &nx, &ny, &nz);
wall.kn = vec![stiffness; x.len()];
wall.kt = vec![stiffness; x.len()];
// --------------------------------------
// CREATE PARTICLE ARRAYS ENDS
// --------------------------------------
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS
// ----------------------------------------
let max_coordinate = 50. * 1e-2;
let max_size = 2. * radius[0];
let mut nbs2d_sand =
NBS2D::from_maximum_and_no_of_particles(max_coordinate, max_size, sand.x.len());
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS ENDS
// ----------------------------------------
// ----------------------------------------
// SOLVER DATA
// ----------------------------------------
let tf = args.flag_tf;
let dt = args.flag_dt;
let total_steps = (tf / dt) as u64;
let mut t = 0.;
let mut step_no = 0;
let total_output_file = 1000;
let pfreq = if total_steps < total_output_file {
1
} else {
total_steps / total_output_file
};
// ----------------------------------------
// SOLVER DATA ENDS
// ----------------------------------------
// ----------------------------------------
// OUTPUT DIRECTORY
// ----------------------------------------
let project_root = env!("CARGO_MANIFEST_DIR");
let dir_name = project_root.to_owned() + "/test_1_output";
let _p = fs::create_dir(&dir_name);
// ----------------------------------------
// OUTPUT DIRECTORY ENDS
// ----------------------------------------
// ----------------------------------------
// SOME CONSTANTS
// ----------------------------------------
let stage1 = 1;
let stage2 = 2;
// coefficient of friction
let mu = 0.5;
let kn = 5. * 1e5;
let kt = 5. * 1e5;
let en = 0.9;
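// kn/kt above are the normal/tangential contact stiffnesses used by the linear
// force model, en is the coefficient of restitution (assumed to set the normal
// damping inside the dem_3d_force_linear_* macros), and mu caps the tangential
// (friction) force.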
// ----------------------------------------
// SOME CONSTANTS ENDS
// ----------------------------------------
// create a progress bar
let pb = setup_progress_bar(total_steps);
// -------------------------------
// FOR PARAVIEW VISUALIZATION
// -------------------------------
// define variables for automatic visualization
// particle array files
let mut sand_files = "".to_string();
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
let mut wall_files = "".to_string();
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, step_no));
// neighbours files
let mut nnps_files = "".to_string();
write_nnps_2d_to_vtk!(nbs2d_sand, format!("{}/nnps.vtk", &dir_name));
nnps_files.push_str(&format!("'{}/nnps.vtk'", &dir_name));
// -------------------------------
// FOR PARAVIEW VISUALIZATION ENDS
// -------------------------------
// -------------------------------
// POST PROCESSING VARIABLES
// -------------------------------
// uncomment and initialize the variables
// -------------------------------
// POST PROCESSING VARIABLES ENDS
// -------------------------------
while t < tf {
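// One RK2 step, as suggested by the macro names below: save the current
// state and contacts, evaluate forces and advance the first half step
// (stage 1), re-evaluate forces and complete the step (stage 2), then
// update the persistent particle-particle contacts.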
// ----------------------
// RK2 INITIALIZE
// ----------------------
rk2_initialize_copy_contacts_3d!((sand));
rk2_initialize_dem!((sand));
// ----------------------
// STAGE 1 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage1, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage1, dt);
// ----------------------
// STAGE 1 STEPPER
// ----------------------
rk2_stage1_dem!((sand), dt);
// ----------------------
// STAGE 2 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage2, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage2, dt);
// ----------------------
// STAGE 2 STEPPER
// ----------------------
rk2_stage2_dem!((sand), dt);
update_contacts_pp!(sand, (sand));
// ----------------------
// OUTPUT THE VTK FILE AND WRITE ALL FILES DATA
// ----------------------
if step_no % pfreq == 0 && step_no != 0 {
// ----------------------
// WRITE DATA TO VTK FILE
// ----------------------
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
// ----------------------
// FOR PARAVIEW AUTOMATION
// ----------------------
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, | Args | identifier_name |
test_1.rs | // dem granular
pub use dem_rust::basic_equations::*;
pub use dem_rust::dem_3d::*;
pub use dem_rust::wall::*;
pub use dem_rust::prelude::*;
// // rigid body imports
// pub use dem_rust::rb::rb_2d::Rigidbody2D;
// for reading data from file (comma separated)
use crate::read_xy_pairs;
// external crate imports
// use gnuplot::*;
// use simple_shapes::prelude::*;
// -------------------------------------------------
#[derive(Deserialize, Debug)]
pub struct Args {
flag_tf: f64,
flag_dt: f64,
flag_plots: bool,
}
pub fn main(args: &[String]) | let rho = 2600.;
sand.rho = vec![rho; x.len()];
sand.m = vec![rho * PI * radius[0] * radius[0]; x.len()];
sand.m_inv = vec![1. / sand.m[0]; x.len()];
let inertia = 4. * (2. * radius[0]) * (2. * radius[0]) / 10.;
sand.mi = vec![inertia; x.len()];
sand.mi_inv = vec![1. / sand.mi[0]; x.len()];
let stiffness = 5. * 1e6;
sand.kn = vec![stiffness; x.len()];
sand.kt = vec![stiffness; x.len()];
// set some dummy Young's modulus for linear DEM case; change these values if
// you are updating this example for nonlinear DEM
let yng = 1.;
let nu = 0.2;
let shr = yng / (2. * (1. + nu));
sand.young_mod = vec![yng; x.len()];
sand.poisson = vec![nu; x.len()];
sand.shear_mod = vec![shr; x.len()];
// this is a nice step for debugging
sand.validate_particle_array();
// -------------------------
// create an infinite wall
// -------------------------
let x = vec![0.];
let y = vec![0.];
let z = vec![0.; x.len()];
let wall_points = vec![[Vector3::new(-1., 0., 0.), Vector3::new(1., 0., 0.)]];
let nx = vec![0.];
let ny = vec![1.];
let nz = vec![0.; x.len()];
let mut wall =
WallDEMParticleArray::from_xyz_wall_points_normals(&x, &y, &z, &wall_points, &nx, &ny, &nz);
wall.kn = vec![stiffness; x.len()];
wall.kt = vec![stiffness; x.len()];
// --------------------------------------
// CREATE PARTICLE ARRAYS ENDS
// --------------------------------------
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS
// ----------------------------------------
let max_coordinate = 50. * 1e-2;
let max_size = 2. * radius[0];
let mut nbs2d_sand =
NBS2D::from_maximum_and_no_of_particles(max_coordinate, max_size, sand.x.len());
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS ENDS
// ----------------------------------------
// ----------------------------------------
// SOLVER DATA
// ----------------------------------------
let tf = args.flag_tf;
let dt = args.flag_dt;
let total_steps = (tf / dt) as u64;
let mut t = 0.;
let mut step_no = 0;
let total_output_file = 1000;
let pfreq = if total_steps < total_output_file {
1
} else {
total_steps / total_output_file
};
// ----------------------------------------
// SOLVER DATA ENDS
// ----------------------------------------
// ----------------------------------------
// OUTPUT DIRECTORY
// ----------------------------------------
let project_root = env!("CARGO_MANIFEST_DIR");
let dir_name = project_root.to_owned() + "/test_1_output";
let _p = fs::create_dir(&dir_name);
// ----------------------------------------
// OUTPUT DIRECTORY ENDS
// ----------------------------------------
// ----------------------------------------
// SOME CONSTANTS
// ----------------------------------------
let stage1 = 1;
let stage2 = 2;
// coefficient of friction
let mu = 0.5;
let kn = 5. * 1e5;
let kt = 5. * 1e5;
let en = 0.9;
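// kn/kt above are the normal/tangential contact stiffnesses used by the linear
// force model, en is the coefficient of restitution (assumed to set the normal
// damping inside the dem_3d_force_linear_* macros), and mu caps the tangential
// (friction) force.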
// ----------------------------------------
// SOME CONSTANTS ENDS
// ----------------------------------------
// create a progress bar
let pb = setup_progress_bar(total_steps);
// -------------------------------
// FOR PARAVIEW VISUALIZATION
// -------------------------------
// define variables for automatic visualization
// particle array files
let mut sand_files = "".to_string();
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
let mut wall_files = "".to_string();
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, step_no));
// neighbours files
let mut nnps_files = "".to_string();
write_nnps_2d_to_vtk!(nbs2d_sand, format!("{}/nnps.vtk", &dir_name));
nnps_files.push_str(&format!("'{}/nnps.vtk'", &dir_name));
// -------------------------------
// FOR PARAVIEW VISUALIZATION ENDS
// -------------------------------
// -------------------------------
// POST PROCESSING VARIABLES
// -------------------------------
// uncomment and initialize the variables
// -------------------------------
// POST PROCESSING VARIABLES ENDS
// -------------------------------
while t < tf {
// ----------------------
// RK2 INITIALIZE
// ----------------------
rk2_initialize_copy_contacts_3d!((sand));
rk2_initialize_dem!((sand));
// ----------------------
// STAGE 1 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage1, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage1, dt);
// ----------------------
// STAGE 1 STEPPER
// ----------------------
rk2_stage1_dem!((sand), dt);
// ----------------------
// STAGE 2 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage2, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage2, dt);
// ----------------------
// STAGE 2 STEPPER
// ----------------------
rk2_stage2_dem!((sand), dt);
update_contacts_pp!(sand, (sand));
// ----------------------
// OUTPUT THE VTK FILE AND WRITE ALL FILES DATA
// ----------------------
if step_no % pfreq == 0 && step_no != 0 {
// ----------------------
// WRITE DATA TO VTK FILE
// ----------------------
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
// ----------------------
// FOR PARAVIEW AUTOMATION
// ----------------------
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, | {
// --------------------------------------
// GET THE COMMAND LINE VARIABLES
// --------------------------------------
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.argv(args).deserialize())
.unwrap_or_else(|e| e.exit());
// println!("{:?}", args);
// --------------------------------------
// GET THE COMMAND LINE VARIABLES ENDS
// --------------------------------------
// --------------------------------------
// CREATE PARTICLE ARRAYS
// --------------------------------------
let x = vec![0.];
let y = vec![50. * 1e-2];
let z = vec![0.; x.len()];
let radius = vec![10. * 1e-2; x.len()];
let mut sand = DEM3DParticleArray::from_xyz_rad(&x, &y, &z, &radius); | identifier_body |
test_1.rs | dem granular
pub use dem_rust::basic_equations::*;
pub use dem_rust::dem_3d::*;
pub use dem_rust::wall::*;
pub use dem_rust::prelude::*;
// // rigid body imports
// pub use dem_rust::rb::rb_2d::Rigidbody2D;
// for reading data from file (comma separated)
use crate::read_xy_pairs;
// external crate imports
// use gnuplot::*;
// use simple_shapes::prelude::*;
// -------------------------------------------------
#[derive(Deserialize, Debug)]
pub struct Args {
flag_tf: f64,
flag_dt: f64,
flag_plots: bool,
}
pub fn main(args: &[String]) {
// --------------------------------------
// GET THE COMMAND LINE VARIABLES
// --------------------------------------
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.argv(args).deserialize())
.unwrap_or_else(|e| e.exit());
// println!("{:?}", args);
// --------------------------------------
// GET THE COMMAND LINE VARIABLES ENDS
// --------------------------------------
// --------------------------------------
// CREATE PARTICLE ARRAYS
// --------------------------------------
let x = vec![0.];
let y = vec![50. * 1e-2];
let z = vec![0.; x.len()];
let radius = vec![10. * 1e-2; x.len()];
let mut sand = DEM3DParticleArray::from_xyz_rad(&x, &y, &z, &radius);
let rho = 2600.;
sand.rho = vec![rho; x.len()];
sand.m = vec![rho * PI * radius[0] * radius[0]; x.len()];
sand.m_inv = vec![1. / sand.m[0]; x.len()];
let inertia = 4. * (2. * radius[0]) * (2. * radius[0]) / 10.;
sand.mi = vec![inertia; x.len()];
sand.mi_inv = vec![1. / sand.mi[0]; x.len()];
let stiffness = 5. * 1e6;
sand.kn = vec![stiffness; x.len()];
sand.kt = vec![stiffness; x.len()];
// set some dummy Young's modulus for linear DEM case; change these values if
// you are updating this example for nonlinear DEM
let yng = 1.;
let nu = 0.2;
let shr = yng / (2. * (1. + nu));
sand.young_mod = vec![yng; x.len()];
sand.poisson = vec![nu; x.len()];
sand.shear_mod = vec![shr; x.len()];
// this is a nice step for debugging
sand.validate_particle_array();
// -------------------------
// create an infinite wall
// -------------------------
let x = vec![0.];
let y = vec![0.];
let z = vec![0.; x.len()];
let wall_points = vec![[Vector3::new(-1., 0., 0.), Vector3::new(1., 0., 0.)]];
let nx = vec![0.];
let ny = vec![1.];
let nz = vec![0.; x.len()];
let mut wall =
WallDEMParticleArray::from_xyz_wall_points_normals(&x, &y, &z, &wall_points, &nx, &ny, &nz);
wall.kn = vec![stiffness; x.len()];
wall.kt = vec![stiffness; x.len()];
// --------------------------------------
// CREATE PARTICLE ARRAYS ENDS
// --------------------------------------
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS
// ----------------------------------------
let max_coordinate = 50. * 1e-2;
let max_size = 2. * radius[0];
let mut nbs2d_sand =
NBS2D::from_maximum_and_no_of_particles(max_coordinate, max_size, sand.x.len());
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS ENDS
// ----------------------------------------
// ----------------------------------------
// SOLVER DATA
// ----------------------------------------
let tf = args.flag_tf;
let dt = args.flag_dt;
let total_steps = (tf / dt) as u64;
let mut t = 0.;
let mut step_no = 0;
let total_output_file = 1000;
let pfreq = if total_steps < total_output_file {
1
} else | ;
// ----------------------------------------
// SOLVER DATA ENDS
// ----------------------------------------
// ----------------------------------------
// OUTPUT DIRECTORY
// ----------------------------------------
let project_root = env!("CARGO_MANIFEST_DIR");
let dir_name = project_root.to_owned() + "/test_1_output";
let _p = fs::create_dir(&dir_name);
// ----------------------------------------
// OUTPUT DIRECTORY ENDS
// ----------------------------------------
// ----------------------------------------
// SOME CONSTANTS
// ----------------------------------------
let stage1 = 1;
let stage2 = 2;
// coefficient of friction
let mu = 0.5;
let kn = 5. * 1e5;
let kt = 5. * 1e5;
let en = 0.9;
// ----------------------------------------
// SOME CONSTANTS ENDS
// ----------------------------------------
// create a progress bar
let pb = setup_progress_bar(total_steps);
// -------------------------------
// FOR PARAVIEW VISUALIZATION
// -------------------------------
// define variables for automatic visualization
// particle array files
let mut sand_files = "".to_string();
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
let mut wall_files = "".to_string();
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, step_no));
// neighbours files
let mut nnps_files = "".to_string();
write_nnps_2d_to_vtk!(nbs2d_sand, format!("{}/nnps.vtk", &dir_name));
nnps_files.push_str(&format!("'{}/nnps.vtk'", &dir_name));
// -------------------------------
// FOR PARAVIEW VISUALIZATION ENDS
// -------------------------------
// -------------------------------
// POST PROCESSING VARIABLES
// -------------------------------
// uncomment and initialize the variables
// -------------------------------
// POST PROCESSING VARIABLES ENDS
// -------------------------------
while t < tf {
// ----------------------
// RK2 INITIALIZE
// ----------------------
rk2_initialize_copy_contacts_3d!((sand));
rk2_initialize_dem!((sand));
// ----------------------
// STAGE 1 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage1, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage1, dt);
// ----------------------
// STAGE 1 STEPPER
// ----------------------
rk2_stage1_dem!((sand), dt);
// ----------------------
// STAGE 2 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage2, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage2, dt);
// ----------------------
// STAGE 2 STEPPER
// ----------------------
rk2_stage2_dem!((sand), dt);
update_contacts_pp!(sand, (sand));
// ----------------------
// OUTPUT THE VTK FILE AND WRITE ALL FILES DATA
// ----------------------
if step_no % pfreq == 0 && step_no != 0 {
// ----------------------
// WRITE DATA TO VTK FILE
// ----------------------
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
// ----------------------
// FOR PARAVIEW AUTOMATION
// ----------------------
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, | {
total_steps / total_output_file
} | conditional_block |
test_1.rs | let yng = 1.;
let nu = 0.2;
let shr = yng / (2. * (1. + nu));
sand.young_mod = vec![yng; x.len()];
sand.poisson = vec![nu; x.len()];
sand.shear_mod = vec![shr; x.len()];
// this is a nice step for debugging
sand.validate_particle_array();
// -------------------------
// create an infinite wall
// -------------------------
let x = vec![0.];
let y = vec![0.];
let z = vec![0.; x.len()];
let wall_points = vec![[Vector3::new(-1., 0., 0.), Vector3::new(1., 0., 0.)]];
let nx = vec![0.];
let ny = vec![1.];
let nz = vec![0.; x.len()];
let mut wall =
WallDEMParticleArray::from_xyz_wall_points_normals(&x, &y, &z, &wall_points, &nx, &ny, &nz);
wall.kn = vec![stiffness; x.len()];
wall.kt = vec![stiffness; x.len()];
// --------------------------------------
// CREATE PARTICLE ARRAYS ENDS
// --------------------------------------
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS
// ----------------------------------------
let max_coordinate = 50. * 1e-2;
let max_size = 2. * radius[0];
let mut nbs2d_sand =
NBS2D::from_maximum_and_no_of_particles(max_coordinate, max_size, sand.x.len());
// ---------------------------------------
// SETUP CORRESPONDING NBS NNPS ENDS
// ----------------------------------------
// ----------------------------------------
// SOLVER DATA
// ----------------------------------------
let tf = args.flag_tf;
let dt = args.flag_dt;
let total_steps = (tf / dt) as u64;
let mut t = 0.;
let mut step_no = 0;
let total_output_file = 1000;
let pfreq = if total_steps < total_output_file {
1
} else {
total_steps / total_output_file
};
// ----------------------------------------
// SOLVER DATA ENDS
// ----------------------------------------
// ----------------------------------------
// OUTPUT DIRECTORY
// ----------------------------------------
let project_root = env!("CARGO_MANIFEST_DIR");
let dir_name = project_root.to_owned() + "/test_1_output";
let _p = fs::create_dir(&dir_name);
// ----------------------------------------
// OUTPUT DIRECTORY ENDS
// ----------------------------------------
// ----------------------------------------
// SOME CONSTANTS
// ----------------------------------------
let stage1 = 1;
let stage2 = 2;
// coefficient of friction
let mu = 0.5;
let kn = 5. * 1e5;
let kt = 5. * 1e5;
let en = 0.9;
// ----------------------------------------
// SOME CONSTANTS ENDS
// ----------------------------------------
// create a progress bar
let pb = setup_progress_bar(total_steps);
// -------------------------------
// FOR PARAVIEW VISUALIZATION
// -------------------------------
// define variables for automatic visualization
// particle array files
let mut sand_files = "".to_string();
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
let mut wall_files = "".to_string();
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, step_no));
// neighbours files
let mut nnps_files = "".to_string();
write_nnps_2d_to_vtk!(nbs2d_sand, format!("{}/nnps.vtk", &dir_name));
nnps_files.push_str(&format!("'{}/nnps.vtk'", &dir_name));
// -------------------------------
// FOR PARAVIEW VISUALIZATION ENDS
// -------------------------------
// -------------------------------
// POST PROCESSING VARIABLES
// -------------------------------
// uncomment and initialize the variables
// -------------------------------
// POST PROCESSING VARIABLES ENDS
// -------------------------------
while t < tf {
// ----------------------
// RK2 INITIALIZE
// ----------------------
rk2_initialize_copy_contacts_3d!((sand));
rk2_initialize_dem!((sand));
// ----------------------
// STAGE 1 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage1, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage1, dt);
// ----------------------
// STAGE 1 STEPPER
// ----------------------
rk2_stage1_dem!((sand), dt);
// ----------------------
// STAGE 2 EQUATIONS
// ----------------------
nbs2d_sand.register_particles_to_nnps(&sand.x, &sand.y, &sand.z);
make_forces_torques_zero!((sand));
body_force!((sand), 0., -9.81, 0.);
// dem_3d_force_linear_pp!(sand, (sand), (nbs2d_sand), (kn), (kt), (en), (mu), stage2, dt);
dem_3d_force_linear_pw!(sand, (wall), (kn), (kt), (en), (mu), stage2, dt);
// ----------------------
// STAGE 2 STEPPER
// ----------------------
rk2_stage2_dem!((sand), dt);
update_contacts_pp!(sand, (sand));
// ----------------------
// OUTPUT THE VTK FILE AND WRITE ALL FILES DATA
// ----------------------
if step_no % pfreq == 0 && step_no != 0 {
// ----------------------
// WRITE DATA TO VTK FILE
// ----------------------
write_to_vtk!(sand, format!("{}/sand_{}.vtk", &dir_name, step_no));
write_wall_to_vtk!(wall, format!("{}/wall_{}.vtk", &dir_name, step_no));
// ----------------------
// FOR PARAVIEW AUTOMATION
// ----------------------
sand_files.push_str(&format!("'{}/sand_{}.vtk', ", &dir_name, step_no));
wall_files.push_str(&format!("'{}/wall_{}.vtk', ", &dir_name, step_no));
}
step_no += 1;
t += dt;
// progress bar increment
pb.inc(1);
}
pb.finish_with_message("Simulation successfully completed");
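// Illustrative sketch, not part of the original example: the stage-1/stage-2 structure
// of the loop above is a midpoint-style two-stage Runge-Kutta update. For a scalar ODE
// dy/dt = f(t, y) the same pattern reads as below; the function name is hypothetical and
// only mirrors the structure of the dem_rust stepper macros, not their implementation.
fn _rk2_midpoint_sketch(y0: f64, t0: f64, dt: f64, f: impl Fn(f64, f64) -> f64) -> f64 {
    // stage 1: slope at the start of the step
    let k1 = f(t0, y0);
    // stage 2: slope at the midpoint predicted with k1
    let k2 = f(t0 + 0.5 * dt, y0 + 0.5 * dt * k1);
    // advance the full step using the midpoint slope
    y0 + dt * k2
}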
// ---------------------------------------
// write an easy paraview visualization file
// ---------------------------------------
// truncate the extra part of the string
sand_files.truncate(sand_files.len() - 2);
write_vis_file(
format!("{}/vis_paraview.py", &dir_name),
vec!["sand", "wall"],
vec![sand_files, wall_files],
vec![true, false],
vec!["nnps"],
vec![nnps_files],
);
// ---------------------------------------
// write an easy paraview visualization file ends
// ---------------------------------------
// ---------------------------------------
// PLOTTING
// ---------------------------------------
// UNCOMMENT AND USE THE PLOTTING FACILITY
// let (incident_angle_experiment_kharaz, rebound_angle_experiment_kharaz) =
// read_xy_pairs(&format!(
// "{}/data/chung_test_4_incident_angle_vs_rebound_angle_experiment_kharaz.txt",
// &project_root
// ));
// let mut fg = Figure::new();
// fg.axes2d()
// .set_x_label("Incident angle (degree)", &[])
// .set_y_label("Rebound angle (degree)", &[])
// // .set_x_range(Fix(0.), Fix(90.))
// // .set_y_range(Fix(-800.), Fix(0.))
// .lines(
// &incident_angle_experiment_kharaz,
// &rebound_angle_experiment_kharaz,
// &[Caption("Kharaz experiment"), Color("black")],
// )
// .lines(
// &incident_angle_paper_simulated,
// &rebound_angle_paper_simulated,
// &[Caption("Paper simulated"), Color("blue")],
// )
// .points(
// &incident_angle,
// &rebound_angle_al_alloy,
// &[Caption("Al alloy"), Color("black")],
// )
// .points(
// &incident_angle,
// &rebound_angle_al_oxide, | // &[Caption("Al oxide"), Color("blue")],
// );
| random_line_split |
|
mqtt_dev.py | ot import mqtt_connection_builder
from .topic_config import read_config
# setup:
received_count = 0
received_all_event = threading.Event()
args = 0
def mqtt_pub():
"""
Publish a random stream of messages to the AWS IoT Core MQTT broker
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
topic_data = read_config()
print(f"platform_type: {topic_data['platform_type']}")
print(f"random platform_type: {random.choice(topic_data['platform_type'])}")
# Publish message to server desired number of times
# This step loops forever if count was set to 0
if args.count == 0:
print("Sending messages until program killed")
else:
print(f"Sending {args.count} message(s)")
publish_count = 1
while (publish_count <= args.count) or (args.count == 0):
# topic definition: generate a random topic to publish to, based on the established hierarchy:
# ex: IOOS/<platform_type>/<ra>/<platform>/<sensor>/<variable>
platform_type = random.choice(topic_data["platform_type"])
ra = random.choice(topic_data["ra"])
platform = random.choice(topic_data["platform"])
sensor = random.choice(topic_data["sensor"])
variable = random.choice(topic_data["variable"])
topic = f"IOOS/{platform_type}/{ra}/{platform}/{sensor}/{variable}"
obs_data = random.uniform(1, 100)
# msg_json = """
# { "metadata": {
# "platform_type": "{platform_type}",
# "ra": "{ra}",
# "platform": "{platform}",
# "sensor": "{sensor}",
# "variable": "{variable}"
# },
# "data": {
# "value": "{data}"
# }
# }
# """
msg_dict = dict()
msg_dict["metadata"] = {
"platform_type": platform_type,
"ra": ra,
"platform": platform,
"sensor": sensor,
"variable": variable,
}
msg_dict["data"] = {"value": obs_data}
# print(msg_dict)
print(f"Topic: {topic}")
print(f"Message: {msg_dict}")
mqtt_connection.publish(
topic=topic,
# payload=str(msg_dict),
payload=json.dumps(msg_dict),
qos=mqtt.QoS.AT_LEAST_ONCE,
)
time.sleep(1)
publish_count += 1
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def mqtt_sub():
"""
Subscribe and echo messages from the broker using a user-provided topic filter string
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
# Subscribe
print(f"Subscribing to topic '{args.subscribe_topic}'...")
subscribe_future, packet_id = mqtt_connection.subscribe(
topic=args.subscribe_topic,
qos=mqtt.QoS.AT_LEAST_ONCE,
callback=on_message_received,
)
subscribe_result = subscribe_future.result()
print("Subscribed with {}".format(str(subscribe_result["qos"])))
# Wait for all messages to be received.
# This waits forever if count was set to 0.
if args.count != 0 and not received_all_event.is_set():
print("Waiting for all messages to be received...")
received_all_event.wait()
print(f"{received_count} message(s) received.")
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def init(args):
"""
Main setup function
"""
io.init_logging(getattr(io.LogLevel, args.verbosity), "stderr")
# global received_count = 0
# global received_all_event = threading.Event()
def setup_connection(args):
"""
Set up an MQTT client connection and other details
"""
event_loop_group = io.EventLoopGroup(1)
host_resolver = io.DefaultHostResolver(event_loop_group)
client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
if args.use_websocket is True:
proxy_options = None
if args.proxy_host:
proxy_options = http.HttpProxyOptions(
host_name=args.proxy_host,
port=args.proxy_port,
)
credentials_provider = auth.AwsCredentialsProvider.new_default_chain(
client_bootstrap,
)
mqtt_connection = mqtt_connection_builder.websockets_with_default_aws_signing(
endpoint=args.endpoint,
client_bootstrap=client_bootstrap,
region=args.signing_region,
credentials_provider=credentials_provider,
websocket_proxy_options=proxy_options,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
else:
mqtt_connection = mqtt_connection_builder.mtls_from_path(
endpoint=args.endpoint,
cert_filepath=args.cert,
pri_key_filepath=args.key,
client_bootstrap=client_bootstrap,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
print(
f"Connecting to {args.endpoint} with client ID '{args.client_id}'...",
)
return mqtt_connection
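# Illustrative invocation, assuming some console entry point wraps mqtt_pub() / mqtt_sub();
# the endpoint and certificate paths below are placeholders, not values from this repo:
#   <mqtt-pub-entrypoint> --endpoint abcd123456wxyz-ats.iot.us-east-1.amazonaws.com \
#       --cert device.pem.crt --key private.pem.key --root-ca AmazonRootCA1.pem \
#       --client-id my-test-client --count 10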
# Callback when the subscribed topic receives a message
def on_message_received(topic, payload, **kwargs):
print(f"Received message from topic '{topic}': {payload}")
global received_count
received_count += 1
if received_count == args.count:
received_all_event.set()
# Callback when connection is accidentally lost.
def on_connection_interrupted(connection, error, **kwargs):
print(f"Connection interrupted. error: {error}")
# Callback when an interrupted connection is re-established.
def on_connection_resumed(connection, return_code, session_present, **kwargs): | "Connection resumed. return_code: {} session_present: {}".format(
return_code,
session_present,
),
)
if return_code == mqtt.ConnectReturnCode.ACCEPTED and not session_present:
print("Session did not persist. Resubscribing to existing topics...")
resubscribe_future, _ = connection.resubscribe_existing_topics()
# Cannot synchronously wait for resubscribe result because we're on the connection's event-loop thread,
# evaluate result with a callback instead.
resubscribe_future.add_done_callback(on_resubscribe_complete)
def on_resubscribe_complete(resubscribe_future):
resubscribe_results = resubscribe_future.result()
print(f"Resubscribe results: {resubscribe_results}")
for topic, qos in resubscribe_results["topics"]:
if qos is None:
sys.exit(f"Server rejected resubscribe to topic: {topic}")
def parse_args():
"""
Parse command-line args passed in to either entrypoint function
"""
kwargs = {
"description": "A simple utility that leverages the AWS IoT SDK publish and subscribe to MQTT topics",
"formatter_class": argparse.RawDescriptionHelpFormatter,
}
parser = argparse.ArgumentParser(**kwargs)
parser.add_argument(
"--endpoint",
required=True,
help="Your AWS IoT custom endpoint, not including a port. "
+ 'Ex: "abcd123456wxyz-ats.iot.us-east-1.amazonaws.com"',
)
parser.add_argument(
"--cert",
help="File path to your client certificate, in PEM format.",
)
parser.add_argument("--key", help="File path to your private key, in PEM format.")
parser.add_argument(
"--root-ca",
help="File path to root certificate authority, in PEM format. "
+ "Necessary if MQTT server uses a certificate that's not already in "
+ "your trust store.",
)
parser.add_argument(
"--client-id",
default="test-" + str(uuid4()),
help="Client ID for MQTT connection.",
)
parser.add_argument(
"--subscribe_topic",
default="IOOS/#",
help="Topic to subscribe to.",
)
# parser.add_argument('--message', default="Hello World!", help="Message to publish. " +
# "Specify empty string to publish nothing.")
parser.add_argument(
"--count",
default=0,
type=int,
help="Number of messages to publish/receive before exiting. "
+ "Specify 0 to run forever.",
)
parser.add_argument(
"--use-websocket",
default=False,
action="store_true",
help="To use a websocket instead of raw mqtt. If you "
+ "specify this option you must specify a region for signing, you can also enable proxy mode.",
)
parser.add_argument(
"--signing-region",
default="us-east-1",
help="If you specify --use-web-socket, this "
+ "is the region that will be used for computing the Sigv4 signature",
)
# parser.add_argument('--proxy | print( | random_line_split |
mqtt_dev.py | import mqtt_connection_builder
from .topic_config import read_config
# setup:
received_count = 0
received_all_event = threading.Event()
args = 0
def mqtt_pub():
"""
Publish a random stream of messages to the AWS IoT Core MQTT broker
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
topic_data = read_config()
print(f"platform_type: {topic_data['platform_type']}")
print(f"random platform_type: {random.choice(topic_data['platform_type'])}")
# Publish message to server desired number of times
# This step loops forever if count was set to 0
if args.count == 0:
print("Sending messages until program killed")
else:
|
publish_count = 1
while (publish_count <= args.count) or (args.count == 0):
# topic definition: generate a random topic to publish to, based on the established hierarchy:
# ex: IOOS/<platform_type>/<ra>/<platform>/<sensor>/<variable>
platform_type = random.choice(topic_data["platform_type"])
ra = random.choice(topic_data["ra"])
platform = random.choice(topic_data["platform"])
sensor = random.choice(topic_data["sensor"])
variable = random.choice(topic_data["variable"])
topic = f"IOOS/{platform_type}/{ra}/{platform}/{sensor}/{variable}"
obs_data = random.uniform(1, 100)
# msg_json = """
# { "metadata": {
# "platform_type": "{platform_type}",
# "ra": "{ra}",
# "platform": "{platform}",
# "sensor": "{sensor}",
# "variable": "{variable}"
# },
# "data": {
# "value": "{data}"
# }
# }
# """
msg_dict = dict()
msg_dict["metadata"] = {
"platform_type": platform_type,
"ra": ra,
"platform": platform,
"sensor": sensor,
"variable": variable,
}
msg_dict["data"] = {"value": obs_data}
# print(msg_dict)
print(f"Topic: {topic}")
print(f"Message: {msg_dict}")
mqtt_connection.publish(
topic=topic,
# payload=str(msg_dict),
payload=json.dumps(msg_dict),
qos=mqtt.QoS.AT_LEAST_ONCE,
)
time.sleep(1)
publish_count += 1
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def mqtt_sub():
"""
Subscribe and echo messages from the broker using a user-provided topic filter string
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
# Subscribe
print(f"Subscribing to topic '{args.subscribe_topic}'...")
subscribe_future, packet_id = mqtt_connection.subscribe(
topic=args.subscribe_topic,
qos=mqtt.QoS.AT_LEAST_ONCE,
callback=on_message_received,
)
subscribe_result = subscribe_future.result()
print("Subscribed with {}".format(str(subscribe_result["qos"])))
# Wait for all messages to be received.
# This waits forever if count was set to 0.
if args.count != 0 and not received_all_event.is_set():
print("Waiting for all messages to be received...")
received_all_event.wait()
print(f"{received_count} message(s) received.")
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def init(args):
"""
Main setup function
"""
io.init_logging(getattr(io.LogLevel, args.verbosity), "stderr")
# global received_count = 0
# global received_all_event = threading.Event()
def setup_connection(args):
"""
Set up an MQTT client connection and other details
"""
event_loop_group = io.EventLoopGroup(1)
host_resolver = io.DefaultHostResolver(event_loop_group)
client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
if args.use_websocket is True:
proxy_options = None
if args.proxy_host:
proxy_options = http.HttpProxyOptions(
host_name=args.proxy_host,
port=args.proxy_port,
)
credentials_provider = auth.AwsCredentialsProvider.new_default_chain(
client_bootstrap,
)
mqtt_connection = mqtt_connection_builder.websockets_with_default_aws_signing(
endpoint=args.endpoint,
client_bootstrap=client_bootstrap,
region=args.signing_region,
credentials_provider=credentials_provider,
websocket_proxy_options=proxy_options,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
else:
mqtt_connection = mqtt_connection_builder.mtls_from_path(
endpoint=args.endpoint,
cert_filepath=args.cert,
pri_key_filepath=args.key,
client_bootstrap=client_bootstrap,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
print(
f"Connecting to {args.endpoint} with client ID '{args.client_id}'...",
)
return mqtt_connection
# Callback when the subscribed topic receives a message
def on_message_received(topic, payload, **kwargs):
print(f"Received message from topic '{topic}': {payload}")
global received_count
received_count += 1
if received_count == args.count:
received_all_event.set()
# Callback when connection is accidentally lost.
def on_connection_interrupted(connection, error, **kwargs):
print(f"Connection interrupted. error: {error}")
# Callback when an interrupted connection is re-established.
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print(
"Connection resumed. return_code: {} session_present: {}".format(
return_code,
session_present,
),
)
if return_code == mqtt.ConnectReturnCode.ACCEPTED and not session_present:
print("Session did not persist. Resubscribing to existing topics...")
resubscribe_future, _ = connection.resubscribe_existing_topics()
# Cannot synchronously wait for resubscribe result because we're on the connection's event-loop thread,
# evaluate result with a callback instead.
resubscribe_future.add_done_callback(on_resubscribe_complete)
def on_resubscribe_complete(resubscribe_future):
resubscribe_results = resubscribe_future.result()
print(f"Resubscribe results: {resubscribe_results}")
for topic, qos in resubscribe_results["topics"]:
if qos is None:
sys.exit(f"Server rejected resubscribe to topic: {topic}")
def parse_args():
"""
Parse command-line args passed in to either entrypoint function
"""
kwargs = {
"description": "A simple utility that leverages the AWS IoT SDK publish and subscribe to MQTT topics",
"formatter_class": argparse.RawDescriptionHelpFormatter,
}
parser = argparse.ArgumentParser(**kwargs)
parser.add_argument(
"--endpoint",
required=True,
help="Your AWS IoT custom endpoint, not including a port. "
+ 'Ex: "abcd123456wxyz-ats.iot.us-east-1.amazonaws.com"',
)
parser.add_argument(
"--cert",
help="File path to your client certificate, in PEM format.",
)
parser.add_argument("--key", help="File path to your private key, in PEM format.")
parser.add_argument(
"--root-ca",
help="File path to root certificate authority, in PEM format. "
+ "Necessary if MQTT server uses a certificate that's not already in "
+ "your trust store.",
)
parser.add_argument(
"--client-id",
default="test-" + str(uuid4()),
help="Client ID for MQTT connection.",
)
parser.add_argument(
"--subscribe_topic",
default="IOOS/#",
help="Topic to subscribe to.",
)
# parser.add_argument('--message', default="Hello World!", help="Message to publish. " +
# "Specify empty string to publish nothing.")
parser.add_argument(
"--count",
default=0,
type=int,
help="Number of messages to publish/receive before exiting. "
+ "Specify 0 to run forever.",
)
parser.add_argument(
"--use-websocket",
default=False,
action="store_true",
help="To use a websocket instead of raw mqtt. If you "
+ "specify this option you must specify a region for signing, you can also enable proxy mode.",
)
parser.add_argument(
"--signing-region",
default="us-east-1",
help="If you specify --use-web-socket, this "
+ "is the region that will be used for computing the Sigv4 signature",
)
# parser.add_argument('--proxy | print(f"Sending {args.count} message(s)") | conditional_block |
mqtt_dev.py | ot import mqtt_connection_builder
from .topic_config import read_config
# setup:
received_count = 0
received_all_event = threading.Event()
args = 0
def mqtt_pub():
| print("Sending messages until program killed")
else:
print(f"Sending {args.count} message(s)")
publish_count = 1
while (publish_count <= args.count) or (args.count == 0):
# topic definition: generate a random topic to publish to, based on the established hierarchy:
# ex: IOOS/<platform_type>/<ra>/<platform>/<sensor>/<variable>
platform_type = random.choice(topic_data["platform_type"])
ra = random.choice(topic_data["ra"])
platform = random.choice(topic_data["platform"])
sensor = random.choice(topic_data["sensor"])
variable = random.choice(topic_data["variable"])
topic = f"IOOS/{platform_type}/{ra}/{platform}/{sensor}/{variable}"
obs_data = random.uniform(1, 100)
# msg_json = """
# { "metadata": {
# "platform_type": "{platform_type}",
# "ra": "{ra}",
# "platform": "{platform}",
# "sensor": "{sensor}",
# "variable": "{variable}"
# },
# "data": {
# "value": "{data}"
# }
# }
# """
msg_dict = dict()
msg_dict["metadata"] = {
"platform_type": platform_type,
"ra": ra,
"platform": platform,
"sensor": sensor,
"variable": variable,
}
msg_dict["data"] = {"value": obs_data}
# print(msg_dict)
print(f"Topic: {topic}")
print(f"Message: {msg_dict}")
mqtt_connection.publish(
topic=topic,
# payload=str(msg_dict),
payload=json.dumps(msg_dict),
qos=mqtt.QoS.AT_LEAST_ONCE,
)
time.sleep(1)
publish_count += 1
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def mqtt_sub():
"""
Subscribe and echo messages from the broker using a user-provided topic filter string
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
# Subscribe
print(f"Subscribing to topic '{args.subscribe_topic}'...")
subscribe_future, packet_id = mqtt_connection.subscribe(
topic=args.subscribe_topic,
qos=mqtt.QoS.AT_LEAST_ONCE,
callback=on_message_received,
)
subscribe_result = subscribe_future.result()
print("Subscribed with {}".format(str(subscribe_result["qos"])))
# Wait for all messages to be received.
# This waits forever if count was set to 0.
if args.count != 0 and not received_all_event.is_set():
print("Waiting for all messages to be received...")
received_all_event.wait()
print(f"{received_count} message(s) received.")
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def init(args):
"""
Main setup function
"""
io.init_logging(getattr(io.LogLevel, args.verbosity), "stderr")
# global received_count = 0
# global received_all_event = threading.Event()
def setup_connection(args):
"""
Set up an MQTT client connection and other details
"""
event_loop_group = io.EventLoopGroup(1)
host_resolver = io.DefaultHostResolver(event_loop_group)
client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
if args.use_websocket is True:
proxy_options = None
if args.proxy_host:
proxy_options = http.HttpProxyOptions(
host_name=args.proxy_host,
port=args.proxy_port,
)
credentials_provider = auth.AwsCredentialsProvider.new_default_chain(
client_bootstrap,
)
mqtt_connection = mqtt_connection_builder.websockets_with_default_aws_signing(
endpoint=args.endpoint,
client_bootstrap=client_bootstrap,
region=args.signing_region,
credentials_provider=credentials_provider,
websocket_proxy_options=proxy_options,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
else:
mqtt_connection = mqtt_connection_builder.mtls_from_path(
endpoint=args.endpoint,
cert_filepath=args.cert,
pri_key_filepath=args.key,
client_bootstrap=client_bootstrap,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
print(
f"Connecting to {args.endpoint} with client ID '{args.client_id}'...",
)
return mqtt_connection
# Callback when the subscribed topic receives a message
def on_message_received(topic, payload, **kwargs):
print(f"Received message from topic '{topic}': {payload}")
global received_count
received_count += 1
if received_count == args.count:
received_all_event.set()
# Callback when connection is accidentally lost.
def on_connection_interrupted(connection, error, **kwargs):
print(f"Connection interrupted. error: {error}")
# Callback when an interrupted connection is re-established.
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print(
"Connection resumed. return_code: {} session_present: {}".format(
return_code,
session_present,
),
)
if return_code == mqtt.ConnectReturnCode.ACCEPTED and not session_present:
print("Session did not persist. Resubscribing to existing topics...")
resubscribe_future, _ = connection.resubscribe_existing_topics()
# Cannot synchronously wait for resubscribe result because we're on the connection's event-loop thread,
# evaluate result with a callback instead.
resubscribe_future.add_done_callback(on_resubscribe_complete)
def on_resubscribe_complete(resubscribe_future):
resubscribe_results = resubscribe_future.result()
print(f"Resubscribe results: {resubscribe_results}")
for topic, qos in resubscribe_results["topics"]:
if qos is None:
sys.exit(f"Server rejected resubscribe to topic: {topic}")
def parse_args():
"""
Parse command-line args passed in to either entrypoint function
"""
kwargs = {
"description": "A simple utility that leverages the AWS IoT SDK publish and subscribe to MQTT topics",
"formatter_class": argparse.RawDescriptionHelpFormatter,
}
parser = argparse.ArgumentParser(**kwargs)
parser.add_argument(
"--endpoint",
required=True,
help="Your AWS IoT custom endpoint, not including a port. "
+ 'Ex: "abcd123456wxyz-ats.iot.us-east-1.amazonaws.com"',
)
parser.add_argument(
"--cert",
help="File path to your client certificate, in PEM format.",
)
parser.add_argument("--key", help="File path to your private key, in PEM format.")
parser.add_argument(
"--root-ca",
help="File path to root certificate authority, in PEM format. "
+ "Necessary if MQTT server uses a certificate that's not already in "
+ "your trust store.",
)
parser.add_argument(
"--client-id",
default="test-" + str(uuid4()),
help="Client ID for MQTT connection.",
)
parser.add_argument(
"--subscribe_topic",
default="IOOS/#",
help="Topic to subscribe to.",
)
# parser.add_argument('--message', default="Hello World!", help="Message to publish. " +
# "Specify empty string to publish nothing.")
parser.add_argument(
"--count",
default=0,
type=int,
help="Number of messages to publish/receive before exiting. "
+ "Specify 0 to run forever.",
)
parser.add_argument(
"--use-websocket",
default=False,
action="store_true",
help="To use a websocket instead of raw mqtt. If you "
+ "specify this option you must specify a region for signing, you can also enable proxy mode.",
)
parser.add_argument(
"--signing-region",
default="us-east-1",
help="If you specify --use-web-socket, this "
+ "is the region that will be used for computing the Sigv4 signature",
)
# parser.add_argument('--proxy | """
Publish a random stream of messages to the AWS IoT Core MQTT broker
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
topic_data = read_config()
print(f"platform_type: {topic_data['platform_type']}")
print(f"random platform_type: {random.choice(topic_data['platform_type'])}")
# Publish message to server desired number of times
# This step loops forever if count was set to 0
if args.count == 0: | identifier_body |
mqtt_dev.py | import mqtt_connection_builder
from .topic_config import read_config
# setup:
received_count = 0
received_all_event = threading.Event()
args = 0
def mqtt_pub():
"""
Publish a random stream of messages to the AWS IoT Core MQTT broker
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
topic_data = read_config()
print(f"platform_type: {topic_data['platform_type']}")
print(f"random platform_type: {random.choice(topic_data['platform_type'])}")
# Publish message to server desired number of times
# This step loops forever if count was set to 0
if args.count == 0:
print("Sending messages until program killed")
else:
print(f"Sending {args.count} message(s)")
publish_count = 1
while (publish_count <= args.count) or (args.count == 0):
# topic definition: generate a random topic to publish to, based on the established hierarchy:
# ex: IOOS/<platform_type>/<ra>/<platform>/<sensor>/<variable>
platform_type = random.choice(topic_data["platform_type"])
ra = random.choice(topic_data["ra"])
platform = random.choice(topic_data["platform"])
sensor = random.choice(topic_data["sensor"])
variable = random.choice(topic_data["variable"])
topic = f"IOOS/{platform_type}/{ra}/{platform}/{sensor}/{variable}"
obs_data = random.uniform(1, 100)
# msg_json = """
# { "metadata": {
# "platform_type": "{platform_type}",
# "ra": "{ra}",
# "platform": "{platform}",
# "sensor": "{sensor}",
# "variable": "{variable}"
# },
# "data": {
# "value": "{data}"
# }
# }
# """
msg_dict = dict()
msg_dict["metadata"] = {
"platform_type": platform_type,
"ra": ra,
"platform": platform,
"sensor": sensor,
"variable": variable,
}
msg_dict["data"] = {"value": obs_data}
# print(msg_dict)
print(f"Topic: {topic}")
print(f"Message: {msg_dict}")
mqtt_connection.publish(
topic=topic,
# payload=str(msg_dict),
payload=json.dumps(msg_dict),
qos=mqtt.QoS.AT_LEAST_ONCE,
)
time.sleep(1)
publish_count += 1
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def mqtt_sub():
"""
Subscribe and echo messages from the broker using a user-provided topic filter string
"""
global args
args = parse_args()
init(args)
mqtt_connection = setup_connection(args)
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
# Subscribe
print(f"Subscribing to topic '{args.subscribe_topic}'...")
subscribe_future, packet_id = mqtt_connection.subscribe(
topic=args.subscribe_topic,
qos=mqtt.QoS.AT_LEAST_ONCE,
callback=on_message_received,
)
subscribe_result = subscribe_future.result()
print("Subscribed with {}".format(str(subscribe_result["qos"])))
# Wait for all messages to be received.
# This waits forever if count was set to 0.
if args.count != 0 and not received_all_event.is_set():
print("Waiting for all messages to be received...")
received_all_event.wait()
print(f"{received_count} message(s) received.")
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
def | (args):
"""
Main setup function
"""
io.init_logging(getattr(io.LogLevel, args.verbosity), "stderr")
# global received_count = 0
# global received_all_event = threading.Event()
def setup_connection(args):
"""
Set up an MQTT client connection and other details
"""
event_loop_group = io.EventLoopGroup(1)
host_resolver = io.DefaultHostResolver(event_loop_group)
client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
if args.use_websocket is True:
proxy_options = None
if args.proxy_host:
proxy_options = http.HttpProxyOptions(
host_name=args.proxy_host,
port=args.proxy_port,
)
credentials_provider = auth.AwsCredentialsProvider.new_default_chain(
client_bootstrap,
)
mqtt_connection = mqtt_connection_builder.websockets_with_default_aws_signing(
endpoint=args.endpoint,
client_bootstrap=client_bootstrap,
region=args.signing_region,
credentials_provider=credentials_provider,
websocket_proxy_options=proxy_options,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
else:
mqtt_connection = mqtt_connection_builder.mtls_from_path(
endpoint=args.endpoint,
cert_filepath=args.cert,
pri_key_filepath=args.key,
client_bootstrap=client_bootstrap,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6,
)
print(
f"Connecting to {args.endpoint} with client ID '{args.client_id}'...",
)
return mqtt_connection
# Callback when the subscribed topic receives a message
def on_message_received(topic, payload, **kwargs):
print(f"Received message from topic '{topic}': {payload}")
global received_count
received_count += 1
if received_count == args.count:
received_all_event.set()
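# Note: each received message bumps received_count; once it reaches the --count argument
# this event is set, releasing the received_all_event.wait() in mqtt_sub() so the
# subscriber can report the total and disconnect.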
# Callback when connection is accidentally lost.
def on_connection_interrupted(connection, error, **kwargs):
print(f"Connection interrupted. error: {error}")
# Callback when an interrupted connection is re-established.
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print(
"Connection resumed. return_code: {} session_present: {}".format(
return_code,
session_present,
),
)
if return_code == mqtt.ConnectReturnCode.ACCEPTED and not session_present:
print("Session did not persist. Resubscribing to existing topics...")
resubscribe_future, _ = connection.resubscribe_existing_topics()
# Cannot synchronously wait for resubscribe result because we're on the connection's event-loop thread,
# evaluate result with a callback instead.
resubscribe_future.add_done_callback(on_resubscribe_complete)
def on_resubscribe_complete(resubscribe_future):
resubscribe_results = resubscribe_future.result()
print(f"Resubscribe results: {resubscribe_results}")
for topic, qos in resubscribe_results["topics"]:
if qos is None:
sys.exit(f"Server rejected resubscribe to topic: {topic}")
def parse_args():
"""
Parse command-line args passed in to either entrypoint function
"""
kwargs = {
"description": "A simple utility that leverages the AWS IoT SDK publish and subscribe to MQTT topics",
"formatter_class": argparse.RawDescriptionHelpFormatter,
}
parser = argparse.ArgumentParser(**kwargs)
parser.add_argument(
"--endpoint",
required=True,
help="Your AWS IoT custom endpoint, not including a port. "
+ 'Ex: "abcd123456wxyz-ats.iot.us-east-1.amazonaws.com"',
)
parser.add_argument(
"--cert",
help="File path to your client certificate, in PEM format.",
)
parser.add_argument("--key", help="File path to your private key, in PEM format.")
parser.add_argument(
"--root-ca",
help="File path to root certificate authority, in PEM format. "
+ "Necessary if MQTT server uses a certificate that's not already in "
+ "your trust store.",
)
parser.add_argument(
"--client-id",
default="test-" + str(uuid4()),
help="Client ID for MQTT connection.",
)
parser.add_argument(
"--subscribe_topic",
default="IOOS/#",
help="Topic to subscribe to.",
)
# parser.add_argument('--message', default="Hello World!", help="Message to publish. " +
# "Specify empty string to publish nothing.")
parser.add_argument(
"--count",
default=0,
type=int,
help="Number of messages to publish/receive before exiting. "
+ "Specify 0 to run forever.",
)
parser.add_argument(
"--use-websocket",
default=False,
action="store_true",
help="To use a websocket instead of raw mqtt. If you "
+ "specify this option you must specify a region for signing, you can also enable proxy mode.",
)
parser.add_argument(
"--signing-region",
default="us-east-1",
help="If you specify --use-web-socket, this "
+ "is the region that will be used for computing the Sigv4 signature",
)
# parser.add_argument('--proxy | init | identifier_name |
jisho.py | _one("span.unlinked").text if li.select_one("span.unlinked") is not None else None
if furigana:
kanji += unlifted
kana += furigana
kanaEnding = []
for i in reversed(range(len(unlifted))): | break
kana += ''.join(kanaEnding[::-1])
else:
kanji += unlifted
kana += unlifted
else:
text = str(child).strip()
if text:
kanji += text
kana += text
return kanji, kana
def get_pieces(sentenceElement):
pieceElements = sentenceElement.select("li.clearfix") + sentenceElement.select("el")
pieces = []
for pieceElement in pieceElements:
if pieceElement.name == 'li':
pieces.append({
'lifted': pieceElement.select_one("span.furigana").text if pieceElement.select_one("span.furigana") is not None else '',
'unlifted': pieceElement.select_one("span.unlinked").text if pieceElement.select_one("span.unlinked") is not None else '',
})
else:
pieces.append({
'lifted': '',
'unlifted': pieceElement.text,
})
return pieces
def parseExampleDiv(div):
english = str(div.select_one('span.english').find(text=True))
kanji, kana = get_kanji_and_kana(div)
return english, kanji, kana, get_pieces(div)
def parse_example_page_data(pageHtml, phrase):
string_page_html = str(pageHtml)
# pageHtmlReplaced = re.sub(
# r'</li>\s*([^\s<>]+)\s*<li class="clearfix">', r'</li><el>\1</el><li class="clearfix">', string_page_html)
# myhtml = BeautifulSoup(pageHtmlReplaced, 'lxml')
divs = pageHtml.select("div.sentence_content")
results = []
for div in divs:
# div = divs.eq(i)
results.append(parseExampleDiv(div))
return {
'query': phrase,
'found': len(results) > 0,
'result': results,
'uri': uri_for_search(phrase, filter="sentences"),
'phrase': phrase
}
# PHRASE SCRAPE FUNCTIONS START
def get_tags(my_html):
tags = []
tagElements = my_html.select("span.concept_light-tag")
for tagElement in tagElements:
tags.append(tagElement.text)
return tags
def get_meanings_other_forms_and_notes(my_html):
otherForms = []
notes = []
meaningsWrapper = my_html.select_one(
'#page_container > div > div > article > div > div.concept_light-meanings.medium-9.columns > div')
meaningsChildren = meaningsWrapper.children
meanings = []
mostRecentWordTypes = []
for child in meaningsChildren:
if child.get("class")[0] == 'meaning-tags':
mostRecentWordTypes = list(map(lambda x: x.strip().lower(), child.text.split(',')))
elif mostRecentWordTypes[0] == 'other forms':
otherForms = list(map(lambda y: ({'kanji': y[0], 'kana': y[1]}),
map(lambda x: x.replace('【', '').replace('】', '').split(' '),
child.text.split('、'))))
elif mostRecentWordTypes[0] == 'notes':
notes = child.text.split('\n')
else:
meaning = child.select_one("span.meaning-meaning").text
try:
child.select_one('.meaning-abstract').select_one('a').extract()
meaningAbstract = child.select_one('.meaning-abstract').text
except AttributeError:
meaningAbstract = ''
try:
supplemental = list(filter(lambda y: bool(y),
map(lambda x: x.strip(), child.select_one("span.supplemental_info").text.split(','))))
except AttributeError: # if we couldn't find supplemental info class
supplemental = []
seeAlsoTerms = []
for i in reversed(range(len(supplemental))):
supplementalEntry = supplemental[i]
if supplementalEntry.startswith('See also'):
seeAlsoTerms.append(supplementalEntry.replace('See also ', ''))
supplemental.pop(i)
sentences = []
sentenceElements = child.select("span.sentences > div.sentence")  # select() yields every matching sentence (or [] when none)
for sentenceElement in sentenceElements:
english = sentenceElement.select_one("li.english").text
pieces = get_pieces(sentenceElement)
# remove english and furigana to get left with normal japanese
sentenceElement.select_one("li.english").extract()
# could (will) be multiple furiganas
for s in sentenceElement.select("span.furigana"):
s.extract()
japanese = sentenceElement.text
sentences.append({'english': english, 'japanese': japanese, 'pieces': pieces})
meanings.append({
'seeAlsoTerms': seeAlsoTerms,
'sentences': sentences,
'definition': meaning,
'supplemental': supplemental,
'definitionAbstract': meaningAbstract,
'tags': mostRecentWordTypes,
})
return meanings, otherForms, notes
def uri_for_phrase_scrape(searchTerm):
return f'https://jisho.org/word/{urllib.parse.quote(searchTerm)}'
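# e.g. uri_for_phrase_scrape("日本語") -> "https://jisho.org/word/%E6%97%A5%E6%9C%AC%E8%AA%9E"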
def parse_phrase_page_data(pageHtml, query):
my_html = BeautifulSoup(pageHtml, "lxml")
meanings, otherForms, notes = get_meanings_other_forms_and_notes(my_html)
result = {
'found': True,
'query': query,
'uri': uri_for_phrase_scrape(query),
'tags': get_tags(my_html),
'meanings': meanings,
'other_forms': otherForms,
'notes': notes
}
return result
class Jisho:
"""
A class to interface with Jisho.org and store search results for use.
Stores html results from queries to Jisho.org as an instance variable
and exposes helpers for parsing kanji, example-sentence, and phrase pages.
"""
def __init__(self):
self.html = None
self.response = None
def search_for_phrase(self, phrase):
"""Directly use Jisho's official API to get info on a phrase (can be multiple characters)"""
uri = uriForPhraseSearch(phrase)
return json.loads(requests.get(uri).content)
def search_for_kanji(self, kanji, depth = "shallow"):
"""Return lots of information for a *single* character"""
uri = uri_for_search(kanji, filter="kanji")
self._extract_html(uri)
return self.parse_kanji_page_data(kanji, depth)
def search_for_examples(self, phrase):
"""Return """
uri = uri_for_search(phrase, filter="sentences")
self._extract_html(uri)
return parse_example_page_data(self.html, phrase)
def scrape_for_phrase(self, phrase):
uri = uri_for_phrase_scrape(phrase)
response = requests.get(uri)
return parse_phrase_page_data(response.content, phrase)
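# Illustrative usage, assuming network access to jisho.org; only methods defined on this
# class are used:
#   j = Jisho()
#   kanji_info = j.search_for_kanji("語")       # kanji page, parsed to a dict
#   examples = j.search_for_examples("日本語")   # example-sentence search results
#   phrase = j.scrape_for_phrase("日本語")       # word page scraped via parse_phrase_page_data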
def contains_kanji_glyph(self, kanji):
kanjiGlyphToken = f'<h1 class="character" data-area-name="print" lang="ja">{kanji}</h1>'
return kanjiGlyphToken in str(self.html)
def _get_int_between_strings(self, start_string, end_string):
string_between_strings = get_string_between_strings(self.html, start_string, end_string)
return int(string_between_strings) if string_between_strings else None
def _get_newspaper_frequency_rank(self):
frequency_section = get_string_between_strings(self.html, '<div class="frequency">', '</div>')
return get_string_between_strings(frequency_section, '<strong>', '</strong>') if frequency_section else None
def _get_yomi(self, page_html, yomiLocatorSymbol):
yomi_section = get_string_between_strings(self.html, f'<dt>{yomiLocatorSymbol}:</dt>', '</dl>')
return parse_anchors_to_array(yomi_section) or ''
def get_kunyomi(self):
return self._get_yomi(self.html, KUNYOMI_LOCATOR_SYMBOL)
def get_onyomi(self):
return self._get_yomi(self.html, ONYOMI_LOCATOR_SYMBOL)
def _get_yomi_examples(self, yomiLocatorSymbol):
locator_string = f'<h2>{yomiLocatorSymbol} reading compounds</h2>'
example_section = get_string_between_strings(self.html, locator_string, '</ul>')
if not example_section:
return []
regex = r'<li>(.*?)</li>'
regex_results = map(lambda x: x.strip(), re.findall(regex, example_section, re.DOTALL))
for example in regex_results:
example_lines = list(map(lambda x: x.strip(), example.split('\n')))
yield {
'example': example_lines[0],
'reading': example_lines[1].replace('【', '').replace('】', ''),
'meaning': html.unescape(example_lines[2]),
}
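# Shape sketch (values are hypothetical): each yielded dict looks like
#   {'example': '外国', 'reading': 'がいこく', 'meaning': 'foreign country'}
# i.e. the first <li> line is the compound, the second the 【reading】, the third the gloss.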
def get_onyomi_examples(self):
return self._get_yomi_examples(ONYOMI_LOCATOR_SYMBOL)
def get_kunyomi_examples(self):
return self._get_yomi_examples(KUNYOMI_LOCATOR_SYMBOL)
def get_rad | if not re.search(kanjiRegex, unlifted[i]):
kanaEnding.append(unlifted[i])
else: | random_line_split |
jisho.py | in sentenceElement.select("span.furigana"):
s.extract()
japanese = sentenceElement.text
sentences.append({'english': english, 'japanese': japanese, 'pieces': pieces})
meanings.append({
'seeAlsoTerms': seeAlsoTerms,
'sentences': sentences,
'definition': meaning,
'supplemental': supplemental,
'definitionAbstract': meaningAbstract,
'tags': mostRecentWordTypes,
})
return meanings, otherForms, notes
def uri_for_phrase_scrape(searchTerm):
return f'https://jisho.org/word/{urllib.parse.quote(searchTerm)}'
def parse_phrase_page_data(pageHtml, query):
my_html = BeautifulSoup(pageHtml, "lxml")
meanings, otherForms, notes = get_meanings_other_forms_and_notes(my_html)
result = {
'found': True,
'query': query,
'uri': uri_for_phrase_scrape(query),
'tags': get_tags(my_html),
'meanings': meanings,
'other_forms': otherForms,
'notes': notes
}
return result
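# Illustrative usage sketch (not part of the original module; names below are assumptions):
#
#   j = Jisho()
#   entry = j.scrape_for_phrase('勉強')
#   entry['found']                            # True when the word page exists
#   entry['meanings'][0]['definition']        # first sense text
#   entry['other_forms']                      # list of {'kanji': ..., 'kana': ...} dicts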
class Jisho:
"""
A class to interface with Jisho.org and store search results for use.
Stores html results from queries to Jisho.org as an instance variable
	and the corresponding requests response object for later use.
"""
def __init__(self):
self.html = None
self.response = None
def search_for_phrase(self, phrase):
"""Directly use Jisho's official API to get info on a phrase (can be multiple characters)"""
uri = uriForPhraseSearch(phrase)
return json.loads(requests.get(uri).content)
def search_for_kanji(self, kanji, depth = "shallow"):
"""Return lots of information for a *single* character"""
uri = uri_for_search(kanji, filter="kanji")
self._extract_html(uri)
return self.parse_kanji_page_data(kanji, depth)
def search_for_examples(self, phrase):
"""Return """
uri = uri_for_search(phrase, filter="sentences")
self._extract_html(uri)
return parse_example_page_data(self.html, phrase)
def scrape_for_phrase(self, phrase):
uri = uri_for_phrase_scrape(phrase)
response = requests.get(uri)
return parse_phrase_page_data(response.content, phrase)
def contains_kanji_glyph(self, kanji):
kanjiGlyphToken = f'<h1 class="character" data-area-name="print" lang="ja">{kanji}</h1>'
return kanjiGlyphToken in str(self.html)
def _get_int_between_strings(self, start_string, end_string):
string_between_strings = get_string_between_strings(self.html, start_string, end_string)
return int(string_between_strings) if string_between_strings else None
def _get_newspaper_frequency_rank(self):
frequency_section = get_string_between_strings(self.html, '<div class="frequency">', '</div>')
return get_string_between_strings(frequency_section, '<strong>', '</strong>') if frequency_section else None
def _get_yomi(self, page_html, yomiLocatorSymbol):
yomi_section = get_string_between_strings(self.html, f'<dt>{yomiLocatorSymbol}:</dt>', '</dl>')
return parse_anchors_to_array(yomi_section) or ''
def get_kunyomi(self):
return self._get_yomi(self.html, KUNYOMI_LOCATOR_SYMBOL)
def get_onyomi(self):
return self._get_yomi(self.html, ONYOMI_LOCATOR_SYMBOL)
def _get_yomi_examples(self, yomiLocatorSymbol):
locator_string = f'<h2>{yomiLocatorSymbol} reading compounds</h2>'
example_section = get_string_between_strings(self.html, locator_string, '</ul>')
if not example_section:
return []
regex = r'<li>(.*?)</li>'
regex_results = map(lambda x: x.strip(), re.findall(regex, example_section, re.DOTALL))
for example in regex_results:
example_lines = list(map(lambda x: x.strip(), example.split('\n')))
yield {
'example': example_lines[0],
'reading': example_lines[1].replace('【', '').replace('】', ''),
'meaning': html.unescape(example_lines[2]),
}
def get_onyomi_examples(self):
return self._get_yomi_examples(ONYOMI_LOCATOR_SYMBOL)
def get_kunyomi_examples(self):
return self._get_yomi_examples(KUNYOMI_LOCATOR_SYMBOL)
def get_radical(self):
radicalMeaningStartString = '<span class="radical_meaning">'
radicalMeaningEndString = '</span>'
radicalMeaning = self.html.select_one("span.radical_meaning")
# TODO: Improve this? I don't like all the string finding that much, rather do it with BS finding
if radicalMeaning:
page_html_string = str(self.html)
radicalMeaningStartIndex = page_html_string.find(radicalMeaningStartString)
radicalMeaningEndIndex = page_html_string.find(radicalMeaningEndString, radicalMeaningStartIndex)
radicalSymbolStartIndex = radicalMeaningEndIndex + len(radicalMeaningEndString)
radicalSymbolEndString = '</span>'
radicalSymbolEndIndex = page_html_string.find(radicalSymbolEndString, radicalSymbolStartIndex)
radicalSymbolsString = page_html_string[radicalSymbolStartIndex:radicalSymbolEndIndex].replace("\n", '').strip()
if len(radicalSymbolsString) > 1:
radicalForms = radicalSymbolsString[1:].replace('(', '').replace(')', '').strip().split(', ')
return {'symbol': radicalSymbolsString[0], 'forms': radicalForms, 'meaning': radicalMeaning.string.strip()}
return {'symbol': radicalSymbolsString, 'meaning': radicalMeaning.text.replace("\n", '').strip()}
return None
def get_parts(self):
parts_section = self.html.find("dt", text="Parts:").find_next_sibling('dd')
result = parse_anchors_to_array(str(parts_section))
result.sort()
return result
def get_svg_uri(self):
svg_regex = re.compile(r"var url = \'//(.*?cloudfront\.net/.*?.svg)")
regex_result = svg_regex.search(str(self.html))
return f'https://{regex_result[1]}' if regex_result else None
def parse_kanji_page_data(self, kanji, depth):
result = {'query': kanji, 'found': self.contains_kanji_glyph(kanji)}
if not result['found']:
return result
result['taughtIn'] = get_string_between_strings(self.html, 'taught in <strong>', '</strong>')
result['jlptLevel'] = get_string_between_strings(self.html, 'JLPT level <strong>', '</strong>')
result['newspaperFrequencyRank'] = self._get_newspaper_frequency_rank()
result['strokeCount'] = self._get_int_between_strings('<strong>', '</strong> strokes')
result['meaning'] = html.unescape(
get_string_between_strings(self.html, '<div class="kanji-details__main-meanings">', '</div>')).strip().replace("\n", '')
result['kunyomi'] = self.get_kunyomi()
result['onyomi'] = self.get_onyomi()
result['onyomiExamples'] = list(self.get_onyomi_examples())
result['kunyomiExamples'] = list(self.get_kunyomi_examples())
result['radical'] = self.get_radical()
result['parts'] = self.get_parts()
result['strokeOrderDiagramUri'] = getUriForStrokeOrderDiagram(kanji)
result['strokeOrderSvgUri'] = self.get_svg_uri()
result['strokeOrderGifUri'] = get_gif_uri(kanji)
result['uri'] = uri_for_search(kanji, filter="kanji")
return result
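	# Usage sketch (assumption, not in the original file): search_for_kanji() drives the
	# parsing above and hands back the plain dict built by parse_kanji_page_data, e.g.
	#
	#   j = Jisho()
	#   info = j.search_for_kanji('語')
	#   info['strokeCount'], info['kunyomi'], info['onyomi']
	#   info['radical']['symbol'] if info['radical'] else None
	#   info['uri']   # link back to the Jisho #kanji search page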
def _extract_html(self, url):
"""With the response, extract the HTML and store it into the object."""
self.response = requests.get(url, timeout=5)
self.html = BeautifulSoup(self.response.content, "lxml") if self.response.ok else None
# return self.html
def search_for_word(self, word, depth="shallow"):
"""Take a japanese word and spit out well-formatted dictionaries for each entry.
"""
# self._get_search_response(word)
self._extract_html(uri_for_search(word))
results = self.html.select(".concept_light.clearfix")
# print(results)
fmtd_results = []
if depth == "shallow":
for r in results:
fmtd_results.append(self._extract_dictionary_information(r))
elif depth == "deep":
for r in results:
fmtd_results.append(self._extract_dictionary_information(r))
				# If there are more than 20 results, Jisho paginates them behind a "More Words" link; follow it.
more = self.html.select_one(".more")
while more:
link = more.get("href")
response = requests.get(r"http:" + link, timeout=5)
html = BeautifulSoup(response.content, "html.parser")
results = html.select(".concept_light.clearfix")
for r in results:
fmtd_results.append(self._extract_dictionary_information(r))
more = html.select_one(".more")
return fmtd_results
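	# Hedged usage sketch (not part of the original source): each element returned by
	# search_for_word() comes from _extract_dictionary_information(), whose exact fields
	# are defined elsewhere in this class.
	#
	#   j = Jisho()
	#   shallow = j.search_for_word('猫')             # first results page only
	#   deep = j.search_for_word('猫', depth='deep')  # follows every "More Words" link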
	def _isolate_meanings(self, meanings_list):

jisho.py | 
def get_string_between_strings(data, start_string, end_string):
regex = f'{re.escape(start_string)}(.*?){re.escape(end_string)}'
# Need DOTALL because the HTML still has its newline characters
match = re.search(regex, str(data), re.DOTALL)
return match[1] if match is not None else None
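# Minimal example of the helper above (illustrative only):
#
#   get_string_between_strings('<strong>12</strong> strokes', '<strong>', '</strong>')
#   # -> '12'
#   get_string_between_strings('no match here', '<strong>', '</strong>')
#   # -> None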
def parse_anchors_to_array(my_string):
regex = r'<a href=".*?">(.*?)</a>'
return re.findall(regex, my_string)
def get_gif_uri(kanji):
"""Uses the unicode of an input kanji to find the corresponding stroke order gif in mistval's collection"""
fileName = kanji.encode("unicode-escape").decode("utf-8").replace("\\u", '') + '.gif'
animationUri = f'https://raw.githubusercontent.com/mistval/kanji_images/master/gifs/{fileName}'
return animationUri
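# Example of the filename scheme used above (the gif host is mistval's kanji_images repo):
#
#   '日'.encode('unicode-escape') -> b'\\u65e5', so get_gif_uri('日') points at
#   https://raw.githubusercontent.com/mistval/kanji_images/master/gifs/65e5.gif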
def kana_to_halpern(untrans):
"""Take a word completely in hiragana or katakana and translate it into romaji"""
halpern = []
while untrans:
if len(untrans) > 1:
first = untrans[0]
second = untrans[1]
else:
first = untrans[0]
second = None
if first in hiragana:
if second and second in ["ゃ", "ゅ", "ょ"]:
halpern.append(hira2eng[first + second])
untrans = untrans[2:]
else:
halpern.append(hira2eng[first])
untrans = untrans[1:]
else:
if second and second in ["ャ", "ュ", "ョ"]:
halpern.append(kata2eng[first + second])
untrans = untrans[2:]
else:
halpern.append(kata2eng[first])
untrans = untrans[1:]
del first
del second
return "".join(halpern)
def contains_kana(word):
"""Takes a word and returns true if there are hiragana or katakana present within the word"""
for k in word:
if k in hiragana or k in katakana or k in small_characters:
return True
return False
kanjiRegex = '[\u4e00-\u9faf\u3400-\u4dbf]'
def get_kanji_and_kana(div):
ul = div.select_one('ul')
# contents = ul.contents()
kanji = ''
kana = ''
for child in ul.children:
if child.name == 'li':
li = child
furigana = li.select_one("span.furigana").text if li.select_one("span.furigana") is not None else None
unlifted = li.select_one("span.unlinked").text if li.select_one("span.unlinked") is not None else None
if furigana:
kanji += unlifted
kana += furigana
kanaEnding = []
for i in reversed(range(len(unlifted))):
if not re.search(kanjiRegex, unlifted[i]):
kanaEnding.append(unlifted[i])
else:
break
kana += ''.join(kanaEnding[::-1])
else:
kanji += unlifted
kana += unlifted
else:
text = str(child).strip()
if text:
kanji += text
kana += text
return kanji, kana
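# Sketch of what the loop above produces (illustrative; structure inferred from the
# furigana/unlinked spans Jisho renders for each example sentence):
#
#   for a sentence item with furigana over 日本語 and 勉強,
#   kanji -> '日本語を勉強する'
#   kana  -> 'にほんごをべんきょうする'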
def get_pieces(sentenceElement):
pieceElements = sentenceElement.select("li.clearfix") + sentenceElement.select("el")
pieces = []
for pieceElement in pieceElements:
if pieceElement.name == 'li':
pieces.append({
'lifted': pieceElement.select_one("span.furigana").text if pieceElement.select_one("span.furigana") is not None else '',
'unlifted': pieceElement.select_one("span.unlinked").text if pieceElement.select_one("span.unlinked") is not None else '',
})
else:
pieces.append({
'lifted': '',
'unlifted': pieceElement.text,
})
return pieces
def parseExampleDiv(div):
english = str(div.select_one('span.english').find(text=True))
kanji, kana = get_kanji_and_kana(div)
return english, kanji, kana, get_pieces(div)
def parse_example_page_data(pageHtml, phrase):
string_page_html = str(pageHtml)
# pageHtmlReplaced = re.sub(
# r'</li>\s*([^\s<>]+)\s*<li class="clearfix">', r'</li><el>\1</el><li class="clearfix">', string_page_html)
# myhtml = BeautifulSoup(pageHtmlReplaced, 'lxml')
divs = pageHtml.select("div.sentence_content")
results = []
for div in divs:
# div = divs.eq(i)
results.append(parseExampleDiv(div))
return {
'query': phrase,
'found': len(results) > 0,
'result': results,
'uri': uri_for_search(phrase, filter="sentences"),
'phrase': phrase
}
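# Hedged usage sketch (not in the original module): search_for_examples() feeds this
# parser, so a typical call looks like
#
#   j = Jisho()
#   examples = j.search_for_examples('勉強')
#   examples['found']        # True when at least one sentence div was parsed
#   examples['result'][0]    # (english, kanji, kana, pieces) tuple from parseExampleDiv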
# PHRASE SCRAPE FUNCTIONS START
def get_tags(my_html):
tags = []
tagElements = my_html.select("span.concept_light-tag")
for tagElement in tagElements:
tags.append(tagElement.text)
return tags
def get_meanings_other_forms_and_notes(my_html):
otherForms = []
notes = []
meaningsWrapper = my_html.select_one(
'#page_container > div > div > article > div > div.concept_light-meanings.medium-9.columns > div')
meaningsChildren = meaningsWrapper.children
meanings = []
mostRecentWordTypes = []
for child in meaningsChildren:
if child.get("class")[0] == 'meaning-tags':
mostRecentWordTypes = list(map(lambda x: x.strip().lower(), child.text.split(',')))
elif mostRecentWordTypes[0] == 'other forms':
otherForms = list(map(lambda y: ({'kanji': y[0], 'kana': y[1]}),
map(lambda x: x.replace('【', '').replace('】', '').split(' '),
child.text.split('、'))))
elif mostRecentWordTypes[0] == 'notes':
			notes = child.text.split('\n')  # .text is a property on bs4 tags, not a callable
else:
meaning = child.select_one("span.meaning-meaning").text
try:
				child.select_one('.meaning-abstract').select_one('a').extract()  # drop the "Read more" anchor before reading the text
meaningAbstract = child.select_one('.meaning-abstract').text
except AttributeError:
meaningAbstract = ''
try:
supplemental = list(filter(lambda y: bool(y),
map(lambda x: x.strip(), child.select_one("span.supplemental_info").text.split(','))))
except AttributeError: # if we couldn't find supplemental info class
supplemental = []
seeAlsoTerms = []
for i in reversed(range(len(supplemental))):
supplementalEntry = supplemental[i]
if supplementalEntry.startswith('See also'):
seeAlsoTerms.append(supplementalEntry.replace('See also ', ''))
supplemental.pop(i)
sentences = []
			sentenceElements = child.select("span.sentences > div.sentence") or []  # select() (all matches), not select_one()
for sentenceElement in sentenceElements:
english = sentenceElement.select_one("li.english").text
pieces = get_pieces(sentenceElement)
# remove english and furigana to get left with normal japanese
sentenceElement.select_one("li.english").extract()
# could (will) be multiple furiganas
for s in sentenceElement.select("span.furigana"):
s.extract()
japanese = sentenceElement.text
sentences.append({'english': english, 'japanese': japanese, 'pieces': pieces})
meanings.append({
'seeAlsoTerms': seeAlsoTerms,
'sentences': sentences,
'definition': meaning,
'supplemental': supplemental,
'definitionAbstract': meaningAbstract,
'tags': mostRecentWordTypes,
})
return meanings, otherForms, notes
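# Shape note (inferred from the code above, stated here for convenience):
#   meanings   -> list of dicts with 'definition', 'supplemental', 'seeAlsoTerms',
#                 'sentences', 'definitionAbstract' and 'tags' keys
#   otherForms -> list of {'kanji': ..., 'kana': ...} dicts
#   notes      -> list of note strings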
def uri_for_phrase_scrape(searchTerm):
return f'https://jisho.org/word/{urllib.parse.quote(searchTerm)}'
def parse_phrase_page_data(pageHtml, query):
my_html = BeautifulSoup(pageHtml, "lxml")
meanings, otherForms, notes = get_meanings_other_forms_and_notes(my_html)
result = {
'found': True,
'query': query,
'uri': uri_for_phrase_scrape(query),
'tags': get_tags(my_html),
'meanings': meanings,
'other_forms': otherForms,
'notes': notes
}
return result
class Jisho:
"""
A class to interface with Jisho.org and store search results for use.
Stores html results from queries to Jisho.org as an instance variable
	and the corresponding requests response object for later use.
"""
def __init__(self):
self.html = None
self.response = None
def search_for_phrase(self, phrase):
"""Directly use Jisho's official API to get info on a phrase (can be multiple characters)"""
uri = uriForPhraseSearch(phrase)
return json.loads(requests.get(uri).content)
	# (elided row middle: return f'{JISHO_API}?keyword={urllib.parse.quote(phrase)}')

jisho.py | 			seeAlsoTerms = []
for i in reversed(range(len(supplemental))):
supplementalEntry = supplemental[i]
if supplementalEntry.startswith('See also'):
seeAlsoTerms.append(supplementalEntry.replace('See also ', ''))
supplemental.pop(i)
sentences = []
			sentenceElements = child.select("span.sentences > div.sentence") or []  # select() (all matches), not select_one()
for sentenceElement in sentenceElements:
english = sentenceElement.select_one("li.english").text
pieces = get_pieces(sentenceElement)
# remove english and furigana to get left with normal japanese
sentenceElement.select_one("li.english").extract()
# could (will) be multiple furiganas
for s in sentenceElement.select("span.furigana"):
s.extract()
japanese = sentenceElement.text
sentences.append({'english': english, 'japanese': japanese, 'pieces': pieces})
meanings.append({
'seeAlsoTerms': seeAlsoTerms,
'sentences': sentences,
'definition': meaning,
'supplemental': supplemental,
'definitionAbstract': meaningAbstract,
'tags': mostRecentWordTypes,
})
return meanings, otherForms, notes
def uri_for_phrase_scrape(searchTerm):
return f'https://jisho.org/word/{urllib.parse.quote(searchTerm)}'
def parse_phrase_page_data(pageHtml, query):
my_html = BeautifulSoup(pageHtml, "lxml")
meanings, otherForms, notes = get_meanings_other_forms_and_notes(my_html)
result = {
'found': True,
'query': query,
'uri': uri_for_phrase_scrape(query),
'tags': get_tags(my_html),
'meanings': meanings,
'other_forms': otherForms,
'notes': notes
}
return result
class Jisho:
"""
A class to interface with Jisho.org and store search results for use.
Stores html results from queries to Jisho.org as an instance variable
	and the corresponding requests response object for later use.
"""
def __init__(self):
self.html = None
self.response = None
def search_for_phrase(self, phrase):
"""Directly use Jisho's official API to get info on a phrase (can be multiple characters)"""
uri = uriForPhraseSearch(phrase)
return json.loads(requests.get(uri).content)
def search_for_kanji(self, kanji, depth = "shallow"):
"""Return lots of information for a *single* character"""
uri = uri_for_search(kanji, filter="kanji")
self._extract_html(uri)
return self.parse_kanji_page_data(kanji, depth)
def search_for_examples(self, phrase):
"""Return """
uri = uri_for_search(phrase, filter="sentences")
self._extract_html(uri)
return parse_example_page_data(self.html, phrase)
def scrape_for_phrase(self, phrase):
uri = uri_for_phrase_scrape(phrase)
response = requests.get(uri)
return parse_phrase_page_data(response.content, phrase)
def contains_kanji_glyph(self, kanji):
kanjiGlyphToken = f'<h1 class="character" data-area-name="print" lang="ja">{kanji}</h1>'
return kanjiGlyphToken in str(self.html)
def _get_int_between_strings(self, start_string, end_string):
string_between_strings = get_string_between_strings(self.html, start_string, end_string)
return int(string_between_strings) if string_between_strings else None
def _get_newspaper_frequency_rank(self):
frequency_section = get_string_between_strings(self.html, '<div class="frequency">', '</div>')
return get_string_between_strings(frequency_section, '<strong>', '</strong>') if frequency_section else None
def _get_yomi(self, page_html, yomiLocatorSymbol):
yomi_section = get_string_between_strings(self.html, f'<dt>{yomiLocatorSymbol}:</dt>', '</dl>')
return parse_anchors_to_array(yomi_section) or ''
def get_kunyomi(self):
return self._get_yomi(self.html, KUNYOMI_LOCATOR_SYMBOL)
def get_onyomi(self):
return self._get_yomi(self.html, ONYOMI_LOCATOR_SYMBOL)
def _get_yomi_examples(self, yomiLocatorSymbol):
locator_string = f'<h2>{yomiLocatorSymbol} reading compounds</h2>'
example_section = get_string_between_strings(self.html, locator_string, '</ul>')
if not example_section:
return []
regex = r'<li>(.*?)</li>'
regex_results = map(lambda x: x.strip(), re.findall(regex, example_section, re.DOTALL))
for example in regex_results:
example_lines = list(map(lambda x: x.strip(), example.split('\n')))
yield {
'example': example_lines[0],
'reading': example_lines[1].replace('【', '').replace('】', ''),
'meaning': html.unescape(example_lines[2]),
}
def get_onyomi_examples(self):
return self._get_yomi_examples(ONYOMI_LOCATOR_SYMBOL)
def get_kunyomi_examples(self):
return self._get_yomi_examples(KUNYOMI_LOCATOR_SYMBOL)
def get_radical(self):
radicalMeaningStartString = '<span class="radical_meaning">'
radicalMeaningEndString = '</span>'
radicalMeaning = self.html.select_one("span.radical_meaning")
# TODO: Improve this? I don't like all the string finding that much, rather do it with BS finding
if radicalMeaning:
page_html_string = str(self.html)
radicalMeaningStartIndex = page_html_string.find(radicalMeaningStartString)
radicalMeaningEndIndex = page_html_string.find(radicalMeaningEndString, radicalMeaningStartIndex)
radicalSymbolStartIndex = radicalMeaningEndIndex + len(radicalMeaningEndString)
radicalSymbolEndString = '</span>'
radicalSymbolEndIndex = page_html_string.find(radicalSymbolEndString, radicalSymbolStartIndex)
radicalSymbolsString = page_html_string[radicalSymbolStartIndex:radicalSymbolEndIndex].replace("\n", '').strip()
if len(radicalSymbolsString) > 1:
radicalForms = radicalSymbolsString[1:].replace('(', '').replace(')', '').strip().split(', ')
return {'symbol': radicalSymbolsString[0], 'forms': radicalForms, 'meaning': radicalMeaning.string.strip()}
return {'symbol': radicalSymbolsString, 'meaning': radicalMeaning.text.replace("\n", '').strip()}
return None
def get_parts(self):
parts_section = self.html.find("dt", text="Parts:").find_next_sibling('dd')
result = parse_anchors_to_array(str(parts_section))
result.sort()
return result
def get_svg_uri(self):
svg_regex = re.compile(r"var url = \'//(.*?cloudfront\.net/.*?.svg)")
regex_result = svg_regex.search(str(self.html))
return f'https://{regex_result[1]}' if regex_result else None
def parse_kanji_page_data(self, kanji, depth):
result = {'query': kanji, 'found': self.contains_kanji_glyph(kanji)}
if not result['found']:
return result
result['taughtIn'] = get_string_between_strings(self.html, 'taught in <strong>', '</strong>')
result['jlptLevel'] = get_string_between_strings(self.html, 'JLPT level <strong>', '</strong>')
result['newspaperFrequencyRank'] = self._get_newspaper_frequency_rank()
result['strokeCount'] = self._get_int_between_strings('<strong>', '</strong> strokes')
result['meaning'] = html.unescape(
get_string_between_strings(self.html, '<div class="kanji-details__main-meanings">', '</div>')).strip().replace("\n", '')
result['kunyomi'] = self.get_kunyomi()
result['onyomi'] = self.get_onyomi()
result['onyomiExamples'] = list(self.get_onyomi_examples())
result['kunyomiExamples'] = list(self.get_kunyomi_examples())
result['radical'] = self.get_radical()
result['parts'] = self.get_parts()
result['strokeOrderDiagramUri'] = getUriForStrokeOrderDiagram(kanji)
result['strokeOrderSvgUri'] = self.get_svg_uri()
result['strokeOrderGifUri'] = get_gif_uri(kanji)
result['uri'] = uri_for_search(kanji, filter="kanji")
return result
def _extract_html(self, url):
"""With the response, extract the HTML and store it into the object."""
self.response = requests.get(url, timeout=5)
self.html = BeautifulSoup(self.response.content, "lxml") if self.response.ok else None
# return self.html
def search_for_word(self, word, depth="shallow"):
"""Take a japanese word and spit out well-formatted dictionaries for each entry.
"""
# self._get_search_response(word)
self._extract_html(uri_for_search(word))
results = self.html.select(".concept_light.clearfix")
# print(results)
fmtd_results = []
if depth == "shallow":
for r in results:
				fmtd_results.append(self._extract_dictionary_information(r))
		elif depth == "deep":

main_window_test.py | # We stay at 4 because it's appropriate
# Although the view index stayed the same, the view still changed, so the GUI needs to change.
app.check_gui_calls(app.mainwindow_gui, ['view_closed', 'change_current_pane', 'refresh_status_line'])
app.mw.close_pane(3)
eq_(app.mw.current_pane_index, 2)
@with_app(TestApp)
def test_current_pane_index(app):
# The main window has a `current_pane_index` property which indicate which view is currently
# selected.
for index in range(5):
eq_(app.mw.current_pane_index, index)
app.mw.select_next_view()
# we can't go further
app.mw.select_next_view()
eq_(app.mw.current_pane_index, 4)
for index in reversed(range(5)):
eq_(app.mw.current_pane_index, index)
app.mw.select_previous_view()
# we can't go further
app.mw.select_previous_view()
eq_(app.mw.current_pane_index, 0)
@with_app(TestApp)
def test_initial_panes(app):
eq_(app.mw.pane_count, 5)
eq_(app.mw.pane_label(0), "Net Worth")
eq_(app.mw.pane_label(1), "Profit & Loss")
eq_(app.mw.pane_label(2), "Transactions")
eq_(app.mw.pane_label(3), "Schedules")
eq_(app.mw.pane_label(4), "Budgets")
eq_(app.mw.pane_type(0), PaneType.NetWorth)
eq_(app.mw.pane_type(1), PaneType.Profit)
eq_(app.mw.pane_type(2), PaneType.Transaction)
eq_(app.mw.pane_type(3), PaneType.Schedule)
eq_(app.mw.pane_type(4), PaneType.Budget)
@with_app(TestApp)
def test_move_pane(app):
# moving a pane takes a pane at a specified index and moves it to the dest index
app.mw.move_pane(2, 1)
eq_(app.mw.pane_label(1), "Transactions")
eq_(app.mw.pane_label(2), "Profit & Loss")
# when moving a pane, the dest index is the index *with* the pane at its original position,
# *not* the index with the pane removed from the list.
app.mw.move_pane(2, 3)
eq_(app.mw.pane_label(2), "Schedules")
eq_(app.mw.pane_label(3), "Profit & Loss")
# When the pane index is the same as the dest index, we do nothing.
app.mw.move_pane(2, 2)
eq_(app.mw.pane_label(1), "Transactions")
eq_(app.mw.pane_label(2), "Schedules")
eq_(app.mw.pane_label(3), "Profit & Loss")
@with_app(TestApp)
def test_move_pane_before_selected(app):
# When a non-selected pane is moved before the selected one, update the selected index
app.mw.move_pane(1, 0)
eq_(app.mw.current_pane_index, 1)
@with_app(TestApp)
def test_move_pane_selected(app):
# When the moved pane is selected, the selection follows the pane.
app.mw.move_pane(0, 3)
eq_(app.mw.current_pane_index, 3)
@with_app(TestApp)
def test_selected_account_is_updated_on_nonrevalidating_show(app):
# When navigating between sheet-panes, the selected account (used for show_account and apanel)
# is correctly updated
app.add_account('Asset')
app.add_account('Income', account_type=AccountType.Income)
app.show_nwview()
app.show_pview() # no revalidation since nothing has changed
app.show_account()
app.check_current_pane(PaneType.Account, account_name='Income')
@with_app(TestApp)
def test_select_pane_of_type_creates_new_pane_if_needed(app):
# calling select_pane_of_type() creates a new pane if needed
app.mw.close_pane(0) # net worth
app.mw.select_pane_of_type(PaneType.NetWorth)
eq_(app.mw.pane_count, 5)
app.check_current_pane(PaneType.NetWorth)
@with_app(TestApp)
def test_select_ttable_on_sfield_query(app):
# Setting a value in the search field selects the ttable.
app.sfield.text = 'foobar'
eq_(app.mw.current_pane_index, 2)
@with_app(TestApp)
def test_dont_close_last_pane(app):
# if close_pane() is called with only one pane left, don't do anything.
while (app.mw.pane_count > 1):
app.mw.close_pane(0)
app.mw.close_pane(0) # no crash
eq_(app.mw.pane_count, 1) | # columns.
app.show_nwview()
expected = [("Account #", False), ("Start", True), ("Change", False), ("Change %", False),
("Budgeted", True)]
eq_(app.mw.column_menu_items(), expected)
app.mw.toggle_column_menu_item(0)
expected[0] = ("Account #", True)
eq_(app.mw.column_menu_items(), expected)
@with_app(TestApp)
def test_column_visibility_change_actually_changes_visibility(app):
# Changing the value of a column visibility in view options actually changes visibility
app.show_tview()
app.set_column_visible('description', False)
assert not app.ttable.columns.column_is_visible('description')
@with_app(TestApp)
def test_change_view_options_while_editing(app):
# When a table is in editing mode and that a column visibility is changed, we have to tell the
# gui to stop editing, or else we end up in a state where the core thinks it's editing when the
# GUI isn't.
app.show_tview()
app.mw.new_item()
app.ttable.payee = 'something' # in editing mode
app.set_column_visible('description', False)
assert app.ttable.edited is None
#--- Cleared GUI calls
def app_cleared_gui_calls():
app = TestApp()
app.clear_gui_calls()
return app
@with_app(app_cleared_gui_calls)
def test_new_tab(app):
emptyview = app.new_tab()
eq_(app.mw.pane_count, 6)
app.check_current_pane(PaneType.Empty)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
emptyview.select_pane_type(PaneType.Profit)
app.check_current_pane(PaneType.Profit)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
@with_app(app_cleared_gui_calls)
def test_toggle_area_visibility(app):
app.show_nwview()
app.mw.toggle_area_visibility(PaneArea.BottomGraph)
app.nwview.view.check_gui_calls(['update_visibility'])
# It sends the message to the main window as well (so it can update its buttons)
app.mw.view.check_gui_calls(['update_area_visibility'])
# Also update the visibility of other views when we select them
app.show_pview()
app.pview.view.check_gui_calls(['update_visibility'])
#--- One account
def app_one_account():
app = TestApp()
app.add_account("foo")
app.clear_gui_calls()
return app
@with_app(app_one_account)
def test_rename_opened_account_changes_tab_label(app):
# Renaming the account with an opened tab renames that tab.
app.show_account()
index = app.mw.current_pane_index
app.show_nwview()
app.clear_gui_calls()
app.bsheet.selected.name = 'renamed'
app.bsheet.save_edits()
eq_(app.mw.pane_label(index), 'renamed')
app.check_gui_calls(app.mainwindow_gui, ['refresh_panes', 'refresh_undo_actions'])
@with_app(app_one_account)
def test_show_account_opens_a_new_tab(app):
# Showing an account opens a new tab with the account shown in it.
app.show_account()
eq_(app.mw.pane_count, 6)
eq_(app.mw.current_pane_index, 5)
eq_(app.mw.pane_type(5), PaneType.Account)
eq_(app.mw.pane_label(5), "foo")
expected = ['refresh_panes', 'change_current_pane']
app.check_gui_calls_partial(app.mainwindow_gui, expected, verify_order=True)
@with_app(app_one_account)
def test_change_date_range(app):
app.show_account()
app.show_nwview()
app.clear_gui_calls()
app.doc.date_range = app.doc.date_range.prev()
expected_calls = ['refresh', 'animate_backward']
app.drsel.view.check_gui_calls(expected_calls)
|
@with_app(TestApp)
def test_column_menu_attributes(app):
	# The column menu depends on the selected pane and shows the display attribute of optional
main_window_test.py | # We stay at 4 because it's appropriate
# Although the view index stayed the same, the view still changed, so the GUI needs to change.
app.check_gui_calls(app.mainwindow_gui, ['view_closed', 'change_current_pane', 'refresh_status_line'])
app.mw.close_pane(3)
eq_(app.mw.current_pane_index, 2)
@with_app(TestApp)
def test_current_pane_index(app):
# The main window has a `current_pane_index` property which indicate which view is currently
# selected.
for index in range(5):
eq_(app.mw.current_pane_index, index)
app.mw.select_next_view()
# we can't go further
app.mw.select_next_view()
eq_(app.mw.current_pane_index, 4)
for index in reversed(range(5)):
eq_(app.mw.current_pane_index, index)
app.mw.select_previous_view()
# we can't go further
app.mw.select_previous_view()
eq_(app.mw.current_pane_index, 0)
@with_app(TestApp)
def test_initial_panes(app):
eq_(app.mw.pane_count, 5)
eq_(app.mw.pane_label(0), "Net Worth")
eq_(app.mw.pane_label(1), "Profit & Loss")
eq_(app.mw.pane_label(2), "Transactions")
eq_(app.mw.pane_label(3), "Schedules")
eq_(app.mw.pane_label(4), "Budgets")
eq_(app.mw.pane_type(0), PaneType.NetWorth)
eq_(app.mw.pane_type(1), PaneType.Profit)
eq_(app.mw.pane_type(2), PaneType.Transaction)
eq_(app.mw.pane_type(3), PaneType.Schedule)
eq_(app.mw.pane_type(4), PaneType.Budget)
@with_app(TestApp)
def test_move_pane(app):
# moving a pane takes a pane at a specified index and moves it to the dest index
app.mw.move_pane(2, 1)
eq_(app.mw.pane_label(1), "Transactions")
eq_(app.mw.pane_label(2), "Profit & Loss")
# when moving a pane, the dest index is the index *with* the pane at its original position,
# *not* the index with the pane removed from the list.
app.mw.move_pane(2, 3)
eq_(app.mw.pane_label(2), "Schedules")
eq_(app.mw.pane_label(3), "Profit & Loss")
# When the pane index is the same as the dest index, we do nothing.
app.mw.move_pane(2, 2)
eq_(app.mw.pane_label(1), "Transactions")
eq_(app.mw.pane_label(2), "Schedules")
eq_(app.mw.pane_label(3), "Profit & Loss")
@with_app(TestApp)
def | (app):
# When a non-selected pane is moved before the selected one, update the selected index
app.mw.move_pane(1, 0)
eq_(app.mw.current_pane_index, 1)
@with_app(TestApp)
def test_move_pane_selected(app):
# When the moved pane is selected, the selection follows the pane.
app.mw.move_pane(0, 3)
eq_(app.mw.current_pane_index, 3)
@with_app(TestApp)
def test_selected_account_is_updated_on_nonrevalidating_show(app):
# When navigating between sheet-panes, the selected account (used for show_account and apanel)
# is correctly updated
app.add_account('Asset')
app.add_account('Income', account_type=AccountType.Income)
app.show_nwview()
app.show_pview() # no revalidation since nothing has changed
app.show_account()
app.check_current_pane(PaneType.Account, account_name='Income')
@with_app(TestApp)
def test_select_pane_of_type_creates_new_pane_if_needed(app):
# calling select_pane_of_type() creates a new pane if needed
app.mw.close_pane(0) # net worth
app.mw.select_pane_of_type(PaneType.NetWorth)
eq_(app.mw.pane_count, 5)
app.check_current_pane(PaneType.NetWorth)
@with_app(TestApp)
def test_select_ttable_on_sfield_query(app):
# Setting a value in the search field selects the ttable.
app.sfield.text = 'foobar'
eq_(app.mw.current_pane_index, 2)
@with_app(TestApp)
def test_dont_close_last_pane(app):
# if close_pane() is called with only one pane left, don't do anything.
while (app.mw.pane_count > 1):
app.mw.close_pane(0)
app.mw.close_pane(0) # no crash
eq_(app.mw.pane_count, 1)
@with_app(TestApp)
def test_column_menu_attributes(app):
# The column menu depends on the selected pane and shows the display attribute of optional
# columns.
app.show_nwview()
expected = [("Account #", False), ("Start", True), ("Change", False), ("Change %", False),
("Budgeted", True)]
eq_(app.mw.column_menu_items(), expected)
app.mw.toggle_column_menu_item(0)
expected[0] = ("Account #", True)
eq_(app.mw.column_menu_items(), expected)
@with_app(TestApp)
def test_column_visibility_change_actually_changes_visibility(app):
# Changing the value of a column visibility in view options actually changes visibility
app.show_tview()
app.set_column_visible('description', False)
assert not app.ttable.columns.column_is_visible('description')
@with_app(TestApp)
def test_change_view_options_while_editing(app):
# When a table is in editing mode and that a column visibility is changed, we have to tell the
# gui to stop editing, or else we end up in a state where the core thinks it's editing when the
# GUI isn't.
app.show_tview()
app.mw.new_item()
app.ttable.payee = 'something' # in editing mode
app.set_column_visible('description', False)
assert app.ttable.edited is None
#--- Cleared GUI calls
def app_cleared_gui_calls():
app = TestApp()
app.clear_gui_calls()
return app
@with_app(app_cleared_gui_calls)
def test_new_tab(app):
emptyview = app.new_tab()
eq_(app.mw.pane_count, 6)
app.check_current_pane(PaneType.Empty)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
emptyview.select_pane_type(PaneType.Profit)
app.check_current_pane(PaneType.Profit)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
@with_app(app_cleared_gui_calls)
def test_toggle_area_visibility(app):
app.show_nwview()
app.mw.toggle_area_visibility(PaneArea.BottomGraph)
app.nwview.view.check_gui_calls(['update_visibility'])
# It sends the message to the main window as well (so it can update its buttons)
app.mw.view.check_gui_calls(['update_area_visibility'])
# Also update the visibility of other views when we select them
app.show_pview()
app.pview.view.check_gui_calls(['update_visibility'])
#--- One account
def app_one_account():
app = TestApp()
app.add_account("foo")
app.clear_gui_calls()
return app
@with_app(app_one_account)
def test_rename_opened_account_changes_tab_label(app):
# Renaming the account with an opened tab renames that tab.
app.show_account()
index = app.mw.current_pane_index
app.show_nwview()
app.clear_gui_calls()
app.bsheet.selected.name = 'renamed'
app.bsheet.save_edits()
eq_(app.mw.pane_label(index), 'renamed')
app.check_gui_calls(app.mainwindow_gui, ['refresh_panes', 'refresh_undo_actions'])
@with_app(app_one_account)
def test_show_account_opens_a_new_tab(app):
# Showing an account opens a new tab with the account shown in it.
app.show_account()
eq_(app.mw.pane_count, 6)
eq_(app.mw.current_pane_index, 5)
eq_(app.mw.pane_type(5), PaneType.Account)
eq_(app.mw.pane_label(5), "foo")
expected = ['refresh_panes', 'change_current_pane']
app.check_gui_calls_partial(app.mainwindow_gui, expected, verify_order=True)
@with_app(app_one_account)
def test_change_date_range(app):
app.show_account()
app.show_nwview()
app.clear_gui_calls()
app.doc.date_range = app.doc.date_range.prev()
expected_calls = ['refresh', 'animate_backward']
app.drsel.view.check_gui_calls(expected_calls)
| test_move_pane_before_selected | identifier_name |
main_window_test.py | # We stay at 4 because it's appropriate
# Although the view index stayed the same, the view still changed, so the GUI needs to change.
app.check_gui_calls(app.mainwindow_gui, ['view_closed', 'change_current_pane', 'refresh_status_line'])
app.mw.close_pane(3)
eq_(app.mw.current_pane_index, 2)
@with_app(TestApp)
def test_current_pane_index(app):
# The main window has a `current_pane_index` property which indicate which view is currently
# selected.
for index in range(5):
|
# we can't go further
app.mw.select_next_view()
eq_(app.mw.current_pane_index, 4)
for index in reversed(range(5)):
eq_(app.mw.current_pane_index, index)
app.mw.select_previous_view()
# we can't go further
app.mw.select_previous_view()
eq_(app.mw.current_pane_index, 0)
@with_app(TestApp)
def test_initial_panes(app):
eq_(app.mw.pane_count, 5)
eq_(app.mw.pane_label(0), "Net Worth")
eq_(app.mw.pane_label(1), "Profit & Loss")
eq_(app.mw.pane_label(2), "Transactions")
eq_(app.mw.pane_label(3), "Schedules")
eq_(app.mw.pane_label(4), "Budgets")
eq_(app.mw.pane_type(0), PaneType.NetWorth)
eq_(app.mw.pane_type(1), PaneType.Profit)
eq_(app.mw.pane_type(2), PaneType.Transaction)
eq_(app.mw.pane_type(3), PaneType.Schedule)
eq_(app.mw.pane_type(4), PaneType.Budget)
@with_app(TestApp)
def test_move_pane(app):
# moving a pane takes a pane at a specified index and moves it to the dest index
app.mw.move_pane(2, 1)
eq_(app.mw.pane_label(1), "Transactions")
eq_(app.mw.pane_label(2), "Profit & Loss")
# when moving a pane, the dest index is the index *with* the pane at its original position,
# *not* the index with the pane removed from the list.
app.mw.move_pane(2, 3)
eq_(app.mw.pane_label(2), "Schedules")
eq_(app.mw.pane_label(3), "Profit & Loss")
# When the pane index is the same as the dest index, we do nothing.
app.mw.move_pane(2, 2)
eq_(app.mw.pane_label(1), "Transactions")
eq_(app.mw.pane_label(2), "Schedules")
eq_(app.mw.pane_label(3), "Profit & Loss")
@with_app(TestApp)
def test_move_pane_before_selected(app):
# When a non-selected pane is moved before the selected one, update the selected index
app.mw.move_pane(1, 0)
eq_(app.mw.current_pane_index, 1)
@with_app(TestApp)
def test_move_pane_selected(app):
# When the moved pane is selected, the selection follows the pane.
app.mw.move_pane(0, 3)
eq_(app.mw.current_pane_index, 3)
@with_app(TestApp)
def test_selected_account_is_updated_on_nonrevalidating_show(app):
# When navigating between sheet-panes, the selected account (used for show_account and apanel)
# is correctly updated
app.add_account('Asset')
app.add_account('Income', account_type=AccountType.Income)
app.show_nwview()
app.show_pview() # no revalidation since nothing has changed
app.show_account()
app.check_current_pane(PaneType.Account, account_name='Income')
@with_app(TestApp)
def test_select_pane_of_type_creates_new_pane_if_needed(app):
# calling select_pane_of_type() creates a new pane if needed
app.mw.close_pane(0) # net worth
app.mw.select_pane_of_type(PaneType.NetWorth)
eq_(app.mw.pane_count, 5)
app.check_current_pane(PaneType.NetWorth)
@with_app(TestApp)
def test_select_ttable_on_sfield_query(app):
# Setting a value in the search field selects the ttable.
app.sfield.text = 'foobar'
eq_(app.mw.current_pane_index, 2)
@with_app(TestApp)
def test_dont_close_last_pane(app):
# if close_pane() is called with only one pane left, don't do anything.
while (app.mw.pane_count > 1):
app.mw.close_pane(0)
app.mw.close_pane(0) # no crash
eq_(app.mw.pane_count, 1)
@with_app(TestApp)
def test_column_menu_attributes(app):
# The column menu depends on the selected pane and shows the display attribute of optional
# columns.
app.show_nwview()
expected = [("Account #", False), ("Start", True), ("Change", False), ("Change %", False),
("Budgeted", True)]
eq_(app.mw.column_menu_items(), expected)
app.mw.toggle_column_menu_item(0)
expected[0] = ("Account #", True)
eq_(app.mw.column_menu_items(), expected)
@with_app(TestApp)
def test_column_visibility_change_actually_changes_visibility(app):
# Changing the value of a column visibility in view options actually changes visibility
app.show_tview()
app.set_column_visible('description', False)
assert not app.ttable.columns.column_is_visible('description')
@with_app(TestApp)
def test_change_view_options_while_editing(app):
# When a table is in editing mode and that a column visibility is changed, we have to tell the
# gui to stop editing, or else we end up in a state where the core thinks it's editing when the
# GUI isn't.
app.show_tview()
app.mw.new_item()
app.ttable.payee = 'something' # in editing mode
app.set_column_visible('description', False)
assert app.ttable.edited is None
#--- Cleared GUI calls
def app_cleared_gui_calls():
app = TestApp()
app.clear_gui_calls()
return app
@with_app(app_cleared_gui_calls)
def test_new_tab(app):
emptyview = app.new_tab()
eq_(app.mw.pane_count, 6)
app.check_current_pane(PaneType.Empty)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
emptyview.select_pane_type(PaneType.Profit)
app.check_current_pane(PaneType.Profit)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
@with_app(app_cleared_gui_calls)
def test_toggle_area_visibility(app):
app.show_nwview()
app.mw.toggle_area_visibility(PaneArea.BottomGraph)
app.nwview.view.check_gui_calls(['update_visibility'])
# It sends the message to the main window as well (so it can update its buttons)
app.mw.view.check_gui_calls(['update_area_visibility'])
# Also update the visibility of other views when we select them
app.show_pview()
app.pview.view.check_gui_calls(['update_visibility'])
#--- One account
def app_one_account():
app = TestApp()
app.add_account("foo")
app.clear_gui_calls()
return app
@with_app(app_one_account)
def test_rename_opened_account_changes_tab_label(app):
# Renaming the account with an opened tab renames that tab.
app.show_account()
index = app.mw.current_pane_index
app.show_nwview()
app.clear_gui_calls()
app.bsheet.selected.name = 'renamed'
app.bsheet.save_edits()
eq_(app.mw.pane_label(index), 'renamed')
app.check_gui_calls(app.mainwindow_gui, ['refresh_panes', 'refresh_undo_actions'])
@with_app(app_one_account)
def test_show_account_opens_a_new_tab(app):
# Showing an account opens a new tab with the account shown in it.
app.show_account()
eq_(app.mw.pane_count, 6)
eq_(app.mw.current_pane_index, 5)
eq_(app.mw.pane_type(5), PaneType.Account)
eq_(app.mw.pane_label(5), "foo")
expected = ['refresh_panes', 'change_current_pane']
app.check_gui_calls_partial(app.mainwindow_gui, expected, verify_order=True)
@with_app(app_one_account)
def test_change_date_range(app):
app.show_account()
app.show_nwview()
app.clear_gui_calls()
app.doc.date_range = app.doc.date_range.prev()
expected_calls = ['refresh', 'animate_backward']
app.drsel.view.check_gui_calls(expected_calls)
| eq_(app.mw.current_pane_index, index)
app.mw.select_next_view() | conditional_block |
main_window_test.py | 		app.mw.close_pane(0)
app.mw.close_pane(0) # no crash
eq_(app.mw.pane_count, 1)
@with_app(TestApp)
def test_column_menu_attributes(app):
# The column menu depends on the selected pane and shows the display attribute of optional
# columns.
app.show_nwview()
expected = [("Account #", False), ("Start", True), ("Change", False), ("Change %", False),
("Budgeted", True)]
eq_(app.mw.column_menu_items(), expected)
app.mw.toggle_column_menu_item(0)
expected[0] = ("Account #", True)
eq_(app.mw.column_menu_items(), expected)
@with_app(TestApp)
def test_column_visibility_change_actually_changes_visibility(app):
# Changing the value of a column visibility in view options actually changes visibility
app.show_tview()
app.set_column_visible('description', False)
assert not app.ttable.columns.column_is_visible('description')
@with_app(TestApp)
def test_change_view_options_while_editing(app):
# When a table is in editing mode and that a column visibility is changed, we have to tell the
# gui to stop editing, or else we end up in a state where the core thinks it's editing when the
# GUI isn't.
app.show_tview()
app.mw.new_item()
app.ttable.payee = 'something' # in editing mode
app.set_column_visible('description', False)
assert app.ttable.edited is None
#--- Cleared GUI calls
def app_cleared_gui_calls():
app = TestApp()
app.clear_gui_calls()
return app
@with_app(app_cleared_gui_calls)
def test_new_tab(app):
emptyview = app.new_tab()
eq_(app.mw.pane_count, 6)
app.check_current_pane(PaneType.Empty)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
emptyview.select_pane_type(PaneType.Profit)
app.check_current_pane(PaneType.Profit)
app.check_gui_calls(app.mainwindow_gui, ['change_current_pane', 'refresh_panes', 'refresh_status_line'])
@with_app(app_cleared_gui_calls)
def test_toggle_area_visibility(app):
app.show_nwview()
app.mw.toggle_area_visibility(PaneArea.BottomGraph)
app.nwview.view.check_gui_calls(['update_visibility'])
# It sends the message to the main window as well (so it can update its buttons)
app.mw.view.check_gui_calls(['update_area_visibility'])
# Also update the visibility of other views when we select them
app.show_pview()
app.pview.view.check_gui_calls(['update_visibility'])
#--- One account
def app_one_account():
app = TestApp()
app.add_account("foo")
app.clear_gui_calls()
return app
@with_app(app_one_account)
def test_rename_opened_account_changes_tab_label(app):
# Renaming the account with an opened tab renames that tab.
app.show_account()
index = app.mw.current_pane_index
app.show_nwview()
app.clear_gui_calls()
app.bsheet.selected.name = 'renamed'
app.bsheet.save_edits()
eq_(app.mw.pane_label(index), 'renamed')
app.check_gui_calls(app.mainwindow_gui, ['refresh_panes', 'refresh_undo_actions'])
@with_app(app_one_account)
def test_show_account_opens_a_new_tab(app):
# Showing an account opens a new tab with the account shown in it.
app.show_account()
eq_(app.mw.pane_count, 6)
eq_(app.mw.current_pane_index, 5)
eq_(app.mw.pane_type(5), PaneType.Account)
eq_(app.mw.pane_label(5), "foo")
expected = ['refresh_panes', 'change_current_pane']
app.check_gui_calls_partial(app.mainwindow_gui, expected, verify_order=True)
@with_app(app_one_account)
def test_change_date_range(app):
app.show_account()
app.show_nwview()
app.clear_gui_calls()
app.doc.date_range = app.doc.date_range.prev()
expected_calls = ['refresh', 'animate_backward']
app.drsel.view.check_gui_calls(expected_calls)
app.check_gui_calls_partial(app.bsheet_gui, ['refresh'])
app.check_gui_calls(app.nwgraph_gui, ['refresh'])
app.check_gui_calls_partial(app.balgraph_gui, not_expected=['refresh'])
app.check_gui_calls_partial(app.bargraph_gui, not_expected=['refresh'])
#--- Asset and Income accounts with txn
def app_asset_and_income_accounts_with_txn():
app = TestApp()
app.add_account('Checking')
app.show_account()
app.add_entry('10/10/2007', 'Deposit', payee='Payee', transfer='Salary', increase='42.00')
app.doc.date_range = YearRange(date(2007, 1, 1))
app.clear_gui_calls()
return app
@with_app(app_asset_and_income_accounts_with_txn)
def test_close_pane_of_autocleaned_accounts(app):
# When an account is auto cleaned, close its pane if it's opened
app.etable.show_transfer_account() # the Salary account, which is auto-created
app.link_aview()
app.etable.show_transfer_account() # We're back on the Checking account
app.link_aview()
app.etable.delete() # the Salary pane is supposed to be closed.
eq_(app.mw.pane_count, 6)
eq_(app.mw.current_pane_index, 5) # we stay on the current index
@with_app(app_asset_and_income_accounts_with_txn)
def test_delete_account(app):
# deleting a non-empty account shows the account reassign panel
app.show_nwview()
app.bsheet.selected = app.bsheet.assets[0]
app.clear_gui_calls()
app.bsheet.delete()
app.arpanel.view.check_gui_calls(['pre_load', 'post_load'])
@with_app(app_asset_and_income_accounts_with_txn)
def test_navigate_back(app):
# navigate_back() shows the appropriate sheet depending on which account entry table shows
app.show_nwview()
app.bsheet.selected = app.bsheet.assets[0]
app.show_account()
app.clear_gui_calls()
app.mw.navigate_back()
eq_(app.mw.current_pane_index, 0)
app.show_pview()
app.istatement.selected = app.istatement.income[0]
app.show_account()
app.clear_gui_calls()
app.mw.navigate_back()
eq_(app.mw.current_pane_index, 1)
@with_app(app_asset_and_income_accounts_with_txn)
def test_show_account_when_in_sheet(app):
# When a sheet is selected, show_account() shows the selected account. If the account already
# has a tab opened, re-use that tab.
app.show_nwview()
app.clear_gui_calls()
app.show_account()
eq_(app.mw.current_pane_index, 5) # The tab opened in setup is re-used
app.show_pview()
app.clear_gui_calls()
app.show_account()
eq_(app.mw.current_pane_index, 6) # a new tab is opened for this one
@with_app(app_asset_and_income_accounts_with_txn)
def test_switch_panes_through_show_account(app):
# Views shown in the main window depend on what's selected in the account tree.
app.show_pview()
eq_(app.mw.current_pane_index, 1)
app.istatement.selected = app.istatement.income[0]
app.show_account()
eq_(app.mw.current_pane_index, 6)
app.aview.view.check_gui_calls_partial(['show_bar_graph'])
app.show_nwview()
eq_(app.mw.current_pane_index, 0)
app.bsheet.selected = app.bsheet.assets[0]
app.show_account()
eq_(app.mw.current_pane_index, 5)
# this account was already created, so we don't have to refresh the graphs.
not_expected = ['show_line_graph']
app.aview.view.check_gui_calls_partial(not_expected=not_expected)
app.show_tview()
eq_(app.mw.current_pane_index, 2)
@with_app(app_asset_and_income_accounts_with_txn)
def test_switch_panes_through_pane_index(app):
app.etable.show_transfer_account()
eq_(app.mw.pane_count, 7) # Now, the two last views are our 2 accounts
app.mw.select_previous_view()
app.link_aview()
# etable has change its values
eq_(app.etable[0].transfer, "Salary")
app.mw.select_next_view()
app.link_aview()
# and again
eq_(app.etable[0].transfer, "Checking")
#--- One transaction
def app_one_transaction():
app = TestApp()
app.add_account('first')
app.add_txn(from_='first', to='second', amount='42')
app.clear_gui_calls()
return app
@with_app(app_one_transaction)
def test_show_account_when_in_etable(app):
| app.show_account('first')
app.show_account()
	app.check_current_pane(PaneType.Account, 'second')
|
cnn_model.py | 	logging.info('cnn_model: added {} Dense layer (-> {})'.format(output_activation, output_size))
# Compile
if optimizer == 'adam':
optimizer = Adam(lr=lr)
elif optimizer == 'rmsprop':
optimizer = RMSprop(lr=lr)
else:
logging.info('Can only handle adam or rmsprop optimizers currently')
quit(1)
if loss == 'custom':
loss = mse_no_NaN
logging.info('compiling cnn_model...')
model.compile(loss=loss, optimizer=optimizer)
logging.info('done compiling.')
return model
def train_model(model,
X_train,
y_train,
X_inner_val,
y_inner_val,
X_test,
y_test,
X_outer_val=None,
y_outer_val=None,
nb_epoch=0,
batch_size=50,
lr_func='0.01',
patience=10):
"""
inputs:
model - a Keras model
data - X_train, X_inner_val, X_outer_val, y_train, y_inner_val, y_outer_val, X_test, y_test
nb_epoch - number of epochs to train for
lr_func - string which is evaluated with 'epoch' to produce the learning
rate at each epoch
patience - number of epochs to wait when no progress is being made in
the validation loss
outputs:
model - a trained Keras model
loss - list of training losses corresponding to each epoch
inner_val_loss - list of validation losses corresponding to each epoch
"""
X_train = np.array(X_train)
y_train = np.array(y_train)
# Create learning rate function
lr_func_string = 'def lr(epoch):\n return {}\n'.format(lr_func)
exec lr_func_string
# Fit (allows keyboard interrupts in the middle)
try:
loss = []
inner_val_loss = []
wait = 0
prev_best_inner_val_loss = 99999999
for i in range(nb_epoch):
logging.info('\nEpoch {}/{}, lr = {}'.format(i + 1, nb_epoch, lr(i)))
this_loss = []
this_inner_val_loss = []
model.optimizer.lr.set_value(lr(i))
# Run through training set
logging.info('Training with batch size: {0}...'.format(batch_size))
epoch_training_start = time.time()
training_size = len(X_train)
batch_num = int(np.ceil(float(training_size) / batch_size))
training_order = range(training_size)
np.random.shuffle(training_order)
for batch_idx in range(batch_num):
start = batch_idx * batch_size
end = min(start + batch_size, training_size)
single_mol_as_array = X_train[training_order[start:end]]
single_y_as_array = y_train[training_order[start:end]]
sloss = model.train_on_batch(single_mol_as_array, single_y_as_array)
this_loss.append(sloss)
epoch_training_end = time.time()
logging.info('Training takes {0:0.1f} secs..'.format(epoch_training_end - epoch_training_start ))
# Run through testing set
logging.info('Inner Validating..')
for j in range(len(X_inner_val)):
single_mol_as_array = np.array(X_inner_val[j:j+1])
single_y_as_array = np.reshape(y_inner_val[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
this_inner_val_loss.append(sloss)
loss.append(np.mean(this_loss))
inner_val_loss.append(np.mean(this_inner_val_loss))
logging.info('mse loss: {}\tmse inner_val_loss: {}'.format(loss[i], inner_val_loss[i]))
# report outer_val and test loss
if i % 1 == 0:
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
logging.info('mse outer_val_loss: {}'.format(mean_outer_val_loss))
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
logging.info('mse test_loss: {}'.format(mean_test_loss))
# Check progress
if np.mean(this_inner_val_loss) < prev_best_inner_val_loss:
wait = 0
prev_best_inner_val_loss = np.mean(this_inner_val_loss)
if patience == -1:
model.save_weights('train_cnn_results/best.h5', overwrite=True)
else:
wait = wait + 1
logging.info('{} epochs without inner_val_loss progress'.format(wait))
if wait == patience:
logging.info('stopping early!')
break
if patience == -1:
model.load_weights('train_cnn_results/best.h5')
# evaluate outer validation loss and test loss upon final model
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
else:
mean_outer_val_loss = None
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
except KeyboardInterrupt:
logging.info('User terminated training early (intentionally)')
return (model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss)
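# Hedged call sketch (variable names are placeholders, not from the original script):
#
#   model = build_model(...)   # assumed model-building helper defined earlier in this file
#   model, loss, ivl, ovl, tst = train_model(model,
#       X_train, y_train, X_inner_val, y_inner_val, X_test, y_test,
#       nb_epoch=100, batch_size=50, lr_func='0.01 * (0.95 ** epoch)', patience=10)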
def evaluate_mean_tst_loss(model, X_test, y_test):
"""
Given final model and test examples
returns mean test loss: a float number
"""
test_losses = []
for j in range(len(X_test)):
single_mol_as_array = np.array(X_test[j:j+1])
single_y_as_array = np.reshape(y_test[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
test_losses.append(sloss)
mean_test_loss = np.mean(test_losses)
return mean_test_loss
def reset_model(model):
"""
Given a Keras model consisting only of MoleculeConv, Dense, and Dropout layers,
this function will reset the trainable weights to save time for CV tests.
"""
for layer in model.layers:
# Note: these are custom depending on the layer type
if '.MoleculeConv' in str(layer):
W_inner = layer.init_inner((layer.inner_dim, layer.inner_dim))
b_inner = np.zeros((1, layer.inner_dim))
# Inner weights
layer.W_inner.set_value((T.tile(W_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, layer.inner_dim, layer.inner_dim)).eval()).astype(np.float32))
layer.b_inner.set_value((T.tile(b_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, 1, layer.inner_dim)).eval()).astype(np.float32))
# Outer weights
W_output = layer.init_output((layer.inner_dim, layer.units), scale = layer.scale_output)
b_output = np.zeros((1, layer.units))
# Initialize weights tensor
layer.W_output.set_value((T.tile(W_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
layer.b_output.set_value((T.tile(b_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
logging.info('graphFP layer reset')
elif '.Dense' in str(layer):
layer.W.set_value((layer.init(layer.W.shape.eval()).eval()).astype(np.float32))
layer.b.set_value(np.zeros(layer.b.shape.eval(), dtype=np.float32))
logging.info('dense layer reset')
elif '.Dropout' in str(layer):
logging.info('dropout unchanged')
else:
raise ValueError('Unknown layer {}, cannot reset weights'.format(str(layer)))
logging.info('Reset model weights')
return model
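# Typical use (an assumption based on the docstring above): call between CV folds so the
# same architecture can be retrained without rebuilding the Keras graph. fold_data() is a
# hypothetical helper that returns the per-fold arrays.
#
#   for fold in range(n_folds):
#       model = reset_model(model)
#       model, loss, ivl, ovl, tst = train_model(model, *fold_data(fold))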
def save_model(model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss, fpath):
"""
Saves NN model object and associated information.
inputs:
model - a Keras model
loss - list of training losses
inner_val_loss - list of inner validation losses
		mean_outer_val_loss - mean loss on outer validation set
		mean_test_loss - mean loss on test set
		fpath - root filepath to save everything to (.json, .h5, .png, .hist files are written)
	"""
# Dump data
with open(fpath + '.json', 'w') as structure_fpath:
json.dump(model.to_json(), structure_fpath)
logging.info('...saved structural information')
# Dump weights
model.save_weights(fpath + '.h5', overwrite = True)
logging.info('...saved weights')
# Dump image
try:
plot(model, to_file = fpath + '.png')
logging.info('...saved image')
except:
pass
# Dump history
save_model_history_manual(loss, inner_val_loss, fpath + '.hist')
	mean_loss = loss[-1]
	mean_inner_val_loss = inner_val_loss[-1]
	write_loss_report(mean_loss, mean_inner_val_loss, mean_outer_val_loss, mean_test_loss, fpath + '_loss_report.txt')

cnn_model.py | 		nb_epoch=0,
batch_size=50,
lr_func='0.01',
patience=10):
"""
inputs:
model - a Keras model
data - X_train, X_inner_val, X_outer_val, y_train, y_inner_val, y_outer_val, X_test, y_test
nb_epoch - number of epochs to train for
lr_func - string which is evaluated with 'epoch' to produce the learning
rate at each epoch
patience - number of epochs to wait when no progress is being made in
the validation loss
outputs:
model - a trained Keras model
loss - list of training losses corresponding to each epoch
inner_val_loss - list of validation losses corresponding to each epoch
"""
X_train = np.array(X_train)
y_train = np.array(y_train)
# Create learning rate function
lr_func_string = 'def lr(epoch):\n return {}\n'.format(lr_func)
exec(lr_func_string)
# Fit (allows keyboard interrupts in the middle)
try:
loss = []
inner_val_loss = []
wait = 0
prev_best_inner_val_loss = 99999999
for i in range(nb_epoch):
logging.info('\nEpoch {}/{}, lr = {}'.format(i + 1, nb_epoch, lr(i)))
this_loss = []
this_inner_val_loss = []
model.optimizer.lr.set_value(lr(i))
# Run through training set
logging.info('Training with batch size: {0}...'.format(batch_size))
epoch_training_start = time.time()
training_size = len(X_train)
batch_num = int(np.ceil(float(training_size) / batch_size))
training_order = list(range(training_size))
np.random.shuffle(training_order)
for batch_idx in range(batch_num):
start = batch_idx * batch_size
end = min(start + batch_size, training_size)
single_mol_as_array = X_train[training_order[start:end]]
single_y_as_array = y_train[training_order[start:end]]
sloss = model.train_on_batch(single_mol_as_array, single_y_as_array)
this_loss.append(sloss)
epoch_training_end = time.time()
logging.info('Training takes {0:0.1f} secs..'.format(epoch_training_end - epoch_training_start ))
# Run through inner validation set
logging.info('Inner Validating..')
for j in range(len(X_inner_val)):
single_mol_as_array = np.array(X_inner_val[j:j+1])
single_y_as_array = np.reshape(y_inner_val[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
this_inner_val_loss.append(sloss)
loss.append(np.mean(this_loss))
inner_val_loss.append(np.mean(this_inner_val_loss))
logging.info('mse loss: {}\tmse inner_val_loss: {}'.format(loss[i], inner_val_loss[i]))
# report outer_val and test loss
if i % 1 == 0:
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
logging.info('mse outer_val_loss: {}'.format(mean_outer_val_loss))
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
logging.info('mse test_loss: {}'.format(mean_test_loss))
# Check progress
if np.mean(this_inner_val_loss) < prev_best_inner_val_loss:
wait = 0
prev_best_inner_val_loss = np.mean(this_inner_val_loss)
if patience == -1:
model.save_weights('train_cnn_results/best.h5', overwrite=True)
else:
wait = wait + 1
logging.info('{} epochs without inner_val_loss progress'.format(wait))
if wait == patience:
logging.info('stopping early!')
break
if patience == -1:
model.load_weights('train_cnn_results/best.h5')
# evaluate outer validation loss and test loss upon final model
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
else:
mean_outer_val_loss = None
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
except KeyboardInterrupt:
logging.info('User terminated training early (intentionally)')
return (model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss)
def evaluate_mean_tst_loss(model, X_test, y_test):
"""
Given final model and test examples
returns mean test loss: a float number
"""
test_losses = []
for j in range(len(X_test)):
single_mol_as_array = np.array(X_test[j:j+1])
single_y_as_array = np.reshape(y_test[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
test_losses.append(sloss)
mean_test_loss = np.mean(test_losses)
return mean_test_loss
def reset_model(model):
"""
Given a Keras model consisting only of MoleculeConv, Dense, and Dropout layers,
this function will reset the trainable weights to save time for CV tests.
"""
for layer in model.layers:
# Note: these are custom depending on the layer type
if '.MoleculeConv' in str(layer):
W_inner = layer.init_inner((layer.inner_dim, layer.inner_dim))
b_inner = np.zeros((1, layer.inner_dim))
# Inner weights
layer.W_inner.set_value((T.tile(W_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, layer.inner_dim, layer.inner_dim)).eval()).astype(np.float32))
layer.b_inner.set_value((T.tile(b_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, 1, layer.inner_dim)).eval()).astype(np.float32))
# Outer weights
W_output = layer.init_output((layer.inner_dim, layer.units), scale = layer.scale_output)
b_output = np.zeros((1, layer.units))
# Initialize weights tensor
layer.W_output.set_value((T.tile(W_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
layer.b_output.set_value((T.tile(b_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
logging.info('graphFP layer reset')
elif '.Dense' in str(layer):
layer.W.set_value((layer.init(layer.W.shape.eval()).eval()).astype(np.float32))
layer.b.set_value(np.zeros(layer.b.shape.eval(), dtype=np.float32))
logging.info('dense layer reset')
elif '.Dropout' in str(layer):
logging.info('dropout unchanged')
else:
raise ValueError('Unknown layer {}, cannot reset weights'.format(str(layer)))
logging.info('Reset model weights')
return model
def save_model(model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss, fpath):
"""
Saves NN model object and associated information.
inputs:
model - a Keras model
loss - list of training losses
inner_val_loss - list of inner validation losses
mean_outer_val_loss - mean loss on outer validation set
mean_test_loss - mean loss on test set
fpath - root filepath to save everything to (with .json, h5, png, info)
config - the configuration dictionary that defined this model
tstamp - current timestamp to log in info file
"""
# Dump data
with open(fpath + '.json', 'w') as structure_fpath:
json.dump(model.to_json(), structure_fpath)
logging.info('...saved structural information')
# Dump weights
model.save_weights(fpath + '.h5', overwrite = True)
logging.info('...saved weights')
# Dump image
try:
plot(model, to_file = fpath + '.png')
logging.info('...saved image')
except:
pass
# Dump history
save_model_history_manual(loss, inner_val_loss, fpath + '.hist')
mean_loss = loss[-1]
mean_inner_val_loss = inner_val_loss[-1]
write_loss_report(mean_loss, mean_inner_val_loss, mean_outer_val_loss, mean_test_loss, fpath + '_loss_report.txt')
logging.info ('...saved history')
logging.info('...saved model to {}.[json, h5, png]'.format(fpath))
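# A minimal counterpart to save_model() (not in the original file), sketching how
# a model saved above could be restored later. The keras import path and the
# custom_objects entry for MoleculeConv are assumptions about how this project
# registers its custom layer.
def load_model_sketch(fpath):
    from keras.models import model_from_json
    with open(fpath + '.json', 'r') as structure_fpath:
        # save_model() json.dump()s the to_json() string, so undo that first
        structure_json = json.load(structure_fpath)
    model = model_from_json(structure_json,
                            custom_objects={'MoleculeConv': MoleculeConv})
    model.load_weights(fpath + '.h5')
    return model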
def save_model_history_manual(loss, val_loss, fpath):
| """
This function saves the history returned by model.fit to a tab-
delimited file, where model is a keras model
"""
# Open file
fid = open(fpath, 'a')
logging.info('trained at {}'.format(datetime.datetime.utcnow()))
print('iteration\tloss\tval_loss', file=fid)
try:
# Iterate through
for i in range(len(loss)):
print('{}\t{}\t{}'.format(i + 1,
loss[i], val_loss[i]),
file = fid)
except KeyError:
print('<no history found>', file = fid)
# Close file | identifier_body |
|
cnn_model.py | (embedding_size=512, attribute_vector_size=33, depth=5,
scale_output=0.05, padding=False,
mol_conv_inner_activation='tanh',
mol_conv_outer_activation='softmax',
hidden=50, hidden_activation='tanh',
output_activation='linear', output_size=1,
lr=0.01, optimizer='adam', loss='mse'):
"""
build generic cnn model that takes molecule tensor and predicts output
with size of output_size.
"""
model = Sequential()
model.add(MoleculeConv(units=embedding_size,
inner_dim=attribute_vector_size-1,
depth=depth,
scale_output=scale_output,
padding=padding,
activation_inner=mol_conv_inner_activation,
activation_output=mol_conv_outer_activation))
logging.info('cnn_model: added MoleculeConv layer ({} -> {})'.format('mol', embedding_size))
if hidden > 0:
model.add(Dense(hidden, activation=hidden_activation))
logging.info('cnn_model: added {} Dense layer (-> {})'.format(hidden_activation, hidden))
model.add(Dense(output_size, activation=output_activation))
logging.info('cnn_model: added {} Dense layer (-> {})'.format(output_activation, output_size))
# Compile
if optimizer == 'adam':
optimizer = Adam(lr=lr)
elif optimizer == 'rmsprop':
optimizer = RMSprop(lr=lr)
else:
logging.info('Can only handle adam or rmsprop optimizers currently')
quit(1)
if loss == 'custom':
loss = mse_no_NaN
logging.info('compiling cnn_model...')
model.compile(loss=loss, optimizer=optimizer)
logging.info('done compiling.')
return model
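# Illustrative end-to-end sketch (not part of the original script) wiring the
# model builder above together with train_model() and save_model() from this
# module. The `data` tuple layout follows train_model's docstring; the fpath
# default is a placeholder assumption.
def run_training_sketch(data, fpath='train_cnn_results/sketch'):
    (X_train, X_inner_val, X_outer_val,
     y_train, y_inner_val, y_outer_val,
     X_test, y_test) = data
    model = build_model(embedding_size=512, depth=5, hidden=50, lr=0.01)
    model, loss, inner_val_loss, outer_val_loss, test_loss = train_model(
        model,
        X_train, y_train,
        X_inner_val, y_inner_val,
        X_test, y_test,
        X_outer_val=X_outer_val, y_outer_val=y_outer_val,
        nb_epoch=100, batch_size=50, lr_func='0.01', patience=10)
    save_model(model, loss, inner_val_loss, outer_val_loss, test_loss, fpath)
    return model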
def train_model(model,
X_train,
y_train,
X_inner_val,
y_inner_val,
X_test,
y_test,
X_outer_val=None,
y_outer_val=None,
nb_epoch=0,
batch_size=50,
lr_func='0.01',
patience=10):
"""
inputs:
model - a Keras model
data - X_train, X_inner_val, X_outer_val, y_train, y_inner_val, y_outer_val, X_test, y_test
nb_epoch - number of epochs to train for
lr_func - string which is evaluated with 'epoch' to produce the learning
rate at each epoch
patience - number of epochs to wait when no progress is being made in
the validation loss
outputs:
model - a trained Keras model
loss - list of training losses corresponding to each epoch
inner_val_loss - list of validation losses corresponding to each epoch
"""
X_train = np.array(X_train)
y_train = np.array(y_train)
# Create learning rate function
lr_func_string = 'def lr(epoch):\n return {}\n'.format(lr_func)
exec(lr_func_string)
# Fit (allows keyboard interrupts in the middle)
try:
loss = []
inner_val_loss = []
wait = 0
prev_best_inner_val_loss = 99999999
for i in range(nb_epoch):
logging.info('\nEpoch {}/{}, lr = {}'.format(i + 1, nb_epoch, lr(i)))
this_loss = []
this_inner_val_loss = []
model.optimizer.lr.set_value(lr(i))
# Run through training set
logging.info('Training with batch size: {0}...'.format(batch_size))
epoch_training_start = time.time()
training_size = len(X_train)
batch_num = int(np.ceil(float(training_size) / batch_size))
training_order = list(range(training_size))
np.random.shuffle(training_order)
for batch_idx in range(batch_num):
start = batch_idx * batch_size
end = min(start + batch_size, training_size)
single_mol_as_array = X_train[training_order[start:end]]
single_y_as_array = y_train[training_order[start:end]]
sloss = model.train_on_batch(single_mol_as_array, single_y_as_array)
this_loss.append(sloss)
epoch_training_end = time.time()
logging.info('Training takes {0:0.1f} secs..'.format(epoch_training_end - epoch_training_start ))
# Run through inner validation set
logging.info('Inner Validating..')
for j in range(len(X_inner_val)):
single_mol_as_array = np.array(X_inner_val[j:j+1])
single_y_as_array = np.reshape(y_inner_val[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
this_inner_val_loss.append(sloss)
loss.append(np.mean(this_loss))
inner_val_loss.append(np.mean(this_inner_val_loss))
logging.info('mse loss: {}\tmse inner_val_loss: {}'.format(loss[i], inner_val_loss[i]))
# report outer_val and test loss
if i % 1 == 0:
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
logging.info('mse outer_val_loss: {}'.format(mean_outer_val_loss))
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
logging.info('mse test_loss: {}'.format(mean_test_loss))
# Check progress
if np.mean(this_inner_val_loss) < prev_best_inner_val_loss:
wait = 0
prev_best_inner_val_loss = np.mean(this_inner_val_loss)
if patience == -1:
model.save_weights('train_cnn_results/best.h5', overwrite=True)
else:
wait = wait + 1
logging.info('{} epochs without inner_val_loss progress'.format(wait))
if wait == patience:
logging.info('stopping early!')
break
if patience == -1:
model.load_weights('train_cnn_results/best.h5')
# evaluate outer validation loss and test loss upon final model
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
else:
mean_outer_val_loss = None
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
except KeyboardInterrupt:
logging.info('User terminated training early (intentionally)')
return (model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss)
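# Note on lr_func: the string is spliced into a generated `def lr(epoch)` above,
# so any Python expression in `epoch` works. Purely illustrative examples:
#
#     lr_func='0.01'                          # constant learning rate
#     lr_func='0.01 * np.exp(-epoch / 30.0)'  # exponential decay
#
# (np is already imported in this module, so the second form is valid.)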
def evaluate_mean_tst_loss(model, X_test, y_test):
"""
Given final model and test examples
returns mean test loss: a float number
"""
test_losses = []
for j in range(len(X_test)):
single_mol_as_array = np.array(X_test[j:j+1])
single_y_as_array = np.reshape(y_test[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
test_losses.append(sloss)
mean_test_loss = np.mean(test_losses)
return mean_test_loss
def reset_model(model):
"""
Given a Keras model consisting only of MoleculeConv, Dense, and Dropout layers,
this function will reset the trainable weights to save time for CV tests.
"""
for layer in model.layers:
# Note: these are custom depending on the layer type
if '.MoleculeConv' in str(layer):
W_inner = layer.init_inner((layer.inner_dim, layer.inner_dim))
b_inner = np.zeros((1, layer.inner_dim))
# Inner weights
layer.W_inner.set_value((T.tile(W_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, layer.inner_dim, layer.inner_dim)).eval()).astype(np.float32))
layer.b_inner.set_value((T.tile(b_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, 1, layer.inner_dim)).eval()).astype(np.float32))
# Outer weights
W_output = layer.init_output((layer.inner_dim, layer.units), scale = layer.scale_output)
b_output = np.zeros((1, layer.units))
# Initialize weights tensor
layer.W_output.set_value((T.tile(W_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
layer.b_output.set_value((T.tile(b_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
logging.info('graphFP layer reset')
elif '.Dense' in str(layer):
layer.W.set_value((layer.init(layer.W.shape.eval()).eval()).astype(np.float32))
layer.b.set_value(np.zeros(layer.b.shape.eval(), dtype=np.float32))
logging.info('dense layer reset')
elif '.Dropout' in str(layer):
logging.info('dropout unchanged')
else:
raise ValueError('Unknown layer {}, cannot reset weights'.format(str(layer)))
logging.info('Reset model weights')
return model
def save_model(model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss, fpath):
"""
Saves NN model object and associated information.
inputs:
model - a Keras model
| build_model | identifier_name |
|
cnn_model.py | .add(MoleculeConv(units=embedding_size,
inner_dim=attribute_vector_size-1,
depth=depth,
scale_output=scale_output,
padding=padding,
activation_inner=mol_conv_inner_activation,
activation_output=mol_conv_outer_activation))
logging.info('cnn_model: added MoleculeConv layer ({} -> {})'.format('mol', embedding_size))
if hidden > 0:
model.add(Dense(hidden, activation=hidden_activation))
logging.info('cnn_model: added {} Dense layer (-> {})'.format(hidden_activation, hidden))
model.add(Dense(output_size, activation=output_activation))
logging.info('cnn_model: added {} Dense layer (-> {})'.format(output_activation, output_size))
# Compile
if optimizer == 'adam':
optimizer = Adam(lr=lr)
elif optimizer == 'rmsprop':
optimizer = RMSprop(lr=lr)
else:
logging.info('Can only handle adam or rmsprop optimizers currently')
quit(1)
if loss == 'custom':
loss = mse_no_NaN
logging.info('compiling cnn_model...')
model.compile(loss=loss, optimizer=optimizer)
logging.info('done compiling.')
return model
def train_model(model,
X_train,
y_train,
X_inner_val,
y_inner_val,
X_test,
y_test,
X_outer_val=None,
y_outer_val=None,
nb_epoch=0,
batch_size=50,
lr_func='0.01',
patience=10):
"""
inputs:
model - a Keras model
data - X_train, X_inner_val, X_outer_val, y_train, y_inner_val, y_outer_val, X_test, y_test
nb_epoch - number of epochs to train for
lr_func - string which is evaluated with 'epoch' to produce the learning
rate at each epoch
patience - number of epochs to wait when no progress is being made in
the validation loss
outputs:
model - a trained Keras model
loss - list of training losses corresponding to each epoch
inner_val_loss - list of validation losses corresponding to each epoch
"""
X_train = np.array(X_train)
y_train = np.array(y_train)
# Create learning rate function
lr_func_string = 'def lr(epoch):\n return {}\n'.format(lr_func)
exec(lr_func_string)
# Fit (allows keyboard interrupts in the middle)
try:
loss = []
inner_val_loss = []
wait = 0
prev_best_inner_val_loss = 99999999
for i in range(nb_epoch):
logging.info('\nEpoch {}/{}, lr = {}'.format(i + 1, nb_epoch, lr(i)))
this_loss = []
this_inner_val_loss = []
model.optimizer.lr.set_value(lr(i))
# Run through training set
logging.info('Training with batch size: {0}...'.format(batch_size))
epoch_training_start = time.time()
training_size = len(X_train)
batch_num = int(np.ceil(float(training_size) / batch_size))
training_order = list(range(training_size))
np.random.shuffle(training_order)
for batch_idx in range(batch_num):
start = batch_idx * batch_size
end = min(start + batch_size, training_size)
single_mol_as_array = X_train[training_order[start:end]]
single_y_as_array = y_train[training_order[start:end]]
sloss = model.train_on_batch(single_mol_as_array, single_y_as_array)
this_loss.append(sloss)
epoch_training_end = time.time()
logging.info('Training takes {0:0.1f} secs..'.format(epoch_training_end - epoch_training_start ))
# Run through inner validation set
logging.info('Inner Validating..')
for j in range(len(X_inner_val)):
single_mol_as_array = np.array(X_inner_val[j:j+1])
single_y_as_array = np.reshape(y_inner_val[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
this_inner_val_loss.append(sloss)
loss.append(np.mean(this_loss))
inner_val_loss.append(np.mean(this_inner_val_loss))
logging.info('mse loss: {}\tmse inner_val_loss: {}'.format(loss[i], inner_val_loss[i]))
# report outer_val and test loss
if i % 1 == 0:
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
logging.info('mse outer_val_loss: {}'.format(mean_outer_val_loss))
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
logging.info('mse test_loss: {}'.format(mean_test_loss))
# Check progress
if np.mean(this_inner_val_loss) < prev_best_inner_val_loss:
wait = 0
prev_best_inner_val_loss = np.mean(this_inner_val_loss)
if patience == -1:
model.save_weights('train_cnn_results/best.h5', overwrite=True)
else:
wait = wait + 1
logging.info('{} epochs without inner_val_loss progress'.format(wait))
if wait == patience:
logging.info('stopping early!')
break
if patience == -1:
model.load_weights('train_cnn_results/best.h5')
# evaluate outer validation loss and test loss upon final model
if X_outer_val:
mean_outer_val_loss = evaluate_mean_tst_loss(model, X_outer_val, y_outer_val)
else:
|
mean_test_loss = evaluate_mean_tst_loss(model, X_test, y_test)
except KeyboardInterrupt:
logging.info('User terminated training early (intentionally)')
return (model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss)
def evaluate_mean_tst_loss(model, X_test, y_test):
"""
Given final model and test examples
returns mean test loss: a float number
"""
test_losses = []
for j in range(len(X_test)):
single_mol_as_array = np.array(X_test[j:j+1])
single_y_as_array = np.reshape(y_test[j], (1, -1))
sloss = model.test_on_batch(single_mol_as_array, single_y_as_array)
test_losses.append(sloss)
mean_test_loss = np.mean(test_losses)
return mean_test_loss
def reset_model(model):
"""
Given a Keras model consisting only of MoleculeConv, Dense, and Dropout layers,
this function will reset the trainable weights to save time for CV tests.
"""
for layer in model.layers:
# Note: these are custom depending on the layer type
if '.MoleculeConv' in str(layer):
W_inner = layer.init_inner((layer.inner_dim, layer.inner_dim))
b_inner = np.zeros((1, layer.inner_dim))
# Inner weights
layer.W_inner.set_value((T.tile(W_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, layer.inner_dim, layer.inner_dim)).eval()).astype(np.float32))
layer.b_inner.set_value((T.tile(b_inner, (layer.depth + 1, 1, 1)).eval() + \
initializations.uniform((layer.depth + 1, 1, layer.inner_dim)).eval()).astype(np.float32))
# Outer weights
W_output = layer.init_output((layer.inner_dim, layer.units), scale = layer.scale_output)
b_output = np.zeros((1, layer.units))
# Initialize weights tensor
layer.W_output.set_value((T.tile(W_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
layer.b_output.set_value((T.tile(b_output, (layer.depth + 1, 1, 1)).eval()).astype(np.float32))
logging.info('graphFP layer reset')
elif '.Dense' in str(layer):
layer.W.set_value((layer.init(layer.W.shape.eval()).eval()).astype(np.float32))
layer.b.set_value(np.zeros(layer.b.shape.eval(), dtype=np.float32))
logging.info('dense layer reset')
elif '.Dropout' in str(layer):
logging.info('dropout unchanged')
else:
raise ValueError('Unknown layer {}, cannot reset weights'.format(str(layer)))
logging.info('Reset model weights')
return model
def save_model(model, loss, inner_val_loss, mean_outer_val_loss, mean_test_loss, fpath):
"""
Saves NN model object and associated information.
inputs:
model - a Keras model
loss - list of training losses
inner_val_loss - list of inner validation losses
mean_outer_val_loss - mean loss on outer validation set
mean_test_loss - mean loss on test set
fpath - root filepath to save everything to (with .json, h5, png, info)
config - the configuration dictionary that defined this model
tstamp - current timestamp to log in info file
"""
# Dump data
with open(fpath + '.json', 'w') as structure_fpath:
json.dump(model.to_json(), structure_fpath)
logging | mean_outer_val_loss = None | conditional_block |
lib.rs |
/// * https://docs.microsoft.com/en-us/windows/uwp/controls-and-patterns/tiles-and-notifications-toast-xml-schema
/// * https://docs.microsoft.com/en-us/windows/uwp/controls-and-patterns/tiles-and-notifications-adaptive-interactive-toasts
/// * https://msdn.microsoft.com/library/14a07fce-d631-4bad-ab99-305b703713e6#Sending_toast_notifications_from_desktop_apps
/// for Windows 7 and older support look into Shell_NotifyIcon
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee330740(v=vs.85).aspx
/// https://softwareengineering.stackexchange.com/questions/222339/using-the-system-tray-notification-area-app-in-windows-7
extern crate windows;
extern crate xml;
#[macro_use]
extern crate strum;
#[allow(dead_code)]
mod bindings {
::windows::include_bindings!();
}
use bindings::{
windows::data::xml::dom::XmlDocument,
windows::ui::notifications::ToastNotification,
windows::ui::notifications::ToastNotificationManager,
windows::HString,
};
use std::fmt;
use std::path::Path;
use xml::escape::escape_str_attribute;
mod windows_check;
pub use windows::Error;
pub struct Toast {
duration: String,
title: String,
line1: String,
line2: String,
images: String,
audio: String,
app_id: String,
}
#[derive(Clone, Copy)]
pub enum Duration {
/// 7 seconds
Short,
/// 25 seconds
Long,
}
#[derive(Debug, EnumString, Clone, Copy)]
pub enum Sound {
Default,
IM,
Mail,
Reminder,
SMS,
/// Play the loopable sound only once
#[strum(disabled)]
Single(LoopableSound),
/// Loop the loopable sound for the entire duration of the toast
#[strum(disabled)]
Loop(LoopableSound),
}
/// Sounds suitable for Looping
#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
pub enum LoopableSound {
Alarm,
Alarm2,
Alarm3,
Alarm4,
Alarm5,
Alarm6,
Alarm7,
Alarm8, | Call,
Call2,
Call3,
Call4,
Call5,
Call6,
Call7,
Call8,
Call9,
Call10,
}
#[allow(dead_code)]
#[derive(Clone, Copy)]
pub enum IconCrop {
Square,
Circular,
}
#[doc(hidden)]
impl fmt::Display for Sound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
#[doc(hidden)]
impl fmt::Display for LoopableSound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
impl Toast {
/// This can be used if you do not have an AppUserModelID.
///
/// However, the toast will erroneously report its origin as PowerShell.
pub const POWERSHELL_APP_ID: &'static str = "{1AC14E77-02E7-4E5D-B744-2EB1AE5198B7}\
\\WindowsPowerShell\\v1.0\\powershell.exe";
/// Constructor for the toast builder.
///
/// app_id is the running application's [AppUserModelID][1].
///
/// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/dd378459(v=vs.85).aspx
///
/// If the program you are using this in was not installed, use Toast::POWERSHELL_APP_ID for now
#[allow(dead_code)]
pub fn new(app_id: &str) -> Toast {
Toast {
duration: String::new(),
title: String::new(),
line1: String::new(),
line2: String::new(),
images: String::new(),
audio: String::new(),
app_id: app_id.to_string(),
}
}
/// Sets the title of the toast.
///
/// Will be white.
/// Supports Unicode ✓
pub fn title(mut self, content: &str) -> Toast {
self.title = format!(r#"<text id="1">{}</text>"#, escape_str_attribute(content));
self
}
/// Add/Set the first line of text below the title.
///
/// Will be grey.
/// Supports Unicode ✓
pub fn text1(mut self, content: &str) -> Toast {
self.line1 = format!(r#"<text id="2">{}</text>"#, escape_str_attribute(content));
self
}
/// Add/Set the second line of text below the title.
///
/// Will be grey.
/// Supports Unicode ✓
pub fn text2(mut self, content: &str) -> Toast {
self.line2 = format!(r#"<text id="3">{}</text>"#, escape_str_attribute(content));
self
}
/// Set the length of time to show the toast
pub fn duration(mut self, duration: Duration) -> Toast {
self.duration = match duration {
Duration::Long => "duration=\"long\"",
Duration::Short => "duration=\"short\"",
}
.to_owned();
self
}
/// Set the icon shown in the upper left of the toast
///
/// The default is determined by your app id.
/// If you are using the powershell workaround, it will be the powershell icon
pub fn icon(mut self, source: &Path, crop: IconCrop, alt_text: &str) -> Toast {
if windows_check::is_newer_than_windows81() {
let crop_type_attr = match crop {
IconCrop::Square => "".to_string(),
IconCrop::Circular => "hint-crop=\"circle\"".to_string(),
};
self.images = format!(
r#"{}<image placement="appLogoOverride" {} src="file:///{}" alt="{}" />"#,
self.images,
crop_type_attr,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
} else {
// Win81 rejects the above xml so we fallback to a simpler call
self.image(source, alt_text)
}
}
/// Add/Set a Hero image for the toast.
///
/// This will be above the toast text and the icon.
pub fn hero(mut self, source: &Path, alt_text: &str) -> Toast {
if windows_check::is_newer_than_windows81() {
self.images = format!(
r#"{}<image placement="Hero" src="file:///{}" alt="{}" />"#,
self.images,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
} else {
// win81 rejects the above xml so we fallback to a simpler call
self.image(source, alt_text)
}
}
/// Add an image to the toast
///
/// May be done many times.
/// Will appear below text.
pub fn image(mut self, source: &Path, alt_text: &str) -> Toast {
if !windows_check::is_newer_than_windows81() {
// win81 cannot have more than 1 image and shows nothing if there is more than that
self.images = "".to_owned();
}
self.images = format!(
r#"{}<image id="1" src="file:///{}" alt="{}" />"#,
self.images,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
}
/// Set the sound for the toast or silence it
///
/// Default is [Sound::IM](enum.Sound.html)
pub fn sound(mut self, src: Option<Sound>) -> Toast {
self.audio = match src {
None => "<audio silent=\"true\" />".to_owned(),
Some(Sound::Default) => "".to_owned(),
Some(Sound::Loop(sound)) => format!(
r#"<audio loop="true" src="ms-winsoundevent:Notification.Looping.{}" />"#,
sound
),
Some(Sound::Single(sound)) => format!(
r#"<audio src="ms-winsoundevent:Notification.Looping.{}" />"#,
sound
),
Some(sound) => format!(r#"<audio src="ms-winsoundevent:Notification.{}" />"#, sound),
};
self
}
/// Display the toast on the screen
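///
/// # Example
///
/// A minimal usage sketch (assuming this crate is imported as
/// `winrt_notification`; the title, text and sound choices are placeholders):
///
/// ```no_run
/// use winrt_notification::{Duration, Sound, Toast};
///
/// Toast::new(Toast::POWERSHELL_APP_ID)
///     .title("Build finished")
///     .text1("All tests passed")
///     .sound(Some(Sound::SMS))
///     .duration(Duration::Short)
///     .show()
///     .expect("unable to send notification");
/// ```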
pub fn show(&self) -> windows::Result<()> {
//using this to get an instance of XmlDocument
let toast_xml = XmlDocument::new()?;
let template_binding = if windows_check::is_newer_than_windows81() {
"ToastGeneric"
} else
//win8 or win81
{
// Need to do this or an empty placeholder will be shown if no image is set
if self.images == "" {
"ToastText04"
} else {
"Toast | Alarm9,
Alarm10, | random_line_split |
lib.rs | /// * https://docs.microsoft.com/en-us/windows/uwp/controls-and-patterns/tiles-and-notifications-toast-xml-schema
/// * https://docs.microsoft.com/en-us/windows/uwp/controls-and-patterns/tiles-and-notifications-adaptive-interactive-toasts
/// * https://msdn.microsoft.com/library/14a07fce-d631-4bad-ab99-305b703713e6#Sending_toast_notifications_from_desktop_apps
/// for Windows 7 and older support look into Shell_NotifyIcon
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee330740(v=vs.85).aspx
/// https://softwareengineering.stackexchange.com/questions/222339/using-the-system-tray-notification-area-app-in-windows-7
extern crate windows;
extern crate xml;
#[macro_use]
extern crate strum;
#[allow(dead_code)]
mod bindings {
::windows::include_bindings!();
}
use bindings::{
windows::data::xml::dom::XmlDocument,
windows::ui::notifications::ToastNotification,
windows::ui::notifications::ToastNotificationManager,
windows::HString,
};
use std::fmt;
use std::path::Path;
use xml::escape::escape_str_attribute;
mod windows_check;
pub use windows::Error;
pub struct Toast {
duration: String,
title: String,
line1: String,
line2: String,
images: String,
audio: String,
app_id: String,
}
#[derive(Clone, Copy)]
pub enum | {
/// 7 seconds
Short,
/// 25 seconds
Long,
}
#[derive(Debug, EnumString, Clone, Copy)]
pub enum Sound {
Default,
IM,
Mail,
Reminder,
SMS,
/// Play the loopable sound only once
#[strum(disabled)]
Single(LoopableSound),
/// Loop the loopable sound for the entire duration of the toast
#[strum(disabled)]
Loop(LoopableSound),
}
/// Sounds suitable for Looping
#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
pub enum LoopableSound {
Alarm,
Alarm2,
Alarm3,
Alarm4,
Alarm5,
Alarm6,
Alarm7,
Alarm8,
Alarm9,
Alarm10,
Call,
Call2,
Call3,
Call4,
Call5,
Call6,
Call7,
Call8,
Call9,
Call10,
}
#[allow(dead_code)]
#[derive(Clone, Copy)]
pub enum IconCrop {
Square,
Circular,
}
#[doc(hidden)]
impl fmt::Display for Sound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
#[doc(hidden)]
impl fmt::Display for LoopableSound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
impl Toast {
/// This can be used if you do not have an AppUserModelID.
///
/// However, the toast will erroneously report its origin as PowerShell.
pub const POWERSHELL_APP_ID: &'static str = "{1AC14E77-02E7-4E5D-B744-2EB1AE5198B7}\
\\WindowsPowerShell\\v1.0\\powershell.exe";
/// Constructor for the toast builder.
///
/// app_id is the running application's [AppUserModelID][1].
///
/// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/dd378459(v=vs.85).aspx
///
/// If the program you are using this in was not installed, use Toast::POWERSHELL_APP_ID for now
#[allow(dead_code)]
pub fn new(app_id: &str) -> Toast {
Toast {
duration: String::new(),
title: String::new(),
line1: String::new(),
line2: String::new(),
images: String::new(),
audio: String::new(),
app_id: app_id.to_string(),
}
}
/// Sets the title of the toast.
///
/// Will be white.
/// Supports Unicode ✓
pub fn title(mut self, content: &str) -> Toast {
self.title = format!(r#"<text id="1">{}</text>"#, escape_str_attribute(content));
self
}
/// Add/Set the first line of text below the title.
///
/// Will be grey.
/// Supports Unicode ✓
pub fn text1(mut self, content: &str) -> Toast {
self.line1 = format!(r#"<text id="2">{}</text>"#, escape_str_attribute(content));
self
}
/// Add/Set the second line of text below the title.
///
/// Will be grey.
/// Supports Unicode ✓
pub fn text2(mut self, content: &str) -> Toast {
self.line2 = format!(r#"<text id="3">{}</text>"#, escape_str_attribute(content));
self
}
/// Set the length of time to show the toast
pub fn duration(mut self, duration: Duration) -> Toast {
self.duration = match duration {
Duration::Long => "duration=\"long\"",
Duration::Short => "duration=\"short\"",
}
.to_owned();
self
}
/// Set the icon shown in the upper left of the toast
///
/// The default is determined by your app id.
/// If you are using the powershell workaround, it will be the powershell icon
pub fn icon(mut self, source: &Path, crop: IconCrop, alt_text: &str) -> Toast {
if windows_check::is_newer_than_windows81() {
let crop_type_attr = match crop {
IconCrop::Square => "".to_string(),
IconCrop::Circular => "hint-crop=\"circle\"".to_string(),
};
self.images = format!(
r#"{}<image placement="appLogoOverride" {} src="file:///{}" alt="{}" />"#,
self.images,
crop_type_attr,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
} else {
// Win81 rejects the above xml so we fallback to a simpler call
self.image(source, alt_text)
}
}
/// Add/Set a Hero image for the toast.
///
/// This will be above the toast text and the icon.
pub fn hero(mut self, source: &Path, alt_text: &str) -> Toast {
if windows_check::is_newer_than_windows81() {
self.images = format!(
r#"{}<image placement="Hero" src="file:///{}" alt="{}" />"#,
self.images,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
} else {
// win81 rejects the above xml so we fallback to a simpler call
self.image(source, alt_text)
}
}
/// Add an image to the toast
///
/// May be done many times.
/// Will appear below text.
pub fn image(mut self, source: &Path, alt_text: &str) -> Toast {
if !windows_check::is_newer_than_windows81() {
// win81 cannot have more than 1 image and shows nothing if there is more than that
self.images = "".to_owned();
}
self.images = format!(
r#"{}<image id="1" src="file:///{}" alt="{}" />"#,
self.images,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
}
/// Set the sound for the toast or silence it
///
/// Default is [Sound::IM](enum.Sound.html)
pub fn sound(mut self, src: Option<Sound>) -> Toast {
self.audio = match src {
None => "<audio silent=\"true\" />".to_owned(),
Some(Sound::Default) => "".to_owned(),
Some(Sound::Loop(sound)) => format!(
r#"<audio loop="true" src="ms-winsoundevent:Notification.Looping.{}" />"#,
sound
),
Some(Sound::Single(sound)) => format!(
r#"<audio src="ms-winsoundevent:Notification.Looping.{}" />"#,
sound
),
Some(sound) => format!(r#"<audio src="ms-winsoundevent:Notification.{}" />"#, sound),
};
self
}
/// Display the toast on the screen
pub fn show(&self) -> windows::Result<()> {
//using this to get an instance of XmlDocument
let toast_xml = XmlDocument::new()?;
let template_binding = if windows_check::is_newer_than_windows81() {
"ToastGeneric"
} else
//win8 or win81
{
// Need to do this or an empty placeholder will be shown if no image is set
if self.images == "" {
"ToastText04"
} else {
"Toast | Duration | identifier_name |
lib.rs | /uwp/controls-and-patterns/tiles-and-notifications-adaptive-interactive-toasts
/// * https://msdn.microsoft.com/library/14a07fce-d631-4bad-ab99-305b703713e6#Sending_toast_notifications_from_desktop_apps
/// for Windows 7 and older support look into Shell_NotifyIcon
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee330740(v=vs.85).aspx
/// https://softwareengineering.stackexchange.com/questions/222339/using-the-system-tray-notification-area-app-in-windows-7
extern crate windows;
extern crate xml;
#[macro_use]
extern crate strum;
#[allow(dead_code)]
mod bindings {
::windows::include_bindings!();
}
use bindings::{
windows::data::xml::dom::XmlDocument,
windows::ui::notifications::ToastNotification,
windows::ui::notifications::ToastNotificationManager,
windows::HString,
};
use std::fmt;
use std::path::Path;
use xml::escape::escape_str_attribute;
mod windows_check;
pub use windows::Error;
pub struct Toast {
duration: String,
title: String,
line1: String,
line2: String,
images: String,
audio: String,
app_id: String,
}
#[derive(Clone, Copy)]
pub enum Duration {
/// 7 seconds
Short,
/// 25 seconds
Long,
}
#[derive(Debug, EnumString, Clone, Copy)]
pub enum Sound {
Default,
IM,
Mail,
Reminder,
SMS,
/// Play the loopable sound only once
#[strum(disabled)]
Single(LoopableSound),
/// Loop the loopable sound for the entire duration of the toast
#[strum(disabled)]
Loop(LoopableSound),
}
/// Sounds suitable for Looping
#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
pub enum LoopableSound {
Alarm,
Alarm2,
Alarm3,
Alarm4,
Alarm5,
Alarm6,
Alarm7,
Alarm8,
Alarm9,
Alarm10,
Call,
Call2,
Call3,
Call4,
Call5,
Call6,
Call7,
Call8,
Call9,
Call10,
}
#[allow(dead_code)]
#[derive(Clone, Copy)]
pub enum IconCrop {
Square,
Circular,
}
#[doc(hidden)]
impl fmt::Display for Sound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
#[doc(hidden)]
impl fmt::Display for LoopableSound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
impl Toast {
/// This can be used if you do not have an AppUserModelID.
///
/// However, the toast will erroneously report its origin as PowerShell.
pub const POWERSHELL_APP_ID: &'static str = "{1AC14E77-02E7-4E5D-B744-2EB1AE5198B7}\
\\WindowsPowerShell\\v1.0\\powershell.exe";
/// Constructor for the toast builder.
///
/// app_id is the running application's [AppUserModelID][1].
///
/// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/dd378459(v=vs.85).aspx
///
/// If the program you are using this in was not installed, use Toast::POWERSHELL_APP_ID for now
#[allow(dead_code)]
pub fn new(app_id: &str) -> Toast {
Toast {
duration: String::new(),
title: String::new(),
line1: String::new(),
line2: String::new(),
images: String::new(),
audio: String::new(),
app_id: app_id.to_string(),
}
}
/// Sets the title of the toast.
///
/// Will be white.
/// Supports Unicode ✓
pub fn title(mut self, content: &str) -> Toast {
self.title = format!(r#"<text id="1">{}</text>"#, escape_str_attribute(content));
self
}
/// Add/Set the first line of text below the title.
///
/// Will be grey.
/// Supports Unicode ✓
pub fn text1(mut self, content: &str) -> Toast {
self.line1 = format!(r#"<text id="2">{}</text>"#, escape_str_attribute(content));
self
}
/// Add/Set the second line of text below the title.
///
/// Will be grey.
/// Supports Unicode ✓
pub fn text2(mut self, content: &str) -> Toast {
self.line2 = format!(r#"<text id="3">{}</text>"#, escape_str_attribute(content));
self
}
/// Set the length of time to show the toast
pub fn duration(mut self, duration: Duration) -> Toast {
self.duration = match duration {
Duration::Long => "duration=\"long\"",
Duration::Short => "duration=\"short\"",
}
.to_owned();
self
}
/// Set the icon shown in the upper left of the toast
///
/// The default is determined by your app id.
/// If you are using the powershell workaround, it will be the powershell icon
pub fn icon(mut self, source: &Path, crop: IconCrop, alt_text: &str) -> Toast {
if windows_check::is_newer_than_windows81() {
let crop_type_attr = match crop {
IconCrop::Square => "".to_string(),
IconCrop::Circular => "hint-crop=\"circle\"".to_string(),
};
self.images = format!(
r#"{}<image placement="appLogoOverride" {} src="file:///{}" alt="{}" />"#,
self.images,
crop_type_attr,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
} else {
// Win81 rejects the above xml so we fallback to a simpler call
self.image(source, alt_text)
}
}
/// Add/Set a Hero image for the toast.
///
/// This will be above the toast text and the icon.
pub fn hero(mut self, source: &Path, alt_text: &str) -> Toast {
if windows_check::is_newer_than_windows81() {
self.images = format!(
r#"{}<image placement="Hero" src="file:///{}" alt="{}" />"#,
self.images,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
} else {
// win81 rejects the above xml so we fallback to a simpler call
self.image(source, alt_text)
}
}
/// Add an image to the toast
///
/// May be done many times.
/// Will appear below text.
pub fn image(mut self, source: &Path, alt_text: &str) -> Toast {
if !windows_check::is_newer_than_windows81() {
// win81 cannot have more than 1 image and shows nothing if there is more than that
self.images = "".to_owned();
}
self.images = format!(
r#"{}<image id="1" src="file:///{}" alt="{}" />"#,
self.images,
escape_str_attribute(&source.display().to_string()),
escape_str_attribute(alt_text)
);
self
}
/// Set the sound for the toast or silence it
///
/// Default is [Sound::IM](enum.Sound.html)
pub fn sound(mut self, src: Option<Sound>) -> Toast {
self.audio = match src {
None => "<audio silent=\"true\" />".to_owned(),
Some(Sound::Default) => "".to_owned(),
Some(Sound::Loop(sound)) => format!(
r#"<audio loop="true" src="ms-winsoundevent:Notification.Looping.{}" />"#,
sound
),
Some(Sound::Single(sound)) => format!(
r#"<audio src="ms-winsoundevent:Notification.Looping.{}" />"#,
sound
),
Some(sound) => format!(r#"<audio src="ms-winsoundevent:Notification.{}" />"#, sound),
};
self
}
/// Display the toast on the screen
pub fn show(&self) -> windows::Result<()> {
| //using this to get an instance of XmlDocument
let toast_xml = XmlDocument::new()?;
let template_binding = if windows_check::is_newer_than_windows81() {
"ToastGeneric"
} else
//win8 or win81
{
// Need to do this or an empty placeholder will be shown if no image is set
if self.images == "" {
"ToastText04"
} else {
"ToastImageAndText04"
}
};
toast_xml.load_xml(HString::from(format!(
"<toast {}>
<visual>
<binding template=\"{}\"> | identifier_body |
|
testcase.py | a new temporary HOME directory.
#:
#: If set, a directory will be created at test startup, and will be
#: set as the home directory.
#:
#: Version Added:
#: 3.0
needs_temp_home: bool = False
@classmethod
def setUpClass(cls):
super(TestCase, cls).setUpClass()
cls._cls_old_cwd = os.getcwd()
@classmethod
def tearDownClass(cls):
os.chdir(cls._cls_old_cwd)
super(TestCase, cls).tearDownClass()
def setUp(self):
super(TestCase, self).setUp()
self._old_cwd = os.getcwd()
self.old_home = self.get_user_home()
if self.needs_temp_home:
home_dir = make_tempdir()
self.set_user_home(home_dir)
# Since the tests need a safer HOME setup, it stands to reason
# that we should also not operate within the tree, as it could
# result in RBTools's .reviewboardrc being picked up. We'll
# instead default to running within the new home directory.
os.chdir(home_dir)
os.environ[str('RBTOOLS_EDITOR')] = str(self.default_text_editor)
def tearDown(self):
super(TestCase, self).tearDown()
os.chdir(self._old_cwd)
cleanup_tempfiles()
if self.old_home:
self.set_user_home(self.old_home)
def shortDescription(self):
"""Returns the description of the current test.
This changes the default behavior to replace all newlines with spaces,
allowing a test description to span lines. It should still be kept
short, though.
Returns:
unicode:
The descriptive text for the current unit test.
"""
doc = self._testMethodDoc
if doc is not None:
doc = doc.split('\n\n', 1)[0]
doc = self.ws_re.sub(' ', doc).strip()
return doc
def get_user_home(self):
"""Return the user's current home directory.
Version Added:
3.0
Returns:
unicode:
The current home directory.
"""
return os.environ['HOME']
def set_user_home(self, path):
"""Set the user's current home directory.
This will be unset when the unit test has finished.
Version Added:
3.0
Args:
path (unicode):
The new home directory.
"""
os.environ['HOME'] = path
def chdir_tmp(self):
"""Create a temporary directory and set it as the working directory.
The directory will be deleted after the test has finished.
Version Added:
3.0
Returns:
unicode:
The path to the temp directory.
"""
dirname = make_tempdir()
os.chdir(dirname)
return dirname
@contextmanager
def env(
self,
env: Dict[str, Optional[str]],
) -> Iterator[None]:
"""Run code with custom environment variables temporarily set.
This will set environment variables to the provided values (or
erase them from the environment if set to ``None``) before executing
the code in the context.
Once executed, the old environment will be restored.
Version Added:
5.0
Args:
env (dict):
The environment variables to set/remove.
Context:
Code will execute with the new environment set.
"""
old_env: Dict[str, Optional[str]] = {}
for key, value in env.items():
old_env[key] = os.environ.get(key)
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
try:
yield
finally:
for key, value in old_env.items():
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
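# Illustrative usage of env() in a test (the test name and values below are
# hypothetical; RBTOOLS_EDITOR is the variable already used in setUp() above):
#
#     def test_diff_with_custom_editor(self):
#         with self.env({'RBTOOLS_EDITOR': '/bin/true', 'EDITOR': None}):
#             ...  # code here sees the patched environment
#
# The previous values are restored when the context exits.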
def precreate_tempfiles(self, count):
"""Pre-create a specific number of temporary files.
This will call :py:func:`~rbtools.utils.filesystem.make_tempfile`
the specified number of times, returning the list of generated temp
file paths, and will then spy that function to return those temp
files.
Once each pre-created temp file is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempfile` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary filenames up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args: | Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
spy_for = getattr(self, 'spy_for', None)
assert spy_for, (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpfiles: List[str] = [
make_tempfile()
for i in range(count)
]
tmpfiles_iter = iter(tmpfiles)
@spy_for(make_tempfile)
def _return_next_tempfile(*args, **kwargs) -> str:
try:
tmpfile = next(tmpfiles_iter)
except StopIteration:
self.fail('Too many calls to make_tempfile(). Expected %s, '
'got %s.'
% (count, count + 1))
content = kwargs.get('content')
if content:
with open(tmpfile, 'wb') as fp:
fp.write(content)
return tmpfile
return tmpfiles
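# Illustrative only: how a test might combine precreate_tempfiles() with a
# scripted kgb spy (the spied function and arguments below are hypothetical):
#
#     tmpfiles = self.precreate_tempfiles(2)
#     self.spy_on(run_process, op=kgb.SpyOpMatchInOrder([
#         {'args': (['diff', '-o', tmpfiles[0]],)},
#         {'args': (['diff', '-o', tmpfiles[1]],)},
#     ]))
#
# Each call to make_tempfile() inside the code under test then receives the
# pre-created paths in order.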
def precreate_tempdirs(self, count):
"""Pre-create a specific number of temporary directories.
This will call :py:func:`~rbtools.utils.filesystem.make_tempdir`
the specified number of times, returning the list of generated temp
paths, and will then spy that function to return those temp paths.
Once each pre-created temp path is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempdir` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary directory names up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args:
count (int):
The number of temporary directories to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
assert hasattr(self, 'spy_on'), (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpdirs = [
make_tempdir()
for i in range(count)
]
self.spy_on(make_tempdir, op=kgb.SpyOpReturnInOrder(tmpdirs))
return tmpdirs
def assertDiffEqual(self, diff, expected_diff):
"""Assert that two diffs are equal.
Args:
diff (bytes):
The generated diff.
expected_diff (bytes):
The expected diff.
Raises:
AssertionError:
The diffs aren't equal or of the right type.
"""
self.assertIsInstance(diff, bytes)
self.assertIsInstance(expected_diff, bytes)
self.assertEqual(diff.splitlines(), expected_diff.splitlines())
def assertRaisesMessage(self, expected_exception, expected_message):
"""Assert that a call raises an exception with the given message.
Args:
expected_exception (type):
The type of exception that's expected to be raised.
expected_message (unicode):
The expected exception message.
Raises:
AssertionError:
The assertion failure, if the exception and message isn't
raised.
"""
# This explicitly uses the old name, as opposed to assertRaisesRegex,
# because we still need Python 2.7 support. Once we move to Python 3,
# we can fix this.
return self.assertRaisesRegexp(expected_exception,
re.escape(expected_message))
def create_rbclient(self):
"""Return a RBClient for testing.
This will set up a :py:class:`~rbtools.testing.api.transport.
URLMapTransport`. It's recommended that the caller access it via
:py:meth:`get_rbclient_transport`.
Version Added:
3.1
Returns:
rbtools.api.client.RBClient:
The client for testing purposes.
"""
return RBClient(url=self.TEST_SERVER_URL,
transport_cls=URLMapTransport)
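# Illustrative usage in a test (how the transport is populated afterwards is
# left open here, since that depends on URLMapTransport's API):
#
#     client = self.create_rbclient()
#     transport = self.get_rbclient_transport(client)
#     ...  # register URL payloads on `transport`, then exercise client code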
def get_rbclient_transport(self, client):
"""Return the transport associated with a RBClient.
This allows tests to avoid reaching into
:py:class:`~rbtools.api.client.RBClient` internals in order to get
the transport.
Version Added:
3.1
Args:
client (rbtools.api.client.RBClient):
The client instance.
Returns:
| count (int):
The number of temporary filenames to pre-create.
| random_line_split |
testcase.py | a new temporary HOME directory.
#:
#: If set, a directory will be created at test startup, and will be
#: set as the home directory.
#:
#: Version Added:
#: 3.0
needs_temp_home: bool = False
@classmethod
def setUpClass(cls):
super(TestCase, cls).setUpClass()
cls._cls_old_cwd = os.getcwd()
@classmethod
def tearDownClass(cls):
os.chdir(cls._cls_old_cwd)
super(TestCase, cls).tearDownClass()
def setUp(self):
super(TestCase, self).setUp()
self._old_cwd = os.getcwd()
self.old_home = self.get_user_home()
if self.needs_temp_home:
home_dir = make_tempdir()
self.set_user_home(home_dir)
# Since the tests need a safer HOME setup, it stands to reason
# that we should also not operate within the tree, as it could
# result in RBTools's .reviewboardrc being picked up. We'll
# instead default to running within the new home directory.
os.chdir(home_dir)
os.environ[str('RBTOOLS_EDITOR')] = str(self.default_text_editor)
def tearDown(self):
super(TestCase, self).tearDown()
os.chdir(self._old_cwd)
cleanup_tempfiles()
if self.old_home:
self.set_user_home(self.old_home)
def shortDescription(self):
"""Returns the description of the current test.
This changes the default behavior to replace all newlines with spaces,
allowing a test description to span lines. It should still be kept
short, though.
Returns:
unicode:
The descriptive text for the current unit test.
"""
doc = self._testMethodDoc
if doc is not None:
doc = doc.split('\n\n', 1)[0]
doc = self.ws_re.sub(' ', doc).strip()
return doc
def get_user_home(self):
"""Return the user's current home directory.
Version Added:
3.0
Returns:
unicode:
The current home directory.
"""
return os.environ['HOME']
def set_user_home(self, path):
"""Set the user's current home directory.
This will be unset when the unit test has finished.
Version Added:
3.0
Args:
path (unicode):
The new home directory.
"""
os.environ['HOME'] = path
def chdir_tmp(self):
"""Create a temporary directory and set it as the working directory.
The directory will be deleted after the test has finished.
Version Added:
3.0
Returns:
unicode:
The path to the temp directory.
"""
dirname = make_tempdir()
os.chdir(dirname)
return dirname
@contextmanager
def env(
self,
env: Dict[str, Optional[str]],
) -> Iterator[None]:
"""Run code with custom environment variables temporarily set.
This will set environment variables to the provided values (or
erase them from the environment if set to ``None``) before executing
the code in the context.
Once executed, the old environment will be restored.
Version Added:
5.0
Args:
env (dict):
The environment variables to set/remove.
Context:
Code will execute with the new environment set.
"""
old_env: Dict[str, Optional[str]] = {}
for key, value in env.items():
old_env[key] = os.environ.get(key)
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
try:
yield
finally:
for key, value in old_env.items():
if value is None:
|
else:
os.environ[key] = value
def precreate_tempfiles(self, count):
"""Pre-create a specific number of temporary files.
This will call :py:func:`~rbtools.utils.filesystem.make_tempfile`
the specified number of times, returning the list of generated temp
file paths, and will then spy that function to return those temp
files.
Once each pre-created temp file is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempfile` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary filenames up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args:
count (int):
The number of temporary filenames to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
spy_for = getattr(self, 'spy_for', None)
assert spy_for, (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpfiles: List[str] = [
make_tempfile()
for i in range(count)
]
tmpfiles_iter = iter(tmpfiles)
@spy_for(make_tempfile)
def _return_next_tempfile(*args, **kwargs) -> str:
try:
tmpfile = next(tmpfiles_iter)
except StopIteration:
self.fail('Too many calls to make_tempfile(). Expected %s, '
'got %s.'
% (count, count + 1))
content = kwargs.get('content')
if content:
with open(tmpfile, 'wb') as fp:
fp.write(content)
return tmpfile
return tmpfiles
def precreate_tempdirs(self, count):
"""Pre-create a specific number of temporary directories.
This will call :py:func:`~rbtools.utils.filesystem.make_tempdir`
the specified number of times, returning the list of generated temp
paths, and will then spy that function to return those temp paths.
Once each pre-created temp path is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempdir` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary directory names up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args:
count (int):
The number of temporary directories to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
assert hasattr(self, 'spy_on'), (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpdirs = [
make_tempdir()
for i in range(count)
]
self.spy_on(make_tempdir, op=kgb.SpyOpReturnInOrder(tmpdirs))
return tmpdirs
def assertDiffEqual(self, diff, expected_diff):
"""Assert that two diffs are equal.
Args:
diff (bytes):
The generated diff.
expected_diff (bytes):
The expected diff.
Raises:
AssertionError:
The diffs aren't equal or of the right type.
"""
self.assertIsInstance(diff, bytes)
self.assertIsInstance(expected_diff, bytes)
self.assertEqual(diff.splitlines(), expected_diff.splitlines())
def assertRaisesMessage(self, expected_exception, expected_message):
"""Assert that a call raises an exception with the given message.
Args:
expected_exception (type):
The type of exception that's expected to be raised.
expected_message (unicode):
The expected exception message.
Raises:
AssertionError:
The assertion failure, if the exception and message isn't
raised.
"""
# This explicitly uses the old name, as opposed to assertRaisesRegex,
# because we still need Python 2.7 support. Once we move to Python 3,
# we can fix this.
return self.assertRaisesRegexp(expected_exception,
re.escape(expected_message))
def create_rbclient(self):
"""Return a RBClient for testing.
This will set up a :py:class:`~rbtools.testing.api.transport.
URLMapTransport`. It's recommended that the caller access it via
:py:meth:`get_rbclient_transport`.
Version Added:
3.1
Returns:
rbtools.api.client.RBClient:
The client for testing purposes.
"""
return RBClient(url=self.TEST_SERVER_URL,
transport_cls=URLMapTransport)
def get_rbclient_transport(self, client):
"""Return the transport associated with a RBClient.
This allows tests to avoid reaching into
:py:class:`~rbtools.api.client.RBClient` internals in order to get
the transport.
Version Added:
3.1
Args:
client (rbtools.api.client.RBClient):
The client instance.
Returns:
| os.environ.pop(key, None) | conditional_block |
testcase.py | a new temporary HOME directory.
#:
#: If set, a directory will be created at test startup, and will be
#: set as the home directory.
#:
#: Version Added:
#: 3.0
needs_temp_home: bool = False
@classmethod
def setUpClass(cls):
super(TestCase, cls).setUpClass()
cls._cls_old_cwd = os.getcwd()
@classmethod
def tearDownClass(cls):
os.chdir(cls._cls_old_cwd)
super(TestCase, cls).tearDownClass()
def setUp(self):
super(TestCase, self).setUp()
self._old_cwd = os.getcwd()
self.old_home = self.get_user_home()
if self.needs_temp_home:
home_dir = make_tempdir()
self.set_user_home(home_dir)
# Since the tests need a safer HOME setup, it stands to reason
# that we should also not operate within the tree, as it could
# result in RBTools's .reviewboardrc being picked up. We'll
# instead default to running within the new home directory.
os.chdir(home_dir)
os.environ[str('RBTOOLS_EDITOR')] = str(self.default_text_editor)
def tearDown(self):
super(TestCase, self).tearDown()
os.chdir(self._old_cwd)
cleanup_tempfiles()
if self.old_home:
self.set_user_home(self.old_home)
def shortDescription(self):
"""Returns the description of the current test.
This changes the default behavior to replace all newlines with spaces,
allowing a test description to span lines. It should still be kept
short, though.
Returns:
unicode:
The descriptive text for the current unit test.
"""
doc = self._testMethodDoc
if doc is not None:
doc = doc.split('\n\n', 1)[0]
doc = self.ws_re.sub(' ', doc).strip()
return doc
def get_user_home(self):
"""Return the user's current home directory.
Version Added:
3.0
Returns:
unicode:
The current home directory.
"""
return os.environ['HOME']
def set_user_home(self, path):
"""Set the user's current home directory.
This will be unset when the unit test has finished.
Version Added:
3.0
Args:
path (unicode):
The new home directory.
"""
os.environ['HOME'] = path
def chdir_tmp(self):
"""Create a temporary directory and set it as the working directory.
The directory will be deleted after the test has finished.
Version Added:
3.0
Returns:
unicode:
The path to the temp directory.
"""
dirname = make_tempdir()
os.chdir(dirname)
return dirname
@contextmanager
def env(
self,
env: Dict[str, Optional[str]],
) -> Iterator[None]:
"""Run code with custom environment variables temporarily set.
This will set environment variables to the provided values (or
erase them from the environment if set to ``None``) before executing
the code in the context.
Once executed, the old environment will be restored.
Version Added:
5.0
Args:
env (dict):
The environment variables to set/remove.
Context:
Code will execute with the new environment set.
"""
old_env: Dict[str, Optional[str]] = {}
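# Remember each variable's previous value (None means it was unset) so it can be restored in the finally block.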
for key, value in env.items():
old_env[key] = os.environ.get(key)
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
try:
yield
finally:
for key, value in old_env.items():
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
def precreate_tempfiles(self, count):
|
Args:
count (int):
The number of temporary filenames to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
spy_for = getattr(self, 'spy_for', None)
assert spy_for, (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpfiles: List[str] = [
make_tempfile()
for i in range(count)
]
tmpfiles_iter = iter(tmpfiles)
@spy_for(make_tempfile)
def _return_next_tempfile(*args, **kwargs) -> str:
try:
tmpfile = next(tmpfiles_iter)
except StopIteration:
self.fail('Too many calls to make_tempfile(). Expected %s, '
'got %s.'
% (count, count + 1))
content = kwargs.get('content')
if content:
with open(tmpfile, 'wb') as fp:
fp.write(content)
return tmpfile
return tmpfiles
def precreate_tempdirs(self, count):
"""Pre-create a specific number of temporary directories.
This will call :py:func:`~rbtools.utils.filesystem.make_tempdir`
the specified number of times, returning the list of generated temp
paths, and will then spy that function to return those temp paths.
Once each pre-created temp path is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempdir` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary filenames up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args:
count (int):
The number of temporary directories to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
assert hasattr(self, 'spy_on'), (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpdirs = [
make_tempdir()
for i in range(count)
]
self.spy_on(make_tempdir, op=kgb.SpyOpReturnInOrder(tmpdirs))
return tmpdirs
def assertDiffEqual(self, diff, expected_diff):
"""Assert that two diffs are equal.
Args:
diff (bytes):
The generated diff.
expected_diff (bytes):
The expected diff.
Raises:
AssertionError:
The diffs aren't equal or of the right type.
"""
self.assertIsInstance(diff, bytes)
self.assertIsInstance(expected_diff, bytes)
self.assertEqual(diff.splitlines(), expected_diff.splitlines())
def assertRaisesMessage(self, expected_exception, expected_message):
"""Assert that a call raises an exception with the given message.
Args:
expected_exception (type):
The type of exception that's expected to be raised.
expected_message (unicode):
The expected exception message.
Raises:
AssertionError:
The assertion failure, if the exception and message aren't
raised.
"""
# This explicitly uses the old name, as opposed to assertRaisesRegex,
# because we still need Python 2.7 support. Once we move to Python 3,
# we can fix this.
return self.assertRaisesRegexp(expected_exception,
re.escape(expected_message))
def create_rbclient(self):
"""Return a RBClient for testing.
This will set up a :py:class:`~rbtools.testing.api.transport.
URLMapTransport`. It's recommended that the caller access it via
:py:meth:`get_rbclient_transport`.
Version Added:
3.1
Args:
transport (rbtools.api.transport.Transport, optional):
An explicit transport instance to use
Returns:
rbtools.api.client.RBClient:
The client for testing purposes.
"""
return RBClient(url=self.TEST_SERVER_URL,
transport_cls=URLMapTransport)
def get_rbclient_transport(self, client):
"""Return the transport associated with a RBClient.
This allows tests to avoid reaching into
:py:class:`~rbtools.api.client.RBClient` internals in order to get
the transport.
Version Added:
3.1
Args:
client (rbtools.api.client.RBClient):
The client instance.
Returns:
| """Pre-create a specific number of temporary files.
This will call :py:func:`~rbtools.utils.filesystem.make_tempfile`
the specified number of times, returning the list of generated temp
file paths, and will then spy that function to return those temp
files.
Once each pre-created temp file is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempfile` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary filenames up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0 | identifier_body |
testcase.py | a new temporary HOME directory.
#:
#: If set, a directory will be created at test startup, and will be
#: set as the home directory.
#:
#: Version Added:
#: 3.0
needs_temp_home: bool = False
@classmethod
def setUpClass(cls):
super(TestCase, cls).setUpClass()
cls._cls_old_cwd = os.getcwd()
@classmethod
def tearDownClass(cls):
os.chdir(cls._cls_old_cwd)
super(TestCase, cls).tearDownClass()
def setUp(self):
super(TestCase, self).setUp()
self._old_cwd = os.getcwd()
self.old_home = self.get_user_home()
if self.needs_temp_home:
home_dir = make_tempdir()
self.set_user_home(home_dir)
# Since the tests need a safer HOME setup, it stands to reason
# that we should also not operate within the tree, as it could
# result in RBTools's .reviewboardrc being picked up. We'll
# instead default to running within the new home directory.
os.chdir(home_dir)
os.environ[str('RBTOOLS_EDITOR')] = str(self.default_text_editor)
def tearDown(self):
super(TestCase, self).tearDown()
os.chdir(self._old_cwd)
cleanup_tempfiles()
if self.old_home:
self.set_user_home(self.old_home)
def shortDescription(self):
"""Returns the description of the current test.
This changes the default behavior to replace all newlines with spaces,
allowing a test description to span lines. It should still be kept
short, though.
Returns:
unicode:
The descriptive text for the current unit test.
"""
doc = self._testMethodDoc
if doc is not None:
doc = doc.split('\n\n', 1)[0]
doc = self.ws_re.sub(' ', doc).strip()
return doc
def get_user_home(self):
"""Return the user's current home directory.
Version Added:
3.0
Returns:
unicode:
The current home directory.
"""
return os.environ['HOME']
def set_user_home(self, path):
"""Set the user's current home directory.
This will be unset when the unit test has finished.
Version Added:
3.0
Args:
path (unicode):
The new home directory.
"""
os.environ['HOME'] = path
def chdir_tmp(self):
"""Create a temporary directory and set it as the working directory.
The directory will be deleted after the test has finished.
Version Added:
3.0
Returns:
unicode:
The path to the temp directory.
"""
dirname = make_tempdir()
os.chdir(dirname)
return dirname
@contextmanager
def env(
self,
env: Dict[str, Optional[str]],
) -> Iterator[None]:
"""Run code with custom environment variables temporarily set.
This will set environment variables to the provided values (or
erase them from the environment if set to ``None``) before executing
the code in the context.
Once executed, the old environment will be restored.
Version Added:
5.0
Args:
env (dict):
The environment variables to set/remove.
Context:
Code will execute with the new environment set.
"""
old_env: Dict[str, Optional[str]] = {}
for key, value in env.items():
old_env[key] = os.environ.get(key)
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
try:
yield
finally:
for key, value in old_env.items():
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
def | (self, count):
"""Pre-create a specific number of temporary files.
This will call :py:func:`~rbtools.utils.filesystem.make_tempfile`
the specified number of times, returning the list of generated temp
file paths, and will then spy that function to return those temp
files.
Once each pre-created temp file is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempfile` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary filenames up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args:
count (int):
The number of temporary filenames to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
spy_for = getattr(self, 'spy_for', None)
assert spy_for, (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpfiles: List[str] = [
make_tempfile()
for i in range(count)
]
tmpfiles_iter = iter(tmpfiles)
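# Replace make_tempfile() with a spy that hands out the pre-created paths one at a time.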
@spy_for(make_tempfile)
def _return_next_tempfile(*args, **kwargs) -> str:
try:
tmpfile = next(tmpfiles_iter)
except StopIteration:
self.fail('Too many calls to make_tempfile(). Expected %s, '
'got %s.'
% (count, count + 1))
content = kwargs.get('content')
if content:
with open(tmpfile, 'wb') as fp:
fp.write(content)
return tmpfile
return tmpfiles
def precreate_tempdirs(self, count):
"""Pre-create a specific number of temporary directories.
This will call :py:func:`~rbtools.utils.filesystem.make_tempdir`
the specified number of times, returning the list of generated temp
paths, and will then spy that function to return those temp paths.
Once each pre-created temp path is used up, any further calls to
:py:func:`~rbtools.utils.filesystem.make_tempdir` will result in
an error, failing the test.
This is useful in unit tests that need to script a series of
expected calls using :py:mod:`kgb` (such as through
:py:class:`kgb.ops.SpyOpMatchInOrder`) that need to know the names
of temporary filenames up-front.
Unit test suites that use this must mix in :py:class:`kgb.SpyAgency`.
Version Added:
3.0
Args:
count (int):
The number of temporary directories to pre-create.
Raises:
AssertionError:
The test suite class did not mix in :py:class:`kgb.SpyAgency`.
"""
assert hasattr(self, 'spy_on'), (
'%r must mix in kgb.SpyAgency in order to call this method.'
% self.__class__)
tmpdirs = [
make_tempdir()
for i in range(count)
]
self.spy_on(make_tempdir, op=kgb.SpyOpReturnInOrder(tmpdirs))
return tmpdirs
def assertDiffEqual(self, diff, expected_diff):
"""Assert that two diffs are equal.
Args:
diff (bytes):
The generated diff.
expected_diff (bytes):
The expected diff.
Raises:
AssertionError:
The diffs aren't equal or of the right type.
"""
self.assertIsInstance(diff, bytes)
self.assertIsInstance(expected_diff, bytes)
self.assertEqual(diff.splitlines(), expected_diff.splitlines())
def assertRaisesMessage(self, expected_exception, expected_message):
"""Assert that a call raises an exception with the given message.
Args:
expected_exception (type):
The type of exception that's expected to be raised.
expected_message (unicode):
The expected exception message.
Raises:
AssertionError:
The assertion failure, if the exception and message aren't
raised.
"""
# This explicitly uses the old name, as opposed to assertRaisesRegex,
# because we still need Python 2.7 support. Once we move to Python 3,
# we can fix this.
return self.assertRaisesRegexp(expected_exception,
re.escape(expected_message))
def create_rbclient(self):
"""Return a RBClient for testing.
This will set up a :py:class:`~rbtools.testing.api.transport.
URLMapTransport`. It's recommended that the caller access it via
:py:meth:`get_rbclient_transport`.
Version Added:
3.1
Args:
transport (rbtools.api.transport.Transport, optional):
An explicit transport instance to use
Returns:
rbtools.api.client.RBClient:
The client for testing purposes.
"""
return RBClient(url=self.TEST_SERVER_URL,
transport_cls=URLMapTransport)
def get_rbclient_transport(self, client):
"""Return the transport associated with a RBClient.
This allows tests to avoid reaching into
:py:class:`~rbtools.api.client.RBClient` internals in order to get
the transport.
Version Added:
3.1
Args:
client (rbtools.api.client.RBClient):
The client instance.
Returns:
| precreate_tempfiles | identifier_name |
data_loader.py | random.randint(0, 255), random.randint(
0, 255), random.randint(0, 255)) for _ in range(5000)]
nprandomseed = 2019
class DataLoaderError(Exception):
pass
def get_pairs_from_paths(images_path, segs_path, ignore_non_matching=False):
""" Find all the images from the images_path directory and
the segmentation images from the segs_path directory
while checking integrity of data """
ACCEPTABLE_IMAGE_FORMATS = [".jpg", ".jpeg", ".png", ".bmp"]
ACCEPTABLE_SEGMENTATION_FORMATS = [".png", ".bmp"]
image_files = []
segmentation_files = {}
for dir_entry in sorted(os.listdir(images_path)):
if os.path.isfile(os.path.join(images_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_IMAGE_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
image_files.append((file_name, file_extension,
os.path.join(images_path, dir_entry)))
for dir_entry in sorted(os.listdir(segs_path)):
if os.path.isfile(os.path.join(segs_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_SEGMENTATION_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
full_dir_entry = os.path.join(segs_path, dir_entry)
if file_name in segmentation_files:
raise DataLoaderError("Segmentation file with filename {0}"
" already exists and is ambiguous to"
" resolve with path {1}."
" Please remove or rename the latter."
.format(file_name, full_dir_entry))
segmentation_files[file_name] = (file_extension, full_dir_entry)
return_value = []
# Match the images and segmentations
for image_file, _, image_full_path in image_files:
if image_file in segmentation_files:
return_value.append((image_full_path,
segmentation_files[image_file][1]))
elif ignore_non_matching:
continue
else:
# Error out
raise DataLoaderError("No corresponding segmentation "
"found for image {0}."
.format(image_full_path))
return return_value
def get_image_array(image_input,
width, height,
imgNorm="sub_mean", ordering='channels_first'):
""" Load image array from input """
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_image_array: path {0} doesn't exist"
.format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_image_array: Can't process input type {0}"
.format(str(type(image_input))))
if imgNorm == "sub_and_divide":
img = np.float32(cv2.resize(img, (width, height))) / 127.5 - 1
elif imgNorm == "sub_mean":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img[:, :, 0] -= 103.939
img[:, :, 1] -= 116.779
img[:, :, 2] -= 123.68
img = img[:, :, ::-1]
elif imgNorm == "divide":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img = img/255.0
if ordering == 'channels_first':
img = np.rollaxis(img, 2, 0)
return img
def get_segmentation_array(image_input, nClasses,
width, height, no_reshape=False, loss_type=0):
""" Load segmentation array from input """
seg_labels = np.zeros((height, width, nClasses))
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_segmentation_array: "
"path {0} doesn't exist".format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_segmentation_array: "
"Can't process input type {0}"
.format(str(type(image_input))))
img = cv2.resize(img, (width, height), interpolation=cv2.INTER_NEAREST)
img = img[:, :, 0]
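# Use the first channel as the class-id map and one-hot encode it: channel c is 1 where the pixel's class id equals c.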
for c in range(nClasses):
seg_labels[:, :, c] = (img == c).astype(int)
if not no_reshape:
if (loss_type==1 or loss_type==2):
seg_labels = np.reshape(seg_labels, (width*height, nClasses))
return seg_labels
def verify_segmentation_dataset(images_path, segs_path,
n_classes, deterministic=False, show_all_errors=False):
try:
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if not len(img_seg_pairs):
|
return_value = True
for im_fn, seg_fn in tqdm(img_seg_pairs):
img = cv2.imread(im_fn)
seg = cv2.imread(seg_fn)
# Check dimensions match
if not img.shape == seg.shape:
return_value = False
print("The size of image {0} and its segmentation {1} "
"doesn't match (possibly the files are corrupt)."
.format(im_fn, seg_fn))
if not show_all_errors:
break
else:
max_pixel_value = np.max(seg[:, :, 0])
if max_pixel_value >= n_classes:
return_value = False
print("The pixel values of the segmentation image {0} "
"violating range [0, {1}]. "
"Found maximum pixel value {2}"
.format(seg_fn, str(n_classes - 1), max_pixel_value))
if not show_all_errors:
break
if return_value:
print("Dataset verified! ")
else:
print("Dataset not verified!")
return return_value
except DataLoaderError as e:
print("Found error during data loading\n{0}".format(str(e)))
return False
def image_segmentation_generator(images_path, segs_path, batch_size,
n_classes, input_height, input_width,
output_height, output_width, deterministic=False,
do_augment=False,
augmentation_name="aug_all",
num_shards=1,
shard_id=0,
loss_type=0):
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if num_shards > 1:
np.random.seed(nprandomseed)
if not deterministic:
random.shuffle(img_seg_pairs)
if num_shards > 1:
process_subset_size = len(img_seg_pairs) // num_shards
ids_from = process_subset_size*shard_id
ids_to = ids_from + process_subset_size
img_seg_pairs_ = img_seg_pairs[ids_from : ids_to]
# make sure all samples are used
img_seg_pairs_ += img_seg_pairs[process_subset_size*num_shards:]
img_seg_pairs = img_seg_pairs_
#print(f'Image Generator : [ {shard_id} ] , {len(img_seg_pairs)} - {img_seg_pairs[:10]}')
print(f'Ids from to : [{shard_id}], {ids_from} to {ids_to}')
zipped = itertools.cycle(img_seg_pairs)
while True:
X = []
Y = []
for _ in range(batch_size):
im, seg = next(zipped)
im = cv2.imread(im, 1)
seg = cv2.imread(seg, 1)
if do_augment:
im, seg[:, :, 0] = augment_seg(im, seg[:, :, 0],
augmentation_name)
X.append(get_image_array(im, input_width,
input_height, ordering=IMAGE_ORDERING))
Y.append(get_segmentation_array(
seg, n_classes, output_width, output_height, loss_type=loss_type))
if not (loss_type==1 or loss_type==2):
Y = np.reshape(Y, [batch_size, output_height, output_width, n_classes])
yield np.array(X), np.array(Y)
def create_segmentation_list(generator, iterations):
X=[]
Y=[]
print("Genering list: ", iterations)
for itr in tqdm(range(iterations)):
img,seg = next(generator)
X.append(img)
Y.append(seg)
X_concat = np.concatenate(X, axis=0)
Y_concat = np.concatenate(Y, axis=0)
return X_concat, Y_concat
def cached_image_generator(generator, num_shards, shard_id, batch_size, total_img_num, deterministic=False):
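# Each shard gets total_img_num // num_shards images (the last shard also takes the remainder);
# padding rounds that up to a whole number of batches so the cached arrays split evenly into batches.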
shard_img = total_img_num//num_shards + total_img_num%num_shards if shard_id == num_shards-1 else total_img_num//num_shards
padding = batch_size - shard_img%batch_size if shard_img%batch_size != 0 else 0
X, y = create_segment | print("Couldn't load any data from images_path: "
"{0} and segmentations path: {1}"
.format(images_path, segs_path))
return False | conditional_block |
data_loader.py | 5000)]
nprandomseed = 2019
class DataLoaderError(Exception):
pass
def get_pairs_from_paths(images_path, segs_path, ignore_non_matching=False):
""" Find all the images from the images_path directory and
the segmentation images from the segs_path directory
while checking integrity of data """
ACCEPTABLE_IMAGE_FORMATS = [".jpg", ".jpeg", ".png", ".bmp"]
ACCEPTABLE_SEGMENTATION_FORMATS = [".png", ".bmp"]
image_files = []
segmentation_files = {}
for dir_entry in sorted(os.listdir(images_path)):
if os.path.isfile(os.path.join(images_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_IMAGE_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
image_files.append((file_name, file_extension,
os.path.join(images_path, dir_entry)))
for dir_entry in sorted(os.listdir(segs_path)):
if os.path.isfile(os.path.join(segs_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_SEGMENTATION_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
full_dir_entry = os.path.join(segs_path, dir_entry)
if file_name in segmentation_files:
raise DataLoaderError("Segmentation file with filename {0}"
" already exists and is ambiguous to"
" resolve with path {1}."
" Please remove or rename the latter."
.format(file_name, full_dir_entry))
segmentation_files[file_name] = (file_extension, full_dir_entry)
return_value = []
# Match the images and segmentations
for image_file, _, image_full_path in image_files:
if image_file in segmentation_files:
return_value.append((image_full_path,
segmentation_files[image_file][1]))
elif ignore_non_matching:
continue
else:
# Error out
raise DataLoaderError("No corresponding segmentation "
"found for image {0}."
.format(image_full_path))
return return_value
def get_image_array(image_input,
width, height,
imgNorm="sub_mean", ordering='channels_first'):
""" Load image array from input """
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_image_array: path {0} doesn't exist"
.format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_image_array: Can't process input type {0}"
.format(str(type(image_input))))
if imgNorm == "sub_and_divide":
img = np.float32(cv2.resize(img, (width, height))) / 127.5 - 1
elif imgNorm == "sub_mean":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img[:, :, 0] -= 103.939
img[:, :, 1] -= 116.779
img[:, :, 2] -= 123.68
img = img[:, :, ::-1]
elif imgNorm == "divide":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img = img/255.0
if ordering == 'channels_first':
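# Move the channel axis to the front (HWC -> CHW) when channels_first ordering is requested.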
img = np.rollaxis(img, 2, 0)
return img
def get_segmentation_array(image_input, nClasses,
width, height, no_reshape=False, loss_type=0):
""" Load segmentation array from input """
seg_labels = np.zeros((height, width, nClasses))
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_segmentation_array: "
"path {0} doesn't exist".format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_segmentation_array: "
"Can't process input type {0}"
.format(str(type(image_input))))
img = cv2.resize(img, (width, height), interpolation=cv2.INTER_NEAREST)
img = img[:, :, 0]
for c in range(nClasses):
seg_labels[:, :, c] = (img == c).astype(int)
if not no_reshape:
if (loss_type==1 or loss_type==2):
seg_labels = np.reshape(seg_labels, (width*height, nClasses))
return seg_labels
def verify_segmentation_dataset(images_path, segs_path,
n_classes, deterministic=False, show_all_errors=False):
try:
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if not len(img_seg_pairs):
print("Couldn't load any data from images_path: "
"{0} and segmentations path: {1}"
.format(images_path, segs_path))
return False
return_value = True
for im_fn, seg_fn in tqdm(img_seg_pairs):
img = cv2.imread(im_fn)
seg = cv2.imread(seg_fn)
# Check dimensions match
if not img.shape == seg.shape:
return_value = False
print("The size of image {0} and its segmentation {1} "
"doesn't match (possibly the files are corrupt)."
.format(im_fn, seg_fn))
if not show_all_errors:
break
else:
max_pixel_value = np.max(seg[:, :, 0])
if max_pixel_value >= n_classes:
return_value = False
print("The pixel values of the segmentation image {0} "
"violating range [0, {1}]. "
"Found maximum pixel value {2}"
.format(seg_fn, str(n_classes - 1), max_pixel_value))
if not show_all_errors:
break
if return_value:
print("Dataset verified! ")
else:
print("Dataset not verified!")
return return_value
except DataLoaderError as e:
print("Found error during data loading\n{0}".format(str(e)))
return False
def image_segmentation_generator(images_path, segs_path, batch_size,
n_classes, input_height, input_width,
output_height, output_width, deterministic=False,
do_augment=False,
augmentation_name="aug_all",
num_shards=1,
shard_id=0,
loss_type=0):
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if num_shards > 1:
np.random.seed(nprandomseed)
if not deterministic:
random.shuffle(img_seg_pairs)
if num_shards > 1:
process_subset_size = len(img_seg_pairs) // num_shards
ids_from = process_subset_size*shard_id
ids_to = ids_from + process_subset_size
img_seg_pairs_ = img_seg_pairs[ids_from : ids_to]
# make sure all samples are used
img_seg_pairs_ += img_seg_pairs[process_subset_size*num_shards:]
img_seg_pairs = img_seg_pairs_
#print(f'Image Generator : [ {shard_id} ] , {len(img_seg_pairs)} - {img_seg_pairs[:10]}')
print(f'Ids from to : [{shard_id}], {ids_from} to {ids_to}')
zipped = itertools.cycle(img_seg_pairs)
while True:
X = []
Y = []
for _ in range(batch_size):
im, seg = next(zipped)
im = cv2.imread(im, 1)
seg = cv2.imread(seg, 1)
if do_augment:
im, seg[:, :, 0] = augment_seg(im, seg[:, :, 0],
augmentation_name)
X.append(get_image_array(im, input_width,
input_height, ordering=IMAGE_ORDERING))
Y.append(get_segmentation_array(
seg, n_classes, output_width, output_height, loss_type=loss_type))
if not (loss_type==1 or loss_type==2):
Y = np.reshape(Y, [batch_size, output_height, output_width, n_classes])
yield np.array(X), np.array(Y)
def create_segmentation_list(generator, iterations):
X=[]
Y=[]
print("Genering list: ", iterations)
for itr in tqdm(range(iterations)):
img,seg = next(generator)
X.append(img)
Y.append(seg)
X_concat = np.concatenate(X, axis=0)
Y_concat = np.concatenate(Y, axis=0)
return X_concat, Y_concat
def cached_image_generator(generator, num_shards, shard_id, batch_size, total_img_num, deterministic=False):
shard_img = total_img_num//num_shards + total_img_num%num_shards if shard_id == num_shards-1 else total_img_num//num_shards
padding = batch_size - shard_img%batch_size if shard_img%batch_size != 0 else 0
X, y = create_segmentation_list(generator, shard_img + padding)
# create a pipeline from a generator given full dataset X,y. This should prevent 2GB protobuf error
def | create_pipeline | identifier_name |
|
data_loader.py | random.randint(0, 255), random.randint(
0, 255), random.randint(0, 255)) for _ in range(5000)]
nprandomseed = 2019
class DataLoaderError(Exception):
pass
def get_pairs_from_paths(images_path, segs_path, ignore_non_matching=False):
""" Find all the images from the images_path directory and
the segmentation images from the segs_path directory
while checking integrity of data """
ACCEPTABLE_IMAGE_FORMATS = [".jpg", ".jpeg", ".png", ".bmp"]
ACCEPTABLE_SEGMENTATION_FORMATS = [".png", ".bmp"]
image_files = []
segmentation_files = {}
for dir_entry in sorted(os.listdir(images_path)):
if os.path.isfile(os.path.join(images_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_IMAGE_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
image_files.append((file_name, file_extension,
os.path.join(images_path, dir_entry)))
for dir_entry in sorted(os.listdir(segs_path)):
if os.path.isfile(os.path.join(segs_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_SEGMENTATION_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
full_dir_entry = os.path.join(segs_path, dir_entry)
if file_name in segmentation_files:
raise DataLoaderError("Segmentation file with filename {0}"
" already exists and is ambiguous to"
" resolve with path {1}."
" Please remove or rename the latter."
.format(file_name, full_dir_entry))
segmentation_files[file_name] = (file_extension, full_dir_entry)
return_value = []
# Match the images and segmentations
for image_file, _, image_full_path in image_files:
if image_file in segmentation_files:
return_value.append((image_full_path,
segmentation_files[image_file][1]))
elif ignore_non_matching:
continue
else:
# Error out
raise DataLoaderError("No corresponding segmentation "
"found for image {0}."
.format(image_full_path))
return return_value
def get_image_array(image_input,
width, height,
imgNorm="sub_mean", ordering='channels_first'):
""" Load image array from input """
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_image_array: path {0} doesn't exist"
.format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_image_array: Can't process input type {0}"
.format(str(type(image_input))))
if imgNorm == "sub_and_divide":
img = np.float32(cv2.resize(img, (width, height))) / 127.5 - 1
elif imgNorm == "sub_mean":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
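# Subtract the ImageNet per-channel means (BGR order, as loaded by cv2), then flip BGR -> RGB.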
img[:, :, 0] -= 103.939
img[:, :, 1] -= 116.779
img[:, :, 2] -= 123.68
img = img[:, :, ::-1]
elif imgNorm == "divide":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img = img/255.0
if ordering == 'channels_first':
img = np.rollaxis(img, 2, 0)
return img
def get_segmentation_array(image_input, nClasses,
width, height, no_reshape=False, loss_type=0):
""" Load segmentation array from input """
seg_labels = np.zeros((height, width, nClasses))
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_segmentation_array: "
"path {0} doesn't exist".format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_segmentation_array: "
"Can't process input type {0}"
.format(str(type(image_input))))
img = cv2.resize(img, (width, height), interpolation=cv2.INTER_NEAREST)
img = img[:, :, 0]
for c in range(nClasses):
seg_labels[:, :, c] = (img == c).astype(int)
if not no_reshape:
if (loss_type==1 or loss_type==2):
seg_labels = np.reshape(seg_labels, (width*height, nClasses))
return seg_labels
def verify_segmentation_dataset(images_path, segs_path,
n_classes, deterministic=False, show_all_errors=False):
try:
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if not len(img_seg_pairs):
print("Couldn't load any data from images_path: "
"{0} and segmentations path: {1}"
.format(images_path, segs_path))
return False
return_value = True
for im_fn, seg_fn in tqdm(img_seg_pairs):
img = cv2.imread(im_fn)
seg = cv2.imread(seg_fn)
# Check dimensions match
if not img.shape == seg.shape:
return_value = False
print("The size of image {0} and its segmentation {1} "
"doesn't match (possibly the files are corrupt)."
.format(im_fn, seg_fn))
if not show_all_errors:
break
else:
max_pixel_value = np.max(seg[:, :, 0])
if max_pixel_value >= n_classes:
return_value = False
print("The pixel values of the segmentation image {0} "
"violating range [0, {1}]. "
"Found maximum pixel value {2}"
.format(seg_fn, str(n_classes - 1), max_pixel_value))
if not show_all_errors:
break
if return_value:
print("Dataset verified! ")
else:
print("Dataset not verified!")
return return_value
except DataLoaderError as e:
print("Found error during data loading\n{0}".format(str(e)))
return False
def image_segmentation_generator(images_path, segs_path, batch_size,
n_classes, input_height, input_width,
output_height, output_width, deterministic=False,
do_augment=False,
augmentation_name="aug_all",
num_shards=1,
shard_id=0,
loss_type=0):
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if num_shards > 1:
np.random.seed(nprandomseed)
if not deterministic:
random.shuffle(img_seg_pairs)
if num_shards > 1:
process_subset_size = len(img_seg_pairs) // num_shards
ids_from = process_subset_size*shard_id
ids_to = ids_from + process_subset_size
img_seg_pairs_ = img_seg_pairs[ids_from : ids_to]
# make sure all samples are used
img_seg_pairs_ += img_seg_pairs[process_subset_size*num_shards:]
img_seg_pairs = img_seg_pairs_
#print(f'Image Generator : [ {shard_id} ] , {len(img_seg_pairs)} - {img_seg_pairs[:10]}')
print(f'Ids from to : [{shard_id}], {ids_from} to {ids_to}')
zipped = itertools.cycle(img_seg_pairs)
while True:
X = []
Y = []
for _ in range(batch_size):
im, seg = next(zipped) |
if do_augment:
im, seg[:, :, 0] = augment_seg(im, seg[:, :, 0],
augmentation_name)
X.append(get_image_array(im, input_width,
input_height, ordering=IMAGE_ORDERING))
Y.append(get_segmentation_array(
seg, n_classes, output_width, output_height, loss_type=loss_type))
if not (loss_type==1 or loss_type==2):
Y = np.reshape(Y, [batch_size, output_height, output_width, n_classes])
yield np.array(X), np.array(Y)
def create_segmentation_list(generator, iterations):
X=[]
Y=[]
print("Genering list: ", iterations)
for itr in tqdm(range(iterations)):
img,seg = next(generator)
X.append(img)
Y.append(seg)
X_concat = np.concatenate(X, axis=0)
Y_concat = np.concatenate(Y, axis=0)
return X_concat, Y_concat
def cached_image_generator(generator, num_shards, shard_id, batch_size, total_img_num, deterministic=False):
shard_img = total_img_num//num_shards + total_img_num%num_shards if shard_id == num_shards-1 else total_img_num//num_shards
padding = batch_size - shard_img%batch_size if shard_img%batch_size != 0 else 0
X, y = create | im = cv2.imread(im, 1)
seg = cv2.imread(seg, 1) | random_line_split |
data_loader.py | [(random.randint(0, 255), random.randint(
0, 255), random.randint(0, 255)) for _ in range(5000)]
nprandomseed = 2019
class DataLoaderError(Exception):
pass
def get_pairs_from_paths(images_path, segs_path, ignore_non_matching=False):
""" Find all the images from the images_path directory and
the segmentation images from the segs_path directory
while checking integrity of data """
ACCEPTABLE_IMAGE_FORMATS = [".jpg", ".jpeg", ".png", ".bmp"]
ACCEPTABLE_SEGMENTATION_FORMATS = [".png", ".bmp"]
image_files = []
segmentation_files = {}
for dir_entry in sorted(os.listdir(images_path)):
if os.path.isfile(os.path.join(images_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_IMAGE_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
image_files.append((file_name, file_extension,
os.path.join(images_path, dir_entry)))
for dir_entry in sorted(os.listdir(segs_path)):
if os.path.isfile(os.path.join(segs_path, dir_entry)) and \
os.path.splitext(dir_entry)[1] in ACCEPTABLE_SEGMENTATION_FORMATS:
file_name, file_extension = os.path.splitext(dir_entry)
full_dir_entry = os.path.join(segs_path, dir_entry)
if file_name in segmentation_files:
raise DataLoaderError("Segmentation file with filename {0}"
" already exists and is ambiguous to"
" resolve with path {1}."
" Please remove or rename the latter."
.format(file_name, full_dir_entry))
segmentation_files[file_name] = (file_extension, full_dir_entry)
return_value = []
# Match the images and segmentations
for image_file, _, image_full_path in image_files:
if image_file in segmentation_files:
return_value.append((image_full_path,
segmentation_files[image_file][1]))
elif ignore_non_matching:
continue
else:
# Error out
raise DataLoaderError("No corresponding segmentation "
"found for image {0}."
.format(image_full_path))
return return_value
def get_image_array(image_input,
width, height,
imgNorm="sub_mean", ordering='channels_first'):
| img[:, :, 1] -= 116.779
img[:, :, 2] -= 123.68
img = img[:, :, ::-1]
elif imgNorm == "divide":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img = img/255.0
if ordering == 'channels_first':
img = np.rollaxis(img, 2, 0)
return img
def get_segmentation_array(image_input, nClasses,
width, height, no_reshape=False, loss_type=0):
""" Load segmentation array from input """
seg_labels = np.zeros((height, width, nClasses))
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_segmentation_array: "
"path {0} doesn't exist".format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_segmentation_array: "
"Can't process input type {0}"
.format(str(type(image_input))))
img = cv2.resize(img, (width, height), interpolation=cv2.INTER_NEAREST)
img = img[:, :, 0]
for c in range(nClasses):
seg_labels[:, :, c] = (img == c).astype(int)
if not no_reshape:
if (loss_type==1 or loss_type==2):
seg_labels = np.reshape(seg_labels, (width*height, nClasses))
return seg_labels
def verify_segmentation_dataset(images_path, segs_path,
n_classes, deterministic=False, show_all_errors=False):
try:
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if not len(img_seg_pairs):
print("Couldn't load any data from images_path: "
"{0} and segmentations path: {1}"
.format(images_path, segs_path))
return False
return_value = True
for im_fn, seg_fn in tqdm(img_seg_pairs):
img = cv2.imread(im_fn)
seg = cv2.imread(seg_fn)
# Check dimensions match
if not img.shape == seg.shape:
return_value = False
print("The size of image {0} and its segmentation {1} "
"doesn't match (possibly the files are corrupt)."
.format(im_fn, seg_fn))
if not show_all_errors:
break
else:
max_pixel_value = np.max(seg[:, :, 0])
if max_pixel_value >= n_classes:
return_value = False
print("The pixel values of the segmentation image {0} "
"violating range [0, {1}]. "
"Found maximum pixel value {2}"
.format(seg_fn, str(n_classes - 1), max_pixel_value))
if not show_all_errors:
break
if return_value:
print("Dataset verified! ")
else:
print("Dataset not verified!")
return return_value
except DataLoaderError as e:
print("Found error during data loading\n{0}".format(str(e)))
return False
def image_segmentation_generator(images_path, segs_path, batch_size,
n_classes, input_height, input_width,
output_height, output_width, deterministic=False,
do_augment=False,
augmentation_name="aug_all",
num_shards=1,
shard_id=0,
loss_type=0):
img_seg_pairs = get_pairs_from_paths(images_path, segs_path)
if num_shards > 1:
np.random.seed(nprandomseed)
if not deterministic:
random.shuffle(img_seg_pairs)
if num_shards > 1:
process_subset_size = len(img_seg_pairs) // num_shards
ids_from = process_subset_size*shard_id
ids_to = ids_from + process_subset_size
img_seg_pairs_ = img_seg_pairs[ids_from : ids_to]
# make sure all samples are used
img_seg_pairs_ += img_seg_pairs[process_subset_size*num_shards:]
img_seg_pairs = img_seg_pairs_
#print(f'Image Generator : [ {shard_id} ] , {len(img_seg_pairs)} - {img_seg_pairs[:10]}')
print(f'Ids from to : [{shard_id}], {ids_from} to {ids_to}')
zipped = itertools.cycle(img_seg_pairs)
while True:
X = []
Y = []
for _ in range(batch_size):
im, seg = next(zipped)
im = cv2.imread(im, 1)
seg = cv2.imread(seg, 1)
if do_augment:
im, seg[:, :, 0] = augment_seg(im, seg[:, :, 0],
augmentation_name)
X.append(get_image_array(im, input_width,
input_height, ordering=IMAGE_ORDERING))
Y.append(get_segmentation_array(
seg, n_classes, output_width, output_height, loss_type=loss_type))
if not (loss_type==1 or loss_type==2):
Y = np.reshape(Y, [batch_size, output_height, output_width, n_classes])
yield np.array(X), np.array(Y)
def create_segmentation_list(generator, iterations):
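# Draws `iterations` batches from the generator and concatenates them into single arrays
# (used by cached_image_generator to cache a shard in memory).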
X=[]
Y=[]
print("Genering list: ", iterations)
for itr in tqdm(range(iterations)):
img,seg = next(generator)
X.append(img)
Y.append(seg)
X_concat = np.concatenate(X, axis=0)
Y_concat = np.concatenate(Y, axis=0)
return X_concat, Y_concat
def cached_image_generator(generator, num_shards, shard_id, batch_size, total_img_num, deterministic=False):
shard_img = total_img_num//num_shards + total_img_num%num_shards if shard_id == num_shards-1 else total_img_num//num_shards
padding = batch_size - shard_img%batch_size if shard_img%batch_size != 0 else 0
X, y = create_segment | """ Load image array from input """
if type(image_input) is np.ndarray:
# It is already an array, use it as it is
img = image_input
elif isinstance(image_input, six.string_types):
if not os.path.isfile(image_input):
raise DataLoaderError("get_image_array: path {0} doesn't exist"
.format(image_input))
img = cv2.imread(image_input, 1)
else:
raise DataLoaderError("get_image_array: Can't process input type {0}"
.format(str(type(image_input))))
if imgNorm == "sub_and_divide":
img = np.float32(cv2.resize(img, (width, height))) / 127.5 - 1
elif imgNorm == "sub_mean":
img = cv2.resize(img, (width, height))
img = img.astype(np.float32)
img[:, :, 0] -= 103.939 | identifier_body |
authorization.rs | <'a>() -> &'a str {
"sid"
}
}
/// ## Cookie Data
/// The AuthorizeCookie trait is used with a custom data structure that
/// will contain the data in the cookie. This trait provides methods
/// to store and retrieve a data structure from a cookie's string contents.
///
/// Using a request guard a route can easily check whether the user is
/// a valid Administrator or any custom user type.
///
/// ### Example
///
/// ```
///
/// use rocket::{Request, Outcome};
/// use rocket::request::FromRequest;
/// use auth::authorization::*;
/// // Define a custom data type that hold the cookie information
/// pub struct AdministratorCookie {
/// pub userid: u32,
/// pub username: String,
/// pub display: Option<String>,
/// }
///
/// // Implement CookieId for AdministratorCookie
/// impl CookieId for AdministratorCookie {
/// // Tell
/// type CookieType = AdministratorCookie;
/// fn cookie_id<'a>() -> &'a str {
/// "asid"
/// }
/// }
///
/// // Implement AuthorizeCookie for the AdministratorCookie
/// // This code can be changed to use other serialization formats
/// impl AuthorizeCookie for AdministratorCookie {
/// fn store_cookie(&self) -> String {
/// ::serde_json::to_string(self).expect("Could not serialize structure")
/// }
/// fn retrieve_cookie(string: String) -> Option<Self> {
/// let mut des_buf = string.clone();
/// let des: Result<AdministratorCookie, _> = ::serde_json::from_str(&mut des_buf);
/// if let Ok(cooky) = des {
/// Some(cooky)
/// } else {
/// None
/// }
/// }
/// }
///
/// // Implement FromRequest for the Cookie type to allow direct
/// // use of the type in routes, instead of through AuthCont
/// //
/// // The only part that needs to be changed is the impl and
/// // function return type; the type should match your struct
/// impl<'a, 'r> FromRequest<'a, 'r> for AdministratorCookie {
/// type Error = ();
/// // Change the return type to match your type
/// fn from_request(request: &'a Request<'r>) -> ::rocket::request::Outcome<AdministratorCookie,Self::Error>{
/// let cid = AdministratorCookie::cookie_id();
/// let mut cookies = request.cookies();
///
/// match cookies.get_private(cid) {
/// Some(cookie) => {
/// if let Some(cookie_deserialized) = AdministratorCookie::retrieve_cookie(cookie.value().to_string()) {
/// Outcome::Success(
/// cookie_deserialized
/// )
/// } else {
/// Outcome::Forward(())
/// }
/// },
/// None => Outcome::Forward(())
/// }
/// }
/// }
///
/// // In your route use the AdministratorCookie request guard to ensure
/// // that only verified administrators can reach a page
/// #[get("/administrator", rank=1)]
/// fn admin_page(admin: AdministratorCookie) -> Html<String> {
/// // Show the display field in AdministratorCookie as defined above
/// Html( format!("Welcome administrator {}!", admin.display) )
/// }
/// #[get("/administrator", rank=2)]
/// fn admin_login_form() -> Html<String> {
/// // Html form here, see the example directory for a complete example
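/// // e.g. (sketch only): Html(r#"<form method="post" action="/administrator">...</form>"#.to_string())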
/// }
///
/// fn main() {
/// rocket::ignite().mount("/", routes![admin_page, admin_login_form]).launch();
/// }
///
/// ```
///
pub trait AuthorizeCookie : CookieId {
/// Serialize the cookie data type - must be implemented by cookie data type
fn store_cookie(&self) -> String;
/// Deserialize the cookie data type - must be implemented by cookie data type
fn retrieve_cookie(String) -> Option<Self> where Self: Sized;
/// Deletes a cookie. This does not need to be implemented, it defaults to removing
/// the private key with the named specified by cookie_id() method.
fn delete_cookie(cookies: &mut Cookies) {
cookies.remove_private(
Cookie::named( Self::cookie_id() )
);
}
}
/// ## Form Data
/// The AuthorizeForm trait handles collecting a submitted login form into a
/// data structure and authenticating the credentials inside. It also contains
/// default methods to process the login and conditionally redirecting the user
/// to the correct page depending upon successful authentication or failure.
///
/// ### Authentication Failure
/// Upon failure the user is redirected to a page with a query string specified
/// by the `fail_url()` method. This allows the specified username to persist
/// across attempts.
///
/// ### Flash Message
/// The `flash_redirect()` method redirects the user but also adds a cookie
/// called a flash message that once read is deleted immediately. This is used
/// to indicate why the authentication failed. If the user refreshes the page
/// after failing to login the message that appears above the login indicating
/// why it failed will disappear. To redirect without a flash message use the
/// `redirect()` method instead of `flash_redirect()`.
///
/// ## Example
/// ```
///
/// use rocket::{Request, Outcome};
/// use std::collections::HashMap;
/// use auth::authorization::*;
/// // Create the structure that will contain the login form data
/// #[derive(Debug, Clone, Serialize, Deserialize)]
/// pub struct AdministratorForm {
/// pub username: String,
/// pub password: String,
/// }
///
/// // Implement CookieId for the form structure
/// impl CookieId for AdministratorForm {
/// fn cookie_id<'a>() -> &'a str {
/// "acid"
/// }
/// }
///
/// // Implement the AuthorizeForm for the form structure
/// impl AuthorizeForm for AdministratorForm {
/// type CookieType = AdministratorCookie;
///
/// /// Authenticate the credentials inside the login form
/// fn authenticate(&self) -> Result<Self::CookieType, AuthFail> {
/// // The code in this function should be replace with whatever
/// // you use to authenticate users.
/// println!("Authenticating {} with password: {}", &self.username, &self.password);
/// if &self.username == "administrator" && &self.password != "" {
/// Ok(
/// AdministratorCookie {
/// userid: 1,
/// username: "administrator".to_string(),
/// display: Some("Administrator".to_string()),
/// }
/// )
/// } else {
/// Err(
/// AuthFail::new(self.username.to_string(), "Incorrect username".to_string())
/// )
/// }
/// }
///
/// /// Create a new login form instance
/// fn new_form(user: &str, pass: &str, _extras: Option<HashMap<String, String>>) -> Self {
/// AdministratorForm {
/// username: user.to_string(),
/// password: pass.to_string(),
/// }
/// }
/// }
///
/// # fn main() {}
///
/// ```
///
/// # Example Code
/// For more detailed example please see the example directory.
/// The example directory contains a fully working example of processing
/// and checking login information.
///
pub trait AuthorizeForm : CookieId {
type CookieType: AuthorizeCookie;
/// Determine whether the login form structure contains
/// valid credentials, otherwise send back the username and
/// a message indicating why it failed in the `AuthFail` struct
///
/// Must be implemented on the login form structure
fn authenticate(&self) -> Result<Self::CookieType, AuthFail>;
/// Create a new login form Structure with
/// the specified username and password.
/// The first parameter is the username, then password,
/// and then optionally a HashMap containing any extra fields.
///
/// Must be implemented on the login form structure
///
// /// The password is a u8 slice, allowing passwords to be stored without
// /// being converted to hex. The slice is sufficient because new_form()
// /// is called within the from_form() function, so when the password is
// /// collected as a vector of bytes the reference to those bytes are sent
// /// to the new_form() method.
fn new_form(&str, &str, Option<HashMap<String, String>>) -> Self;
/// The `fail_url()` method is used to create a url that the user is sent
/// to when the authentication fails. The default implementation
/// redirects the user to the /page?user=<attempted_username>
/// which enables the form to display the username that was attempted
/// and unlike FlashMessages it will persist across refreshes
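/// For example, `Self::fail_url("bob")` yields `"?user=bob"`.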
fn fail_url(user: &str) -> String {
let mut output = String::with_capacity(user.len() + 10);
output.push_str("?user=");
output.push_str(user);
output
}
/// Sanitizes the username before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
/// `sanitization` module. This can be overriden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized | cookie_id | identifier_name |
|
authorization.rs | &'a str {
/// "asid"
/// }
/// }
///
/// // Implement AuthorizeCookie for the AdministratorCookie
/// // This code can be changed to use other serialization formats
/// impl AuthorizeCookie for AdministratorCookie {
/// fn store_cookie(&self) -> String {
/// ::serde_json::to_string(self).expect("Could not serialize structure")
/// }
/// fn retrieve_cookie(string: String) -> Option<Self> {
/// let mut des_buf = string.clone();
/// let des: Result<AdministratorCookie, _> = ::serde_json::from_str(&mut des_buf);
/// if let Ok(cooky) = des {
/// Some(cooky)
/// } else {
/// None
/// }
/// }
/// }
///
/// // Implement FromRequest for the Cookie type to allow direct
/// // use of the type in routes, instead of through AuthCont
/// //
/// // The only part that needs to be changed is the impl and
/// // function return type; the type should match your struct
/// impl<'a, 'r> FromRequest<'a, 'r> for AdministratorCookie { | /// // Change the return type to match your type
/// fn from_request(request: &'a Request<'r>) -> ::rocket::request::Outcome<AdministratorCookie,Self::Error>{
/// let cid = AdministratorCookie::cookie_id();
/// let mut cookies = request.cookies();
///
/// match cookies.get_private(cid) {
/// Some(cookie) => {
/// if let Some(cookie_deserialized) = AdministratorCookie::retrieve_cookie(cookie.value().to_string()) {
/// Outcome::Success(
/// cookie_deserialized
/// )
/// } else {
/// Outcome::Forward(())
/// }
/// },
/// None => Outcome::Forward(())
/// }
/// }
/// }
///
/// // In your route use the AdministratorCookie request guard to ensure
/// // that only verified administrators can reach a page
/// #[get("/administrator", rank=1)]
/// fn admin_page(admin: AdministratorCookie) -> Html<String> {
/// // Show the display field in AdministratorCookie as defined above
/// Html( format!("Welcome administrator {}!", admin.display) )
/// }
/// #[get("/administrator", rank=2)]
/// fn admin_login_form() -> Html<String> {
/// // Html form here, see the example directory for a complete example
/// }
///
/// fn main() {
/// rocket::ignite().mount("/", routes![admin_page, admin_login_form]).launch();
/// }
///
/// ```
///
pub trait AuthorizeCookie : CookieId {
/// Serialize the cookie data type - must be implemented by cookie data type
fn store_cookie(&self) -> String;
/// Deserialize the cookie data type - must be implemented by cookie data type
fn retrieve_cookie(String) -> Option<Self> where Self: Sized;
/// Deletes a cookie. This does not need to be implemented, it defaults to removing
/// the private key with the named specified by cookie_id() method.
fn delete_cookie(cookies: &mut Cookies) {
cookies.remove_private(
Cookie::named( Self::cookie_id() )
);
}
}
/// ## Form Data
/// The AuthorizeForm trait handles collecting a submitted login form into a
/// data structure and authenticating the credentials inside. It also contains
/// default methods to process the login and conditionally redirecting the user
/// to the correct page depending upon successful authentication or failure.
///
/// ### Authentication Failure
/// Upon failure the user is redirected to a page with a query string specified
/// by the `fail_url()` method. This allows the specified username to persist
/// across attempts.
///
/// ### Flash Message
/// The `flash_redirect()` method redirects the user but also adds a cookie
/// called a flash message that once read is deleted immediately. This is used
/// to indicate why the authentication failed. If the user refreshes the page
/// after failing to login the message that appears above the login indicating
/// why it failed will disappear. To redirect without a flash message use the
/// `redirect()` method instead of `flash_redirect()`.
///
/// ## Example
/// ```
///
/// use rocket::{Request, Outcome};
/// use std::collections::HashMap;
/// use auth::authorization::*;
/// // Create the structure that will contain the login form data
/// #[derive(Debug, Clone, Serialize, Deserialize)]
/// pub struct AdministratorForm {
/// pub username: String,
/// pub password: String,
/// }
///
/// // Implement CookieId for the form structure
/// impl CookieId for AdministratorForm {
/// fn cookie_id<'a>() -> &'a str {
/// "acid"
/// }
/// }
///
/// // Implement the AuthorizeForm for the form structure
/// impl AuthorizeForm for AdministratorForm {
/// type CookieType = AdministratorCookie;
///
/// /// Authenticate the credentials inside the login form
/// fn authenticate(&self) -> Result<Self::CookieType, AuthFail> {
/// // The code in this function should be replace with whatever
/// // you use to authenticate users.
/// println!("Authenticating {} with password: {}", &self.username, &self.password);
/// if &self.username == "administrator" && &self.password != "" {
/// Ok(
/// AdministratorCookie {
/// userid: 1,
/// username: "administrator".to_string(),
/// display: Some("Administrator".to_string()),
/// }
/// )
/// } else {
/// Err(
/// AuthFail::new(self.username.to_string(), "Incorrect username".to_string())
/// )
/// }
/// }
///
/// /// Create a new login form instance
/// fn new_form(user: &str, pass: &str, _extras: Option<HashMap<String, String>>) -> Self {
/// AdministratorForm {
/// username: user.to_string(),
/// password: pass.to_string(),
/// }
/// }
/// }
///
/// # fn main() {}
///
/// ```
///
/// # Example Code
/// For more detailed example please see the example directory.
/// The example directory contains a fully working example of processing
/// and checking login information.
///
pub trait AuthorizeForm : CookieId {
type CookieType: AuthorizeCookie;
/// Determine whether the login form structure contains
/// valid credentials, otherwise send back the username and
/// a message indicating why it failed in the `AuthFail` struct
///
/// Must be implemented on the login form structure
fn authenticate(&self) -> Result<Self::CookieType, AuthFail>;
/// Create a new login form Structure with
/// the specified username and password.
/// The first parameter is the username, then password,
/// and then optionally a HashMap containing any extra fields.
///
/// Must be implemented on the login form structure
///
// /// The password is a u8 slice, allowing passwords to be stored without
// /// being converted to hex. The slice is sufficient because new_form()
// /// is called within the from_form() function, so when the password is
// /// collected as a vector of bytes the reference to those bytes are sent
// /// to the new_form() method.
fn new_form(&str, &str, Option<HashMap<String, String>>) -> Self;
/// The `fail_url()` method is used to create a url that the user is sent
/// to when the authentication fails. The default implementation
    /// redirects the user to /page?user=<attempted_username>
/// which enables the form to display the username that was attempted
/// and unlike FlashMessages it will persist across refreshes
fn fail_url(user: &str) -> String {
let mut output = String::with_capacity(user.len() + 10);
output.push_str("?user=");
output.push_str(user);
output
}
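    // Added illustration (hypothetical form type, not part of the original source):
    // with this default implementation, `AdministratorForm::fail_url("bob")` yields
    // "?user=bob", so a failed attempt redirects to e.g. "/admin_login?user=bob".
    //
    // assert_eq!(AdministratorForm::fail_url("bob"), "?user=bob");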
/// Sanitizes the username before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_username(string: &str) -> String {
sanitize(string)
}
/// Sanitizes the password before storing in the login form structure.
    /// The default implementation uses the `sanitize_password()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_password(string: &str) -> String {
sanitize_password(string)
}
/// Sanitizes any extra variables before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_extras(string: &str) -> String {
sanitize(string)
}
    /// Redirect the user to one page on successful authentication or
    /// another page (with a `FlashMessage` indicating why) if authentication fails.

authorization.rs

/// println!("Authenticating {} with password: {}", &self.username, &self.password);
/// if &self.username == "administrator" && &self.password != "" {
/// Ok(
/// AdministratorCookie {
/// userid: 1,
/// username: "administrator".to_string(),
/// display: Some("Administrator".to_string()),
/// }
/// )
/// } else {
/// Err(
/// AuthFail::new(self.username.to_string(), "Incorrect username".to_string())
/// )
/// }
/// }
///
/// /// Create a new login form instance
/// fn new_form(user: &str, pass: &str, _extras: Option<HashMap<String, String>>) -> Self {
/// AdministratorForm {
/// username: user.to_string(),
/// password: pass.to_string(),
/// }
/// }
/// }
///
/// # fn main() {}
///
/// ```
///
/// # Example Code
/// For a more detailed example, please see the example directory.
/// The example directory contains a fully working example of processing
/// and checking login information.
///
pub trait AuthorizeForm : CookieId {
type CookieType: AuthorizeCookie;
    /// Determine whether the login form structure contains
/// valid credentials, otherwise send back the username and
/// a message indicating why it failed in the `AuthFail` struct
///
/// Must be implemented on the login form structure
fn authenticate(&self) -> Result<Self::CookieType, AuthFail>;
/// Create a new login form Structure with
/// the specified username and password.
/// The first parameter is the username, then password,
/// and then optionally a HashMap containing any extra fields.
///
/// Must be implemented on the login form structure
///
// /// The password is a u8 slice, allowing passwords to be stored without
// /// being converted to hex. The slice is sufficient because new_form()
// /// is called within the from_form() function, so when the password is
// /// collected as a vector of bytes the reference to those bytes are sent
// /// to the new_form() method.
fn new_form(&str, &str, Option<HashMap<String, String>>) -> Self;
/// The `fail_url()` method is used to create a url that the user is sent
/// to when the authentication fails. The default implementation
    /// redirects the user to /page?user=<attempted_username>
/// which enables the form to display the username that was attempted
/// and unlike FlashMessages it will persist across refreshes
fn fail_url(user: &str) -> String {
let mut output = String::with_capacity(user.len() + 10);
output.push_str("?user=");
output.push_str(user);
output
}
/// Sanitizes the username before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_username(string: &str) -> String {
sanitize(string)
}
/// Sanitizes the password before storing in the login form structure.
    /// The default implementation uses the `sanitize_password()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_password(string: &str) -> String {
sanitize_password(string)
}
/// Sanitizes any extra variables before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_extras(string: &str) -> String {
sanitize(string)
}
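    // --- Added illustrative sketch (not part of the original source) ---
    // Overriding one of the default sanitizers inside an
    // `impl AuthorizeForm for {login structure}` block, e.g. to lowercase usernames
    // before sanitizing them. `AdministratorForm`/`AdministratorCookie` are the types
    // from the documentation examples; `authenticate()` and `new_form()` are elided.
    //
    // impl AuthorizeForm for AdministratorForm {
    //     type CookieType = AdministratorCookie;
    //     /* authenticate() and new_form() as shown in the trait documentation */
    //     fn clean_username(string: &str) -> String {
    //         sanitize(&string.to_lowercase())
    //     }
    // }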
/// Redirect the user to one page on successful authentication or
/// another page (with a `FlashMessage` indicating why) if authentication fails.
///
/// `FlashMessage` is used to indicate why the authentication failed
/// this is so that the user can see why it failed but when they refresh
/// it will disappear, enabling a clean start, but with the user name
/// from the url's query string (determined by `fail_url()`)
fn flash_redirect(&self, ok_redir: impl Into<String>, err_redir: impl Into<String>, cookies: &mut Cookies) -> Result<Redirect, Flash<Redirect>> {
match self.authenticate() {
Ok(cooky) => {
let cid = Self::cookie_id();
let contents = cooky.store_cookie();
cookies.add_private(Cookie::new(cid, contents));
Ok(Redirect::to(ok_redir.into()))
},
Err(fail) => {
let mut furl = err_redir.into();
if &fail.user != "" {
let furl_qrystr = Self::fail_url(&fail.user);
furl.push_str(&furl_qrystr);
}
Err( Flash::error(Redirect::to(furl), &fail.msg) )
},
}
}
/// Redirect the user to one page on successful authentication or
/// another page if authentication fails.
fn redirect(&self, ok_redir: &str, err_redir: &str, cookies: &mut Cookies) -> Result<Redirect, Redirect> {
match self.authenticate() {
Ok(cooky) => {
let cid = Self::cookie_id();
let contents = cooky.store_cookie();
cookies.add_private(Cookie::new(cid, contents));
Ok(Redirect::to(ok_redir.to_string()))
},
Err(fail) => {
let mut furl = String::from(err_redir);
if &fail.user != "" {
let furl_qrystr = Self::fail_url(&fail.user);
furl.push_str(&furl_qrystr);
}
Err( Redirect::to(furl) )
},
}
}
}
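// --- Added usage sketch (not part of the original source) ---
// How a login handler might drive the default `flash_redirect()` method. In a real
// route the form value comes from the submitted request body (see the example
// directory); the credentials and redirect targets below are placeholders.
//
// let form = AdministratorForm::new_form("administrator", "s3cret", None);
// let outcome: Result<Redirect, Flash<Redirect>> =
//     form.flash_redirect("/administrator", "/admin_login", &mut cookies);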
impl<T: AuthorizeCookie + Clone> AuthCont<T> {
pub fn cookie_data(&self) -> T {
// Todo: change the signature from &self to self
// and remove the .clone() method call
self.cookie.clone()
}
}
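// Added sketch (not part of the original source): reading the typed cookie data out
// of the container inside a route; `AdministratorCookie` is the hypothetical cookie
// type used throughout the documentation examples.
//
// fn protected(container: AuthCont<AdministratorCookie>) -> String {
//     let admin: AdministratorCookie = container.cookie_data();
//     format!("user id: {}", admin.userid)
// }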
/// # Request Guard
/// Request guard for the AuthCont (Authentication Container).
/// This allows a route to call a user type like:
///
/// ```rust,no_run
///
/// use auth::authorization::*;
/// # use administration:*;
/// use rocket;
/// #[get("/protected")]
/// fn protected(container: AuthCont<AdministratorCookie>) -> Html<String> {
/// let admin = container.cookie;
/// String::new()
/// }
///
/// # fn main() {
/// # rocket::ignite().mount("/", routes![]).launch();
/// # }
///
/// ```
///
impl<'a, 'r, T: AuthorizeCookie> FromRequest<'a, 'r> for AuthCont<T> {
type Error = ();
fn from_request(request: &'a Request<'r>) -> ::rocket::request::Outcome<AuthCont<T>,Self::Error>{
let cid = T::cookie_id();
let mut cookies = request.cookies();
match cookies.get_private(cid) {
Some(cookie) => {
if let Some(cookie_deserialized) = T::retrieve_cookie(cookie.value().to_string()) {
Outcome::Success(
AuthCont {
cookie: cookie_deserialized,
}
)
} else {
Outcome::Forward(())
}
},
None => Outcome::Forward(())
}
}
}
/// # Collecting Login Form Data
/// If your login form requires more than just a username and password the
/// extras parameter, in `AuthorizeForm::new_form(user, pass, extras)`, holds
/// all other fields in a `HashMap<String, String>` to allow processing any
/// field that was submitted. The username and password are separate because
/// those are universal fields.
///
/// ## Custom Username/Password Field Names
/// By default the function will look for a username and a password field.
/// If your form does not use those particular names you can always use the
/// extras `HashMap` to retrieve the username and password when using different
/// input box names. The function will return `Ok()` even if no username or
/// password was entered; this is to allow custom field names to be accessed
/// and authenticated by the `authenticate()` method.
impl<'f, A: AuthorizeForm> FromForm<'f> for LoginCont<A> {
type Error = &'static str;
fn from_form(form_items: &mut FormItems<'f>, _strict: bool) -> Result<Self, Self::Error> {
let mut user: String = String::new();
let mut pass: String = String::new();
let mut extras: HashMap<String, String> = HashMap::new();
for FormItem { key, value, .. } in form_items {
match key.as_str(){
"username" => {
user = A::clean_username(&value.url_decode().unwrap_or(String::new()));
},
"password" => | {
pass = A::clean_password(&value.url_decode().unwrap_or(String::new()));
} | conditional_block |
|
authorization.rs | -> &'a str {
/// "asid"
/// }
/// }
///
/// // Implement AuthorizeCookie for the AdministratorCookie
/// // This code can be changed to use other serialization formats
/// impl AuthorizeCookie for AdministratorCookie {
/// fn store_cookie(&self) -> String {
/// ::serde_json::to_string(self).expect("Could not serialize structure")
/// }
/// fn retrieve_cookie(string: String) -> Option<Self> {
/// let mut des_buf = string.clone();
/// let des: Result<AdministratorCookie, _> = ::serde_json::from_str(&mut des_buf);
/// if let Ok(cooky) = des {
/// Some(cooky)
/// } else {
/// None
/// }
/// }
/// }
///
/// // Implement FromRequest for the Cookie type to allow direct
/// // use of the type in routes, instead of through AuthCont
/// //
/// // The only part that needs to be changed is the impl and
/// // function return type; the type should match your struct
/// impl<'a, 'r> FromRequest<'a, 'r> for AdministratorCookie {
/// type Error = ();
/// // Change the return type to match your type
/// fn from_request(request: &'a Request<'r>) -> ::rocket::request::Outcome<AdministratorCookie,Self::Error>{
/// let cid = AdministratorCookie::cookie_id();
/// let mut cookies = request.cookies();
///
/// match cookies.get_private(cid) {
/// Some(cookie) => {
/// if let Some(cookie_deserialized) = AdministratorCookie::retrieve_cookie(cookie.value().to_string()) {
/// Outcome::Success(
/// cookie_deserialized
/// )
/// } else {
/// Outcome::Forward(())
/// }
/// },
/// None => Outcome::Forward(())
/// }
/// }
/// }
///
/// // In your route use the AdministratorCookie request guard to ensure
/// // that only verified administrators can reach a page
/// #[get("/administrator", rank=1)]
/// fn admin_page(admin: AdministratorCookie) -> Html<String> {
/// // Show the display field in AdministratorCookie as defined above
/// Html( format!("Welcome administrator {}!", admin.display.unwrap_or(String::new())) )
/// }
/// #[get("/administrator", rank=2)]
/// fn admin_login_form() -> Html<String> {
/// // Html form here, see the example directory for a complete example
/// }
///
/// fn main() {
/// rocket::ignite().mount("/", routes![admin_page, admin_login_form]).launch();
/// }
///
/// ```
///
pub trait AuthorizeCookie : CookieId {
/// Serialize the cookie data type - must be implemented by cookie data type
fn store_cookie(&self) -> String;
/// Deserialize the cookie data type - must be implemented by cookie data type
fn retrieve_cookie(String) -> Option<Self> where Self: Sized;
/// Deletes a cookie. This does not need to be implemented, it defaults to removing
    /// the private cookie with the name specified by the `cookie_id()` method.
    fn delete_cookie(cookies: &mut Cookies) {
        cookies.remove_private(
            Cookie::named( Self::cookie_id() )
        );
    }
}
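// --- Added illustrative sketch (not part of the original source) ---
// The default `delete_cookie()` removes the private cookie named by `cookie_id()`,
// so a logout route only needs to call it. `AdministratorCookie` is the type from the
// documentation example above; the route path is a placeholder.
//
// #[get("/logout")]
// fn logout(mut cookies: Cookies) -> Redirect {
//     AdministratorCookie::delete_cookie(&mut cookies);
//     Redirect::to("/".to_string())
// }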
/// ## Form Data
/// The AuthorizeForm trait handles collecting a submitted login form into a
/// data structure and authenticating the credentials inside. It also contains
/// default methods to process the login and conditionally redirect the user
/// to the correct page depending upon successful authentication or failure.
///
/// ### Authentication Failure
/// Upon failure the user is redirected to a page with a query string specified
/// by the `fail_url()` method. This allows the specified username to persist
/// across attempts.
///
/// ### Flash Message
/// The `flash_redirect()` method redirects the user but also adds a cookie
/// called a flash message that once read is deleted immediately. This is used
/// to indicate why the authentication failed. If the user refreshes the page
/// after failing to log in, the message shown above the login form explaining
/// why the attempt failed will disappear. To redirect without a flash message use the
/// `redirect()` method instead of `flash_redirect()`.
///
/// ## Example
/// ```
///
/// use rocket::{Request, Outcome};
/// use std::collections::HashMap;
/// use auth::authorization::*;
/// // Create the structure that will contain the login form data
/// #[derive(Debug, Clone, Serialize, Deserialize)]
/// pub struct AdministratorForm {
/// pub username: String,
/// pub password: String,
/// }
///
/// // Implement CookieId for the form structure
/// impl CookieId for AdministratorForm {
/// fn cookie_id<'a>() -> &'a str {
/// "acid"
/// }
/// }
///
/// // Implement the AuthorizeForm for the form structure
/// impl AuthorizeForm for AdministratorForm {
/// type CookieType = AdministratorCookie;
///
/// /// Authenticate the credentials inside the login form
/// fn authenticate(&self) -> Result<Self::CookieType, AuthFail> {
/// // The code in this function should be replaced with whatever
/// // you use to authenticate users.
/// println!("Authenticating {} with password: {}", &self.username, &self.password);
/// if &self.username == "administrator" && &self.password != "" {
/// Ok(
/// AdministratorCookie {
/// userid: 1,
/// username: "administrator".to_string(),
/// display: Some("Administrator".to_string()),
/// }
/// )
/// } else {
/// Err(
/// AuthFail::new(self.username.to_string(), "Incorrect username".to_string())
/// )
/// }
/// }
///
/// /// Create a new login form instance
/// fn new_form(user: &str, pass: &str, _extras: Option<HashMap<String, String>>) -> Self {
/// AdministratorForm {
/// username: user.to_string(),
/// password: pass.to_string(),
/// }
/// }
/// }
///
/// # fn main() {}
///
/// ```
///
/// # Example Code
/// For a more detailed example, please see the example directory.
/// The example directory contains a fully working example of processing
/// and checking login information.
///
pub trait AuthorizeForm : CookieId {
type CookieType: AuthorizeCookie;
    /// Determine whether the login form structure contains
/// valid credentials, otherwise send back the username and
/// a message indicating why it failed in the `AuthFail` struct
///
/// Must be implemented on the login form structure
fn authenticate(&self) -> Result<Self::CookieType, AuthFail>;
/// Create a new login form Structure with
/// the specified username and password.
/// The first parameter is the username, then password,
/// and then optionally a HashMap containing any extra fields.
///
/// Must be implemented on the login form structure
///
// /// The password is a u8 slice, allowing passwords to be stored without
// /// being converted to hex. The slice is sufficient because new_form()
// /// is called within the from_form() function, so when the password is
// /// collected as a vector of bytes the reference to those bytes are sent
// /// to the new_form() method.
fn new_form(&str, &str, Option<HashMap<String, String>>) -> Self;
/// The `fail_url()` method is used to create a url that the user is sent
/// to when the authentication fails. The default implementation
    /// redirects the user to /page?user=<attempted_username>
/// which enables the form to display the username that was attempted
/// and unlike FlashMessages it will persist across refreshes
fn fail_url(user: &str) -> String {
let mut output = String::with_capacity(user.len() + 10);
output.push_str("?user=");
output.push_str(user);
output
}
/// Sanitizes the username before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_username(string: &str) -> String {
sanitize(string)
}
/// Sanitizes the password before storing in the login form structure.
    /// The default implementation uses the `sanitize_password()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_password(string: &str) -> String {
sanitize_password(string)
}
/// Sanitizes any extra variables before storing in the login form structure.
/// The default implementation uses the `sanitize()` function in the
    /// `sanitization` module. This can be overridden in your
/// `impl AuthorizeForm for {login structure}` implementation to
/// customize how the text is cleaned/sanitized.
fn clean_extras(string: &str) -> String {
sanitize(string)
}
    /// Redirect the user to one page on successful authentication or
    /// another page (with a `FlashMessage` indicating why) if authentication fails.
binder.rs

use {
    crate::{
        capability::{
CapabilityProvider, CapabilitySource, ComponentCapability, InternalCapability,
OptionalTask,
},
channel,
model::{
component::{BindReason, WeakComponentInstance},
error::ModelError,
hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
model::Model,
routing::report_routing_failure,
},
},
async_trait::async_trait,
cm_rust::{CapabilityName, CapabilityPath, ProtocolDecl},
fuchsia_async as fasync, fuchsia_zircon as zx,
lazy_static::lazy_static,
moniker::{AbsoluteMoniker, AbsoluteMonikerBase, ExtendedMoniker},
std::{
path::PathBuf,
sync::{Arc, Weak},
},
};
lazy_static! {
pub static ref BINDER_SERVICE: CapabilityName = "fuchsia.component.Binder".into();
pub static ref BINDER_CAPABILITY: ComponentCapability =
ComponentCapability::Protocol(ProtocolDecl {
name: BINDER_SERVICE.clone(),
source_path: Some(CapabilityPath {
basename: "fuchsia.component.Binder".into(),
dirname: "svc".into()
}),
});
}
/// Implementation of `fuchsia.component.Binder` FIDL protocol.
pub struct BinderCapabilityProvider {
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
}
impl BinderCapabilityProvider {
pub fn new(
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
) -> Self {
Self { source, target, host }
}
}
#[async_trait]
impl CapabilityProvider for BinderCapabilityProvider {
async fn open(
self: Box<Self>,
_flags: u32,
_open_mode: u32,
_relative_path: PathBuf,
server_end: &mut zx::Channel,
) -> Result<OptionalTask, ModelError> {
let host = self.host.clone();
let target = self.target.clone();
let source = self.source.clone();
let server_end = channel::take_channel(server_end);
Ok(fasync::Task::spawn(async move {
if let Err(err) = host.bind(source).await {
let res = target.upgrade().map_err(|e| ModelError::from(e));
match res {
Ok(target) => {
report_routing_failure(&target, &*BINDER_CAPABILITY, &err, server_end)
.await;
}
Err(err) => {
log::warn!("failed to upgrade reference to {}: {}", target.moniker, err);
}
}
}
})
.into())
}
}
// A `Hook` that serves the `fuchsia.component.Binder` FIDL protocol.
#[derive(Clone)]
pub struct BinderCapabilityHost {
model: Weak<Model>,
}
impl BinderCapabilityHost {
pub fn new(model: Weak<Model>) -> Self {
Self { model }
}
pub fn hooks(self: &Arc<Self>) -> Vec<HooksRegistration> {
vec![HooksRegistration::new(
"BinderCapabilityHost",
vec![EventType::CapabilityRouted],
Arc::downgrade(self) as Weak<dyn Hook>,
)]
}
pub async fn bind(&self, source: WeakComponentInstance) -> Result<(), ModelError> {
let source = source.upgrade().map_err(|e| ModelError::from(e))?;
source.bind(&BindReason::Binder).await?;
Ok(())
}
async fn on_scoped_framework_capability_routed_async<'a>(
self: Arc<Self>,
source: WeakComponentInstance,
target_moniker: AbsoluteMoniker,
capability: &'a InternalCapability,
capability_provider: Option<Box<dyn CapabilityProvider>>,
) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
// If some other capability has already been installed, then there's nothing to
// do here.
if capability_provider.is_none() && capability.matches_protocol(&BINDER_SERVICE) {
let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?;
let target =
WeakComponentInstance::new(&model.look_up(&target_moniker.to_partial()).await?);
Ok(Some(Box::new(BinderCapabilityProvider::new(source, target, self.clone()))
as Box<dyn CapabilityProvider>))
} else {
Ok(capability_provider)
}
}
}
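// --- Added note (assumption, not taken from this file) ---
// Rough wiring sketch: the builtin environment is expected to construct a
// `BinderCapabilityHost` and install its hooks on the model so that
// `CapabilityRouted` events reach `on()` above. The registration API below is
// approximate and shown only to illustrate how `hooks()` is consumed.
//
// let binder_host = Arc::new(BinderCapabilityHost::new(Arc::downgrade(&model)));
// hooks.install(binder_host.hooks()).await;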
#[async_trait]
impl Hook for BinderCapabilityHost {
async fn on(self: Arc<Self>, event: &Event) -> Result<(), ModelError> {
if let Ok(EventPayload::CapabilityRouted {
source: CapabilitySource::Framework { capability, component },
capability_provider,
}) = &event.result
{
let target_moniker = match &event.target_moniker {
ExtendedMoniker::ComponentManager => {
Err(ModelError::UnexpectedComponentManagerMoniker)
}
ExtendedMoniker::ComponentInstance(moniker) => Ok(moniker),
}?;
let mut capability_provider = capability_provider.lock().await;
*capability_provider = self
.on_scoped_framework_capability_routed_async(
component.clone(),
target_moniker.clone(),
&capability,
capability_provider.take(),
)
.await?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{
builtin_environment::BuiltinEnvironment,
capability::CapabilityProvider,
model::{
events::{source::EventSource, stream::EventStream},
testing::test_helpers::*,
},
},
cm_rust::{self, CapabilityName, ComponentDecl, EventMode},
cm_rust_testing::*,
fidl::{client::Client, handle::AsyncChannel},
fuchsia_zircon as zx,
futures::{lock::Mutex, StreamExt},
matches::assert_matches,
moniker::AbsoluteMoniker,
std::path::PathBuf,
};
struct BinderCapabilityTestFixture {
builtin_environment: Arc<Mutex<BuiltinEnvironment>>,
}
impl BinderCapabilityTestFixture {
async fn new(components: Vec<(&'static str, ComponentDecl)>) -> Self {
let TestModelResult { builtin_environment, .. } =
TestEnvironmentBuilder::new().set_components(components).build().await;
BinderCapabilityTestFixture { builtin_environment }
}
async fn new_event_stream(
&self,
events: Vec<CapabilityName>,
mode: EventMode,
) -> (EventSource, EventStream) {
new_event_stream(self.builtin_environment.clone(), events, mode).await
}
async fn provider(
&self,
source: AbsoluteMoniker,
target: AbsoluteMoniker,
) -> Box<BinderCapabilityProvider> {
let builtin_environment = self.builtin_environment.lock().await;
let host = builtin_environment.binder_capability_host.clone();
let source = builtin_environment
.model
.look_up(&source.to_partial())
.await
.expect("failed to look up source moniker");
let target = builtin_environment
.model
.look_up(&target.to_partial())
.await
.expect("failed to look up target moniker");
Box::new(BinderCapabilityProvider::new(
WeakComponentInstance::new(&source),
WeakComponentInstance::new(&target),
host,
))
}
}
#[fuchsia::test]
async fn component_starts_on_open() {
let fixture = BinderCapabilityTestFixture::new(vec![
(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("source")
.add_lazy_child("target")
.build(),
),
("source", component_decl_with_test_runner()),
("target", component_decl_with_test_runner()),
])
.await;
let (_event_source, mut event_stream) = fixture
.new_event_stream(
vec![EventType::Resolved.into(), EventType::Started.into()],
EventMode::Async,
)
.await;
let (_client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = vec!["source:0"].into();
let () = fixture
.provider(moniker.clone(), vec!["target:0"].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
assert!(event_stream.wait_until(EventType::Resolved, moniker.clone()).await.is_some());
assert!(event_stream.wait_until(EventType::Started, moniker.clone()).await.is_some());
}
// TODO(yaneury): Figure out a way to test this behavior.
#[ignore]
#[fuchsia::test]
    async fn channel_is_closed_if_component_does_not_exist() {
let fixture = BinderCapabilityTestFixture::new(vec![(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("target")
.add_lazy_child("unresolvable")
.build(),
)])
.await;
let (client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = AbsoluteMoniker::from(vec!["foo:0"]);
let () = fixture
.provider(moniker, vec![].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
let client

binder.rs

use {
    crate::{
        capability::{
CapabilityProvider, CapabilitySource, ComponentCapability, InternalCapability,
OptionalTask,
},
channel,
model::{
component::{BindReason, WeakComponentInstance},
error::ModelError,
hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
model::Model,
routing::report_routing_failure,
},
},
async_trait::async_trait,
cm_rust::{CapabilityName, CapabilityPath, ProtocolDecl},
fuchsia_async as fasync, fuchsia_zircon as zx,
lazy_static::lazy_static,
moniker::{AbsoluteMoniker, AbsoluteMonikerBase, ExtendedMoniker},
std::{
path::PathBuf,
sync::{Arc, Weak},
},
};
lazy_static! {
pub static ref BINDER_SERVICE: CapabilityName = "fuchsia.component.Binder".into();
pub static ref BINDER_CAPABILITY: ComponentCapability =
ComponentCapability::Protocol(ProtocolDecl {
name: BINDER_SERVICE.clone(),
source_path: Some(CapabilityPath {
basename: "fuchsia.component.Binder".into(),
dirname: "svc".into()
}),
});
}
/// Implementation of `fuchsia.component.Binder` FIDL protocol.
pub struct BinderCapabilityProvider {
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
}
impl BinderCapabilityProvider {
pub fn new(
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
) -> Self {
Self { source, target, host }
    }
}
#[async_trait]
impl CapabilityProvider for BinderCapabilityProvider {
async fn open(
self: Box<Self>,
_flags: u32,
_open_mode: u32,
_relative_path: PathBuf,
server_end: &mut zx::Channel,
) -> Result<OptionalTask, ModelError> {
let host = self.host.clone();
let target = self.target.clone();
let source = self.source.clone();
let server_end = channel::take_channel(server_end);
Ok(fasync::Task::spawn(async move {
if let Err(err) = host.bind(source).await {
let res = target.upgrade().map_err(|e| ModelError::from(e));
match res {
Ok(target) => {
report_routing_failure(&target, &*BINDER_CAPABILITY, &err, server_end)
.await;
}
Err(err) => {
log::warn!("failed to upgrade reference to {}: {}", target.moniker, err);
}
}
}
})
.into())
}
}
// A `Hook` that serves the `fuchsia.component.Binder` FIDL protocol.
#[derive(Clone)]
pub struct BinderCapabilityHost {
model: Weak<Model>,
}
impl BinderCapabilityHost {
pub fn new(model: Weak<Model>) -> Self {
Self { model }
}
pub fn hooks(self: &Arc<Self>) -> Vec<HooksRegistration> {
vec![HooksRegistration::new(
"BinderCapabilityHost",
vec![EventType::CapabilityRouted],
Arc::downgrade(self) as Weak<dyn Hook>,
)]
}
pub async fn bind(&self, source: WeakComponentInstance) -> Result<(), ModelError> {
let source = source.upgrade().map_err(|e| ModelError::from(e))?;
source.bind(&BindReason::Binder).await?;
Ok(())
}
async fn on_scoped_framework_capability_routed_async<'a>(
self: Arc<Self>,
source: WeakComponentInstance,
target_moniker: AbsoluteMoniker,
capability: &'a InternalCapability,
capability_provider: Option<Box<dyn CapabilityProvider>>,
) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
// If some other capability has already been installed, then there's nothing to
// do here.
if capability_provider.is_none() && capability.matches_protocol(&BINDER_SERVICE) {
let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?;
let target =
WeakComponentInstance::new(&model.look_up(&target_moniker.to_partial()).await?);
Ok(Some(Box::new(BinderCapabilityProvider::new(source, target, self.clone()))
as Box<dyn CapabilityProvider>))
} else {
Ok(capability_provider)
}
}
}
#[async_trait]
impl Hook for BinderCapabilityHost {
async fn on(self: Arc<Self>, event: &Event) -> Result<(), ModelError> {
if let Ok(EventPayload::CapabilityRouted {
source: CapabilitySource::Framework { capability, component },
capability_provider,
}) = &event.result
{
let target_moniker = match &event.target_moniker {
ExtendedMoniker::ComponentManager => {
Err(ModelError::UnexpectedComponentManagerMoniker)
}
ExtendedMoniker::ComponentInstance(moniker) => Ok(moniker),
}?;
let mut capability_provider = capability_provider.lock().await;
*capability_provider = self
.on_scoped_framework_capability_routed_async(
component.clone(),
target_moniker.clone(),
&capability,
capability_provider.take(),
)
.await?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{
builtin_environment::BuiltinEnvironment,
capability::CapabilityProvider,
model::{
events::{source::EventSource, stream::EventStream},
testing::test_helpers::*,
},
},
cm_rust::{self, CapabilityName, ComponentDecl, EventMode},
cm_rust_testing::*,
fidl::{client::Client, handle::AsyncChannel},
fuchsia_zircon as zx,
futures::{lock::Mutex, StreamExt},
matches::assert_matches,
moniker::AbsoluteMoniker,
std::path::PathBuf,
};
struct BinderCapabilityTestFixture {
builtin_environment: Arc<Mutex<BuiltinEnvironment>>,
}
impl BinderCapabilityTestFixture {
async fn new(components: Vec<(&'static str, ComponentDecl)>) -> Self {
let TestModelResult { builtin_environment, .. } =
TestEnvironmentBuilder::new().set_components(components).build().await;
BinderCapabilityTestFixture { builtin_environment }
}
async fn new_event_stream(
&self,
events: Vec<CapabilityName>,
mode: EventMode,
) -> (EventSource, EventStream) {
new_event_stream(self.builtin_environment.clone(), events, mode).await
}
async fn provider(
&self,
source: AbsoluteMoniker,
target: AbsoluteMoniker,
) -> Box<BinderCapabilityProvider> {
let builtin_environment = self.builtin_environment.lock().await;
let host = builtin_environment.binder_capability_host.clone();
let source = builtin_environment
.model
.look_up(&source.to_partial())
.await
.expect("failed to look up source moniker");
let target = builtin_environment
.model
.look_up(&target.to_partial())
.await
.expect("failed to look up target moniker");
Box::new(BinderCapabilityProvider::new(
WeakComponentInstance::new(&source),
WeakComponentInstance::new(&target),
host,
))
}
}
#[fuchsia::test]
async fn component_starts_on_open() {
let fixture = BinderCapabilityTestFixture::new(vec![
(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("source")
.add_lazy_child("target")
.build(),
),
("source", component_decl_with_test_runner()),
("target", component_decl_with_test_runner()),
])
.await;
let (_event_source, mut event_stream) = fixture
.new_event_stream(
vec![EventType::Resolved.into(), EventType::Started.into()],
EventMode::Async,
)
.await;
let (_client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = vec!["source:0"].into();
let () = fixture
.provider(moniker.clone(), vec!["target:0"].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
assert!(event_stream.wait_until(EventType::Resolved, moniker.clone()).await.is_some());
assert!(event_stream.wait_until(EventType::Started, moniker.clone()).await.is_some());
}
// TODO(yaneury): Figure out a way to test this behavior.
#[ignore]
#[fuchsia::test]
async fn channel_is_closed_if_component_does_not_exist() {
let fixture = BinderCapabilityTestFixture::new(vec![(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("target")
.add_lazy_child("unresolvable")
.build(),
)])
.await;
let (client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = AbsoluteMoniker::from(vec!["foo:0"]);
let () = fixture
.provider(moniker, vec![].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
let client

binder.rs

use {
    crate::{
        capability::{
CapabilityProvider, CapabilitySource, ComponentCapability, InternalCapability,
OptionalTask,
},
channel,
model::{
component::{BindReason, WeakComponentInstance},
error::ModelError,
hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
model::Model,
routing::report_routing_failure,
},
},
async_trait::async_trait,
cm_rust::{CapabilityName, CapabilityPath, ProtocolDecl},
fuchsia_async as fasync, fuchsia_zircon as zx,
lazy_static::lazy_static,
moniker::{AbsoluteMoniker, AbsoluteMonikerBase, ExtendedMoniker},
std::{
path::PathBuf,
sync::{Arc, Weak},
},
};
lazy_static! {
pub static ref BINDER_SERVICE: CapabilityName = "fuchsia.component.Binder".into();
pub static ref BINDER_CAPABILITY: ComponentCapability =
ComponentCapability::Protocol(ProtocolDecl {
name: BINDER_SERVICE.clone(),
source_path: Some(CapabilityPath {
basename: "fuchsia.component.Binder".into(),
dirname: "svc".into()
}),
});
}
/// Implementation of `fuchsia.component.Binder` FIDL protocol.
pub struct BinderCapabilityProvider {
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
}
impl BinderCapabilityProvider {
pub fn new(
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
) -> Self {
Self { source, target, host }
}
}
#[async_trait]
impl CapabilityProvider for BinderCapabilityProvider {
async fn open(
self: Box<Self>,
_flags: u32,
_open_mode: u32,
_relative_path: PathBuf,
server_end: &mut zx::Channel,
) -> Result<OptionalTask, ModelError> {
let host = self.host.clone();
let target = self.target.clone();
let source = self.source.clone();
let server_end = channel::take_channel(server_end);
Ok(fasync::Task::spawn(async move {
if let Err(err) = host.bind(source).await {
let res = target.upgrade().map_err(|e| ModelError::from(e));
match res {
Ok(target) => {
report_routing_failure(&target, &*BINDER_CAPABILITY, &err, server_end)
.await;
}
Err(err) => {
log::warn!("failed to upgrade reference to {}: {}", target.moniker, err);
}
}
}
})
.into())
}
}
// A `Hook` that serves the `fuchsia.component.Binder` FIDL protocol.
#[derive(Clone)]
pub struct BinderCapabilityHost {
model: Weak<Model>,
}
impl BinderCapabilityHost {
pub fn new(model: Weak<Model>) -> Self {
Self { model }
}
pub fn hooks(self: &Arc<Self>) -> Vec<HooksRegistration> {
vec![HooksRegistration::new(
"BinderCapabilityHost",
vec![EventType::CapabilityRouted],
Arc::downgrade(self) as Weak<dyn Hook>,
)]
}
pub async fn bind(&self, source: WeakComponentInstance) -> Result<(), ModelError> {
let source = source.upgrade().map_err(|e| ModelError::from(e))?;
source.bind(&BindReason::Binder).await?;
Ok(())
}
async fn on_scoped_framework_capability_routed_async<'a>(
self: Arc<Self>,
source: WeakComponentInstance,
target_moniker: AbsoluteMoniker,
capability: &'a InternalCapability,
capability_provider: Option<Box<dyn CapabilityProvider>>,
) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
// If some other capability has already been installed, then there's nothing to
// do here.
        if capability_provider.is_none() && capability.matches_protocol(&BINDER_SERVICE) {
            let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?;
            let target =
                WeakComponentInstance::new(&model.look_up(&target_moniker.to_partial()).await?);
            Ok(Some(Box::new(BinderCapabilityProvider::new(source, target, self.clone()))
                as Box<dyn CapabilityProvider>))
        } else {
Ok(capability_provider)
}
}
}
#[async_trait]
impl Hook for BinderCapabilityHost {
async fn on(self: Arc<Self>, event: &Event) -> Result<(), ModelError> {
if let Ok(EventPayload::CapabilityRouted {
source: CapabilitySource::Framework { capability, component },
capability_provider,
}) = &event.result
{
let target_moniker = match &event.target_moniker {
ExtendedMoniker::ComponentManager => {
Err(ModelError::UnexpectedComponentManagerMoniker)
}
ExtendedMoniker::ComponentInstance(moniker) => Ok(moniker),
}?;
let mut capability_provider = capability_provider.lock().await;
*capability_provider = self
.on_scoped_framework_capability_routed_async(
component.clone(),
target_moniker.clone(),
&capability,
capability_provider.take(),
)
.await?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{
builtin_environment::BuiltinEnvironment,
capability::CapabilityProvider,
model::{
events::{source::EventSource, stream::EventStream},
testing::test_helpers::*,
},
},
cm_rust::{self, CapabilityName, ComponentDecl, EventMode},
cm_rust_testing::*,
fidl::{client::Client, handle::AsyncChannel},
fuchsia_zircon as zx,
futures::{lock::Mutex, StreamExt},
matches::assert_matches,
moniker::AbsoluteMoniker,
std::path::PathBuf,
};
struct BinderCapabilityTestFixture {
builtin_environment: Arc<Mutex<BuiltinEnvironment>>,
}
impl BinderCapabilityTestFixture {
async fn new(components: Vec<(&'static str, ComponentDecl)>) -> Self {
let TestModelResult { builtin_environment, .. } =
TestEnvironmentBuilder::new().set_components(components).build().await;
BinderCapabilityTestFixture { builtin_environment }
}
async fn new_event_stream(
&self,
events: Vec<CapabilityName>,
mode: EventMode,
) -> (EventSource, EventStream) {
new_event_stream(self.builtin_environment.clone(), events, mode).await
}
async fn provider(
&self,
source: AbsoluteMoniker,
target: AbsoluteMoniker,
) -> Box<BinderCapabilityProvider> {
let builtin_environment = self.builtin_environment.lock().await;
let host = builtin_environment.binder_capability_host.clone();
let source = builtin_environment
.model
.look_up(&source.to_partial())
.await
.expect("failed to look up source moniker");
let target = builtin_environment
.model
.look_up(&target.to_partial())
.await
.expect("failed to look up target moniker");
Box::new(BinderCapabilityProvider::new(
WeakComponentInstance::new(&source),
WeakComponentInstance::new(&target),
host,
))
}
}
#[fuchsia::test]
async fn component_starts_on_open() {
let fixture = BinderCapabilityTestFixture::new(vec![
(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("source")
.add_lazy_child("target")
.build(),
),
("source", component_decl_with_test_runner()),
("target", component_decl_with_test_runner()),
])
.await;
let (_event_source, mut event_stream) = fixture
.new_event_stream(
vec![EventType::Resolved.into(), EventType::Started.into()],
EventMode::Async,
)
.await;
let (_client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = vec!["source:0"].into();
let () = fixture
.provider(moniker.clone(), vec!["target:0"].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
assert!(event_stream.wait_until(EventType::Resolved, moniker.clone()).await.is_some());
assert!(event_stream.wait_until(EventType::Started, moniker.clone()).await.is_some());
}
// TODO(yaneury): Figure out a way to test this behavior.
#[ignore]
#[fuchsia::test]
async fn channel_is_closed_if_component_does_not_exist() {
let fixture = BinderCapabilityTestFixture::new(vec![(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("target")
.add_lazy_child("unresolvable")
.build(),
)])
.await;
let (client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = AbsoluteMoniker::from(vec!["foo:0"]);
let () = fixture
.provider(moniker, vec![].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
let client | {