file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
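Each row below is one fill-in-the-middle (FIM) example: a source file split into `prefix`, `middle` (the masked span), and `suffix`, with `fim_type` recording how the span was chosen (`identifier_name`, `identifier_body`, `conditional_block`, or `random_line_split`). As a sketch of how such a row is consumed — the sentinel tokens here are illustrative assumptions, not part of this dataset:

```python
# Minimal sketch: recombining one FIM row; rows are assumed dict-like.
FIM_PREFIX, FIM_SUFFIX, FIM_MIDDLE = "<fim_prefix>", "<fim_suffix>", "<fim_middle>"

def reconstruct(row):
    """Rebuild the original source file from a FIM row."""
    return row["prefix"] + row["middle"] + row["suffix"]

def to_psm_prompt(row):
    """Format a row as a prefix-suffix-middle (PSM) training string."""
    return (FIM_PREFIX + row["prefix"]
            + FIM_SUFFIX + row["suffix"]
            + FIM_MIDDLE + row["middle"])

row = {"prefix": "def add(a, b):\n    return ", "middle": "a + b", "suffix": "\n"}
assert reconstruct(row) == "def add(a, b):\n    return a + b\n"
```

In the rows that follow, a bare `|` inside a code cell marks where the masked `middle` was cut out; the masked text reappears in the `middle` column just before the `fim_type` label.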
regions-free-region-ordering-callee.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that callees correctly infer an ordering between free regions
// that appear in their parameter list. See also
// regions-free-region-ordering-caller.rs
fn | <'a, 'b>(x: &'a &'b uint) -> &'a uint {
// It is safe to assume that 'a <= 'b due to the type of x
let y: &'b uint = &**x;
return y;
}
fn ordering2<'a, 'b>(x: &'a &'b uint, y: &'a uint) -> &'b uint {
// However, it is not safe to assume that 'b <= 'a
&*y //~ ERROR cannot infer
}
fn ordering3<'a, 'b>(x: &'a uint, y: &'b uint) -> &'a &'b uint {
// Do not infer an ordering from the return value.
let z: &'b uint = &*x;
//~^ ERROR cannot infer
fail!();
}
fn ordering4<'a, 'b>(a: &'a uint, b: &'b uint, x: |&'a &'b uint|) {
// Do not infer ordering from closure argument types.
let z: Option<&'a &'b uint> = None;
//~^ ERROR reference has a longer lifetime than the data it references
}
fn ordering5<'a, 'b>(a: &'a uint, b: &'b uint, x: Option<&'a &'b uint>) {
let z: Option<&'a &'b uint> = None;
}
fn main() {}
| ordering1 | identifier_name |
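This first row is an `identifier_name` example: only the function name `ordering1` is masked from the (pre-1.0, `uint`/`fail!()`-era) Rust test file. A hedged Python sketch of how such a split could be produced — an illustration of the labeling scheme, not this dataset's actual pipeline:

```python
import re

def mask_identifier_name(src):
    """Split src at the first declared function name ('def' or 'fn')."""
    m = re.search(r"\b(?:def|fn)\s+(\w+)", src)
    if m is None:
        return None
    return src[:m.start(1)], m.group(1), src[m.end(1):]

src = "fn ordering1<'a, 'b>(x: &'a &'b uint) -> &'a uint { /* ... */ }"
prefix, middle, suffix = mask_identifier_name(src)
assert middle == "ordering1" and prefix + middle + suffix == src
```

A real pipeline would more likely use a language-aware parser than a regex, but the prefix/middle/suffix contract is the same.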
regions-free-region-ordering-callee.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that callees correctly infer an ordering between free regions
// that appear in their parameter list. See also
// regions-free-region-ordering-caller.rs
fn ordering1<'a, 'b>(x: &'a &'b uint) -> &'a uint {
// It is safe to assume that 'a <= 'b due to the type of x
let y: &'b uint = &**x;
return y;
}
fn ordering2<'a, 'b>(x: &'a &'b uint, y: &'a uint) -> &'b uint {
// However, it is not safe to assume that 'b <= 'a
&*y //~ ERROR cannot infer
}
fn ordering3<'a, 'b>(x: &'a uint, y: &'b uint) -> &'a &'b uint {
// Do not infer an ordering from the return value.
let z: &'b uint = &*x;
//~^ ERROR cannot infer
fail!();
}
fn ordering4<'a, 'b>(a: &'a uint, b: &'b uint, x: |&'a &'b uint|) |
fn ordering5<'a, 'b>(a: &'a uint, b: &'b uint, x: Option<&'a &'b uint>) {
let z: Option<&'a &'b uint> = None;
}
fn main() {}
| {
// Do not infer ordering from closure argument types.
let z: Option<&'a &'b uint> = None;
//~^ ERROR reference has a longer lifetime than the data it references
} | identifier_body |
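The second row masks the entire `{ ... }` body of `ordering4`, hence `identifier_body`. For a Python source, the analogous span can be sliced out with the `ast` module (Python 3.8+ for `end_lineno`); swapping `ast.FunctionDef` for `ast.If` would give a `conditional_block`-style mask like the one applied to `error.rs` below. A sketch under those assumptions:

```python
import ast

def mask_first_function_body(src):
    """Return (prefix, middle, suffix) with the first def's body as middle."""
    lines = src.splitlines(keepends=True)
    for node in ast.walk(ast.parse(src)):
        if isinstance(node, ast.FunctionDef):
            start = node.body[0].lineno - 1   # lineno is 1-based
            end = node.body[-1].end_lineno    # inclusive last line of the body
            return ("".join(lines[:start]),
                    "".join(lines[start:end]),
                    "".join(lines[end:]))
    return None

src = "def f(x):\n    y = x + 1\n    return y\nprint(f(1))\n"
prefix, middle, suffix = mask_first_function_body(src)
assert middle == "    y = x + 1\n    return y\n"
assert prefix + middle + suffix == src
```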
error.rs | use crate::asm::Token;
use std::fmt;
#[derive(Debug, Clone)]
pub enum Error {
ParseError { error: String },
NoSectionDecl,
MissingSection,
StringConstantWithoutLabel { instr: String },
SymbolAlreadyDeclared { name: String },
InvalidDirectiveName { instr: String },
UnknownDirective { name: String },
UnknownSection { name: String },
UnknownLabel { name: String },
UnexpectedToken { token: Token },
NotAnOpcode,
EmptyString,
UnlabeledString,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::ParseError { ref error } => f.write_str(&format!("Parse error: {}", error)),
Error::NoSectionDecl => f.write_str("No section declared"),
Error::MissingSection => f.write_str("Missing section"),
Error::StringConstantWithoutLabel { ref instr } => f.write_str(&format!(
"String constant declared without label: {}",
instr | f.write_str(&format!("Invalid directive name: {}", instr))
}
Error::UnknownDirective { ref name } => {
f.write_str(&format!("Unknown directive: {}", name))
}
Error::UnknownSection { ref name } => {
f.write_str(&format!("Unknown section: {}", name))
}
Error::UnknownLabel { ref name } => f.write_str(&format!("Unknown label: {}", name)),
Error::UnexpectedToken { ref token } => {
f.write_str(&format!("Unexpected token {:?} in the bagging area", token))
}
Error::NotAnOpcode => f.write_str("Non-opcode found in opcode field"),
Error::EmptyString => f.write_str("Empty string provided"),
Error::UnlabeledString => f.write_str("Unlabeled string cannot be referenced"),
}
}
}
impl std::error::Error for Error {
fn description(&self) -> &str {
match self {
Error::ParseError { .. } => "There was an error parsing the code",
Error::NoSectionDecl => "No section declared",
Error::MissingSection => "Missing section",
Error::StringConstantWithoutLabel { .. } => "String constant declared without label",
Error::SymbolAlreadyDeclared { .. } => "Symbol declared multiple times",
Error::InvalidDirectiveName { .. } => "Invalid directive name",
Error::UnknownDirective { .. } => "Unknown directive",
Error::UnknownSection { .. } => "Unknown section",
Error::UnknownLabel { .. } => "Unknown label",
Error::UnexpectedToken { .. } => "Unexpected token",
Error::NotAnOpcode { .. } => "Not an opcode",
Error::EmptyString { .. } => "Empty string",
Error::UnlabeledString { .. } => "Unlabeled string",
}
}
} | )),
Error::SymbolAlreadyDeclared { ref name } => {
f.write_str(&format!("Symbol {:?} declared multiple times", name))
}
Error::InvalidDirectiveName { ref instr } => { | random_line_split |
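`random_line_split` rows cut the file at line boundaries chosen without regard to syntax — note the split above lands mid-statement, right after the bare `instr` line inside the `Display` impl. One plausible reading of the scheme, sketched with a seeded RNG (the boundary-picking details are an assumption):

```python
import random

def random_line_split(src, seed=0):
    """Cut src into (prefix, middle, suffix) at two random line boundaries."""
    rng = random.Random(seed)
    lines = src.splitlines(keepends=True)
    i = rng.randrange(0, len(lines))        # first boundary
    j = rng.randrange(i, len(lines) + 1)    # second boundary, at or after i
    return "".join(lines[:i]), "".join(lines[i:j]), "".join(lines[j:])

src = "a = 1\nb = 2\nc = a + b\nprint(c)\n"
prefix, middle, suffix = random_line_split(src)
assert prefix + middle + suffix == src
```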
error.rs | use crate::asm::Token;
use std::fmt;
#[derive(Debug, Clone)]
pub enum Error {
ParseError { error: String },
NoSectionDecl,
MissingSection,
StringConstantWithoutLabel { instr: String },
SymbolAlreadyDeclared { name: String },
InvalidDirectiveName { instr: String },
UnknownDirective { name: String },
UnknownSection { name: String },
UnknownLabel { name: String },
UnexpectedToken { token: Token },
NotAnOpcode,
EmptyString,
UnlabeledString,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::ParseError { ref error } => f.write_str(&format!("Parse error: {}", error)),
Error::NoSectionDecl => f.write_str("No section declared"),
Error::MissingSection => f.write_str("Missing section"),
Error::StringConstantWithoutLabel { ref instr } => f.write_str(&format!(
"String constant declared without label: {}",
instr
)),
Error::SymbolAlreadyDeclared { ref name } => |
Error::InvalidDirectiveName { ref instr } => {
f.write_str(&format!("Invalid directive name: {}", instr))
}
Error::UnknownDirective { ref name } => {
f.write_str(&format!("Unknown directive: {}", name))
}
Error::UnknownSection { ref name } => {
f.write_str(&format!("Unknown section: {}", name))
}
Error::UnknownLabel { ref name } => f.write_str(&format!("Unknown label: {}", name)),
Error::UnexpectedToken { ref token } => {
f.write_str(&format!("Unexpected token {:?} in the bagging area", token))
}
Error::NotAnOpcode => f.write_str("Non-opcode found in opcode field"),
Error::EmptyString => f.write_str("Empty string provided"),
Error::UnlabeledString => f.write_str("Unlabeled string cannot be referenced"),
}
}
}
impl std::error::Error for Error {
fn description(&self) -> &str {
match self {
Error::ParseError { .. } => "There was an error parsing the code",
Error::NoSectionDecl => "No section declared",
Error::MissingSection => "Missing section",
Error::StringConstantWithoutLabel { .. } => "String constant declared without label",
Error::SymbolAlreadyDeclared { .. } => "Symbol declared multiple times",
Error::InvalidDirectiveName { .. } => "Invalid directive name",
Error::UnknownDirective { .. } => "Unknown directive",
Error::UnknownSection { .. } => "Unknown section",
Error::UnknownLabel { .. } => "Unknown label",
Error::UnexpectedToken { .. } => "Unexpected token",
Error::NotAnOpcode { .. } => "Not an opcode",
Error::EmptyString { .. } => "Empty string",
Error::UnlabeledString { .. } => "Unlabeled string",
}
}
}
| {
f.write_str(&format!("Symbol {:?} declared multiple times", name))
} | conditional_block |
error.rs | use crate::asm::Token;
use std::fmt;
#[derive(Debug, Clone)]
pub enum | {
ParseError { error: String },
NoSectionDecl,
MissingSection,
StringConstantWithoutLabel { instr: String },
SymbolAlreadyDeclared { name: String },
InvalidDirectiveName { instr: String },
UnknownDirective { name: String },
UnknownSection { name: String },
UnknownLabel { name: String },
UnexpectedToken { token: Token },
NotAnOpcode,
EmptyString,
UnlabeledString,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::ParseError { ref error } => f.write_str(&format!("Parse error: {}", error)),
Error::NoSectionDecl => f.write_str("No section declared"),
Error::MissingSection => f.write_str("Missing section"),
Error::StringConstantWithoutLabel { ref instr } => f.write_str(&format!(
"String constant declared without label: {}",
instr
)),
Error::SymbolAlreadyDeclared { ref name } => {
f.write_str(&format!("Symbol {:?} declared multiple times", name))
}
Error::InvalidDirectiveName { ref instr } => {
f.write_str(&format!("Invalid directive name: {}", instr))
}
Error::UnknownDirective { ref name } => {
f.write_str(&format!("Unknown directive: {}", name))
}
Error::UnknownSection { ref name } => {
f.write_str(&format!("Unknown section: {}", name))
}
Error::UnknownLabel { ref name } => f.write_str(&format!("Unknown label: {}", name)),
Error::UnexpectedToken { ref token } => {
f.write_str(&format!("Unexpected token {:?} in the bagging area", token))
}
Error::NotAnOpcode => f.write_str("Non-opcode found in opcode field"),
Error::EmptyString => f.write_str("Empty string provided"),
Error::UnlabeledString => f.write_str("Unlabeled string cannot be referenced"),
}
}
}
impl std::error::Error for Error {
fn description(&self) -> &str {
match self {
Error::ParseError { .. } => "There was an error parsing the code",
Error::NoSectionDecl => "No section declared",
Error::MissingSection => "Missing section",
Error::StringConstantWithoutLabel { .. } => "String constant declared without label",
Error::SymbolAlreadyDeclared { .. } => "Symbol declared multiple times",
Error::InvalidDirectiveName { .. } => "Invalid directive name",
Error::UnknownDirective { .. } => "Unknown directive",
Error::UnknownSection { .. } => "Unknown section",
Error::UnknownLabel { .. } => "Unknown label",
Error::UnexpectedToken { .. } => "Unexpected token",
Error::NotAnOpcode { .. } => "Not an opcode",
Error::EmptyString { .. } => "Empty string",
Error::UnlabeledString { .. } => "Unlabeled string",
}
}
}
| Error | identifier_name |
project-header.tsx | import * as React from 'react';
import clsx from 'clsx';
import Image from 'next/image';
import { ProjectMetadata } from '~/types/projects';
import { Container } from '~/components/layout';
import { PageMetaItem } from '~/components/page';
export interface ProjectHeaderProps extends React.ComponentPropsWithoutRef<'header'> {
project: ProjectMetadata;
}
export const ProjectHeader = React.forwardRef<HTMLDivElement, ProjectHeaderProps>(
({ className, style, project, ...rest }, ref) => {
const { header_image, title, description, tags, project_url } = project;
return (
<header ref={ref} className={clsx('px-4 lg:px-6 pt-12', className)} style={style} {...rest}>
<Container className="space-y-12">
{header_image && (
<div className="relative w-full h-full aspect-video overflow-hidden rounded-md shadow-lg">
<Image
loading="lazy"
src={header_image} | />
</div>
)}
<div className="space-y-4">
{tags ? (
<div className="space-x-4">
{tags.map(tag => (
<PageMetaItem key={tag}>{tag}</PageMetaItem>
))}
</div>
) : null}
<div className="relative space-y-4">
{project_url ? (
<h1 className="p-name text-3xl sm:text-4xl lg:text-5xl font-semibold">
<a
className="group helper-link-cover"
href={project_url}
target="_blank"
rel="noopener noreferrer"
>
<span className="group-hover:underline">{title}</span> →
</a>
</h1>
) : (
<h1 className="p-name text-3xl sm:text-4xl lg:text-5xl font-semibold">{title}</h1>
)}
<p className="p-summary text-lg sm:text-xl lg:text-2xl font-light">{description}</p>
</div>
</div>
</Container>
</header>
);
}
);
ProjectHeader.displayName = 'ProjectHeader';
export default ProjectHeader; | alt={title}
layout="fill"
objectFit="cover" | random_line_split |
UTM2Geo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 17/2/2015
@author: Antonio Hermosilla Rodrigo.
@contact: [email protected]
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
from os import sep,pardir,getcwd
from os.path import normpath
import Geometrias.PuntoUTM
import Proyecciones.UTM2Geo
import Geodesia.EGM.CalcularOndulacion
class UTM2Geo(QtGui.QWidget):
'''
classdocs
'''
__rutaroot=None
__msgBoxErr=None
__pLat=None
__pLon=None
__pw=None
__pN=None
def __init__(self, parent=None):
'''
Constructor
'''
super(UTM2Geo, self).__init__()
#Se carga el formulario para el controlador.
self.__rutaroot=normpath(getcwd() + sep + pardir)
uic.loadUi(self.__rutaroot+'/Formularios/UTM2Geo.ui', self)
self.__msgBoxErr=QtGui.QMessageBox()
self.__msgBoxErr.setWindowTitle("ERROR")
self.__CargarElipsoides()
self.__tabChanged()
self.__setPrecision()
self.connect(self.pushButton, QtCore.SIGNAL("clicked()"), self.Calcular)
self.connect(self.pushButton_4, QtCore.SIGNAL("clicked()"), self.launch)
self.connect(self.tabWidget, QtCore.SIGNAL("currentChanged (int)"), self.__tabChanged)
self.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), self.AbrirFicheroUTM)
self.connect(self.pushButton_3, QtCore.SIGNAL("clicked()"), self.AbrirFicheroGeo)
self.connect(self.spinBox_2, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_3, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_4, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_5, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
def __CargarElipsoides(self):
'''!
'''
import BasesDeDatos.SQLite.SQLiteManager
try:
db=BasesDeDatos.SQLite.SQLiteManager.SQLiteManager(self.__rutaroot+'/Geodesia/Elipsoides/Elipsoides.db')
Nombres=db.ObtenerColumna('Elipsoides','Nombre')
Nombres=[i[0] for i in Nombres]
Nombres.sort()
self.comboBox.addItems(Nombres)
self.comboBox.setCurrentIndex(28)
self.comboBox_2.addItems(Nombres)
self.comboBox_2.setCurrentIndex(28)
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def Calcular(self):
'''!
'''
putm=None
if self.lineEdit.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la X UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_2.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la Y UTM.")
self.__msgBoxErr.exec_()
return
try:
putm=Geometrias.PuntoUTM.PuntoUTM(self.lineEdit.text(),self.lineEdit_2.text(),huso=self.spinBox.value())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
Sal=None
try:
Sal=Proyecciones.UTM2Geo.UTM2Geo(putm, self.comboBox.currentText())
self.lineEdit_3.setText(str(round(Sal.getLatitud(),self.__pLat)))
self.lineEdit_4.setText(str(round(Sal.getLongitud(),self.__pLon)))
self.lineEdit_5.setText(str(round(putm.getConvergenciaMeridianos(),self.__pw)))
self.lineEdit_6.setText(str(putm.getEscalaLocalPunto()))
try:
self.lineEdit_7.setText(str(round(Geodesia.EGM.CalcularOndulacion.CalcularOndulacion(Sal),self.__pN)))
except:
self.lineEdit_7.setText("")
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def AbrirFicheroUTM(self):
'''!
'''
ruta = QtGui.QFileDialog.getOpenFileName(self, 'Abrir Archivo', '.')
self.lineEdit_9.setText(ruta)
def AbrirFicheroGeo(self):
'''!
'''
ruta = QtGui.QFileDialog.getSaveFileName(self, 'Guardar Archivo', '.')
self.lineEdit_10.setText(ruta)
def launch(self):
'''!
'''
QtCore.QThread(self.CalcularArchivo()).exec_()
def CalcularArchivo(self):
'''!
'''
pd=QtGui.QProgressDialog()
if self.lineEdit_9.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de coordenadas UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_10.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de salida para las coordenadas Geodesicas")
self.__msgBoxErr.exec_()
return
#Formato del fichero de coordenadas Geodesicas.
#ID,X,Y,posY,Huso,helip(opcional)
pd.show()
pd.setLabelText("Tarea 1..2 Procesando el fichero.")
try:
QtGui.QApplication.processEvents()
sal=Proyecciones.UTM2Geo.UTM2GeoFromFile(self.lineEdit_9.text(), self.comboBox_2.currentText())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
pg=QtGui.QProgressBar(pd)
pd.setBar(pg)
pg.setMinimum(0)
pg.setMaximum(len(sal))
g=open(self.lineEdit_10.text(),'w')
pd.setLabelText("Tarea 2..2 Escribiendo nuevo fichero.")
cont=0
pg.show()
for i in sal:
QtGui.QApplication.processEvents()
line=""
line+=i[0]+","
line+=str(round(i[2].getLatitud(),self.__pLat))+","
line+=str(round(i[2].getLongitud(),self.__pLon))+","
h=i[2].getAlturaElipsoidal()
if h==None: | line+=str(round(i[1].getConvergenciaMeridianos(),self.__pw))+","
line+=str(round(i[1].getEscalaLocalPunto(),self.__pw))+","
line+=str(i[1].getZonaUTM())+"\n"
g.write(line)
pg.setValue(cont)
cont+=1
g.close()
pg.hide()
def __setPrecision(self):
'''!
'''
self.__pLat=self.spinBox_2.value()
self.__pLon=self.spinBox_3.value()
self.__pw=self.spinBox_4.value()
self.__pN=self.spinBox_5.value()
def __tabChanged(self):
'''!
'''
if self.tabWidget.currentIndex()==0:
self.setFixedSize ( 319, 490)
elif self.tabWidget.currentIndex()==1:
self.setFixedSize ( 562, 272)
pass
elif self.tabWidget.currentIndex()==2:
self.setFixedSize ( 354, 202)
pass
if __name__ == "__main__":
#arranque del programa.
app = QtGui.QApplication(sys.argv)#requerido en todas las aplicaciones con cuadros de diálogo.
dlg=UTM2Geo()#creo un objeto de nuestro controlador del cuadro.
dlg.show()
## dlg.exec_()
sys.exit(app.exec_())#Requerido. Al cerrar el cuadro termina la aplicación
app.close() | line+=","
else:
line+=str(h)+","
line+=str(i[1].getHuso())+"," | random_line_split |
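The `CalcularArchivo` loop in this row assembles output lines by manual string concatenation, writing an empty field when the ellipsoidal height is `None`. A hedged standalone sketch of the same row-assembly convention using Python's `csv` module — the field layout is simplified and the sample point is hypothetical:

```python
import csv, io

def write_point_rows(rows, fh):
    """Write (point_id, lat, lon, height) rows; empty field for missing height."""
    writer = csv.writer(fh)
    for point_id, lat, lon, h in rows:
        writer.writerow([point_id, round(lat, 8), round(lon, 8),
                         "" if h is None else h])

buf = io.StringIO()
write_point_rows([("P1", 40.4167, -3.7033, None)], buf)
assert buf.getvalue().strip() == "P1,40.4167,-3.7033,"
```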
UTM2Geo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 17/2/2015
@author: Antonio Hermosilla Rodrigo.
@contact: [email protected]
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
from os import sep,pardir,getcwd
from os.path import normpath
import Geometrias.PuntoUTM
import Proyecciones.UTM2Geo
import Geodesia.EGM.CalcularOndulacion
class UTM2Geo(QtGui.QWidget):
'''
classdocs
'''
__rutaroot=None
__msgBoxErr=None
__pLat=None
__pLon=None
__pw=None
__pN=None
def | (self, parent=None):
'''
Constructor
'''
super(UTM2Geo, self).__init__()
#Se carga el formulario para el controlador.
self.__rutaroot=normpath(getcwd() + sep + pardir)
uic.loadUi(self.__rutaroot+'/Formularios/UTM2Geo.ui', self)
self.__msgBoxErr=QtGui.QMessageBox()
self.__msgBoxErr.setWindowTitle("ERROR")
self.__CargarElipsoides()
self.__tabChanged()
self.__setPrecision()
self.connect(self.pushButton, QtCore.SIGNAL("clicked()"), self.Calcular)
self.connect(self.pushButton_4, QtCore.SIGNAL("clicked()"), self.launch)
self.connect(self.tabWidget, QtCore.SIGNAL("currentChanged (int)"), self.__tabChanged)
self.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), self.AbrirFicheroUTM)
self.connect(self.pushButton_3, QtCore.SIGNAL("clicked()"), self.AbrirFicheroGeo)
self.connect(self.spinBox_2, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_3, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_4, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_5, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
def __CargarElipsoides(self):
'''!
'''
import BasesDeDatos.SQLite.SQLiteManager
try:
db=BasesDeDatos.SQLite.SQLiteManager.SQLiteManager(self.__rutaroot+'/Geodesia/Elipsoides/Elipsoides.db')
Nombres=db.ObtenerColumna('Elipsoides','Nombre')
Nombres=[i[0] for i in Nombres]
Nombres.sort()
self.comboBox.addItems(Nombres)
self.comboBox.setCurrentIndex(28)
self.comboBox_2.addItems(Nombres)
self.comboBox_2.setCurrentIndex(28)
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def Calcular(self):
'''!
'''
putm=None
if self.lineEdit.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la X UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_2.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la Y UTM.")
self.__msgBoxErr.exec_()
return
try:
putm=Geometrias.PuntoUTM.PuntoUTM(self.lineEdit.text(),self.lineEdit_2.text(),huso=self.spinBox.value())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
Sal=None
try:
Sal=Proyecciones.UTM2Geo.UTM2Geo(putm, self.comboBox.currentText())
self.lineEdit_3.setText(str(round(Sal.getLatitud(),self.__pLat)))
self.lineEdit_4.setText(str(round(Sal.getLongitud(),self.__pLon)))
self.lineEdit_5.setText(str(round(putm.getConvergenciaMeridianos(),self.__pw)))
self.lineEdit_6.setText(str(putm.getEscalaLocalPunto()))
try:
self.lineEdit_7.setText(str(round(Geodesia.EGM.CalcularOndulacion.CalcularOndulacion(Sal),self.__pN)))
except:
self.lineEdit_7.setText("")
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def AbrirFicheroUTM(self):
'''!
'''
ruta = QtGui.QFileDialog.getOpenFileName(self, 'Abrir Archivo', '.')
self.lineEdit_9.setText(ruta)
def AbrirFicheroGeo(self):
'''!
'''
ruta = QtGui.QFileDialog.getSaveFileName(self, 'Guadar Archivo', '.')
self.lineEdit_10.setText(ruta)
def launch(self):
'''!
'''
QtCore.QThread(self.CalcularArchivo()).exec_()
def CalcularArchivo(self):
'''!
'''
pd=QtGui.QProgressDialog()
if self.lineEdit_9.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de coordenadas UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_10.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de salida para las coordenadas Geodesicas")
self.__msgBoxErr.exec_()
return
#Formato del fichero de coordenadas Geodesicas.
#ID,X,Y,posY,Huso,helip(opcional)
pd.show()
pd.setLabelText("Tarea 1..2 Procesando el fichero.")
try:
QtGui.QApplication.processEvents()
sal=Proyecciones.UTM2Geo.UTM2GeoFromFile(self.lineEdit_9.text(), self.comboBox_2.currentText())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
pg=QtGui.QProgressBar(pd)
pd.setBar(pg)
pg.setMinimum(0)
pg.setMaximum(len(sal))
g=open(self.lineEdit_10.text(),'w')
pd.setLabelText("Tarea 2..2 Escribiendo nuevo fichero.")
cont=0
pg.show()
for i in sal:
QtGui.QApplication.processEvents()
line=""
line+=i[0]+","
line+=str(round(i[2].getLatitud(),self.__pLat))+","
line+=str(round(i[2].getLongitud(),self.__pLon))+","
h=i[2].getAlturaElipsoidal()
if h==None:
line+=","
else:
line+=str(h)+","
line+=str(i[1].getHuso())+","
line+=str(round(i[1].getConvergenciaMeridianos(),self.__pw))+","
line+=str(round(i[1].getEscalaLocalPunto(),self.__pw))+","
line+=str(i[1].getZonaUTM())+"\n"
g.write(line)
pg.setValue(cont)
cont+=1
g.close()
pg.hide()
def __setPrecision(self):
'''!
'''
self.__pLat=self.spinBox_2.value()
self.__pLon=self.spinBox_3.value()
self.__pw=self.spinBox_4.value()
self.__pN=self.spinBox_5.value()
def __tabChanged(self):
'''!
'''
if self.tabWidget.currentIndex()==0:
self.setFixedSize ( 319, 490)
elif self.tabWidget.currentIndex()==1:
self.setFixedSize ( 562, 272)
pass
elif self.tabWidget.currentIndex()==2:
self.setFixedSize ( 354, 202)
pass
if __name__ == "__main__":
#arranque del programa.
app = QtGui.QApplication(sys.argv)#requerido en todas las aplicaciones con cuadros de diálogo.
dlg=UTM2Geo()#creo un objeto de nuestro controlador del cuadro.
dlg.show()
## dlg.exec_()
sys.exit(app.exec_())#Requerido. Al cerrar el cuadro termina la aplicación
app.close()
| __init__ | identifier_name |
UTM2Geo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 17/2/2015
@author: Antonio Hermosilla Rodrigo.
@contact: [email protected]
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
from os import sep,pardir,getcwd
from os.path import normpath
import Geometrias.PuntoUTM
import Proyecciones.UTM2Geo
import Geodesia.EGM.CalcularOndulacion
class UTM2Geo(QtGui.QWidget):
'''
classdocs
'''
__rutaroot=None
__msgBoxErr=None
__pLat=None
__pLon=None
__pw=None
__pN=None
def __init__(self, parent=None):
'''
Constructor
'''
super(UTM2Geo, self).__init__()
#Se carga el formulario para el controlador.
self.__rutaroot=normpath(getcwd() + sep + pardir)
uic.loadUi(self.__rutaroot+'/Formularios/UTM2Geo.ui', self)
self.__msgBoxErr=QtGui.QMessageBox()
self.__msgBoxErr.setWindowTitle("ERROR")
self.__CargarElipsoides()
self.__tabChanged()
self.__setPrecision()
self.connect(self.pushButton, QtCore.SIGNAL("clicked()"), self.Calcular)
self.connect(self.pushButton_4, QtCore.SIGNAL("clicked()"), self.launch)
self.connect(self.tabWidget, QtCore.SIGNAL("currentChanged (int)"), self.__tabChanged)
self.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), self.AbrirFicheroUTM)
self.connect(self.pushButton_3, QtCore.SIGNAL("clicked()"), self.AbrirFicheroGeo)
self.connect(self.spinBox_2, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_3, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_4, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_5, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
def __CargarElipsoides(self):
'''!
'''
import BasesDeDatos.SQLite.SQLiteManager
try:
db=BasesDeDatos.SQLite.SQLiteManager.SQLiteManager(self.__rutaroot+'/Geodesia/Elipsoides/Elipsoides.db')
Nombres=db.ObtenerColumna('Elipsoides','Nombre')
Nombres=[i[0] for i in Nombres]
Nombres.sort()
self.comboBox.addItems(Nombres)
self.comboBox.setCurrentIndex(28)
self.comboBox_2.addItems(Nombres)
self.comboBox_2.setCurrentIndex(28)
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def Calcular(self):
'''!
'''
putm=None
if self.lineEdit.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la X UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_2.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la Y UTM.")
self.__msgBoxErr.exec_()
return
try:
putm=Geometrias.PuntoUTM.PuntoUTM(self.lineEdit.text(),self.lineEdit_2.text(),huso=self.spinBox.value())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
Sal=None
try:
Sal=Proyecciones.UTM2Geo.UTM2Geo(putm, self.comboBox.currentText())
self.lineEdit_3.setText(str(round(Sal.getLatitud(),self.__pLat)))
self.lineEdit_4.setText(str(round(Sal.getLongitud(),self.__pLon)))
self.lineEdit_5.setText(str(round(putm.getConvergenciaMeridianos(),self.__pw)))
self.lineEdit_6.setText(str(putm.getEscalaLocalPunto()))
try:
self.lineEdit_7.setText(str(round(Geodesia.EGM.CalcularOndulacion.CalcularOndulacion(Sal),self.__pN)))
except:
self.lineEdit_7.setText("")
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def AbrirFicheroUTM(self):
'''!
'''
ruta = QtGui.QFileDialog.getOpenFileName(self, 'Abrir Archivo', '.')
self.lineEdit_9.setText(ruta)
def AbrirFicheroGeo(self):
'''!
'''
ruta = QtGui.QFileDialog.getSaveFileName(self, 'Guardar Archivo', '.')
self.lineEdit_10.setText(ruta)
def launch(self):
'''!
'''
QtCore.QThread(self.CalcularArchivo()).exec_()
def CalcularArchivo(self):
| except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
pg=QtGui.QProgressBar(pd)
pd.setBar(pg)
pg.setMinimum(0)
pg.setMaximum(len(sal))
g=open(self.lineEdit_10.text(),'w')
pd.setLabelText("Tarea 2..2 Escribiendo nuevo fichero.")
cont=0
pg.show()
for i in sal:
QtGui.QApplication.processEvents()
line=""
line+=i[0]+","
line+=str(round(i[2].getLatitud(),self.__pLat))+","
line+=str(round(i[2].getLongitud(),self.__pLon))+","
h=i[2].getAlturaElipsoidal()
if h==None:
line+=","
else:
line+=str(h)+","
line+=str(i[1].getHuso())+","
line+=str(round(i[1].getConvergenciaMeridianos(),self.__pw))+","
line+=str(round(i[1].getEscalaLocalPunto(),self.__pw))+","
line+=str(i[1].getZonaUTM())+"\n"
g.write(line)
pg.setValue(cont)
cont+=1
g.close()
pg.hide()
def __setPrecision(self):
'''!
'''
self.__pLat=self.spinBox_2.value()
self.__pLon=self.spinBox_3.value()
self.__pw=self.spinBox_4.value()
self.__pN=self.spinBox_5.value()
def __tabChanged(self):
'''!
'''
if self.tabWidget.currentIndex()==0:
self.setFixedSize ( 319, 490)
elif self.tabWidget.currentIndex()==1:
self.setFixedSize ( 562, 272)
pass
elif self.tabWidget.currentIndex()==2:
self.setFixedSize ( 354, 202)
pass
if __name__ == "__main__":
#arranque del programa.
app = QtGui.QApplication(sys.argv)#requerido en todas las aplicaciones con cuadros de diálogo.
dlg=UTM2Geo()#creo un objeto de nuestro controlador del cuadro.
dlg.show()
## dlg.exec_()
sys.exit(app.exec_())#Requerido. Al cerrar el cuadro termina la aplicación
app.close()
| '''!
'''
pd=QtGui.QProgressDialog()
if self.lineEdit_9.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de coordenadas UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_10.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de salida para las coordenadas Geodesicas")
self.__msgBoxErr.exec_()
return
#Formato del fichero de coordenadas Geodesicas.
#ID,X,Y,posY,Huso,helip(opcional)
pd.show()
pd.setLabelText("Tarea 1..2 Procesando el fichero.")
try:
QtGui.QApplication.processEvents()
sal=Proyecciones.UTM2Geo.UTM2GeoFromFile(self.lineEdit_9.text(), self.comboBox_2.currentText()) | identifier_body |
UTM2Geo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 17/2/2015
@author: Antonio Hermosilla Rodrigo.
@contact: [email protected]
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
from os import sep,pardir,getcwd
from os.path import normpath
import Geometrias.PuntoUTM
import Proyecciones.UTM2Geo
import Geodesia.EGM.CalcularOndulacion
class UTM2Geo(QtGui.QWidget):
'''
classdocs
'''
__rutaroot=None
__msgBoxErr=None
__pLat=None
__pLon=None
__pw=None
__pN=None
def __init__(self, parent=None):
'''
Constructor
'''
super(UTM2Geo, self).__init__()
#Se carga el formulario para el controlador.
self.__rutaroot=normpath(getcwd() + sep + pardir)
uic.loadUi(self.__rutaroot+'/Formularios/UTM2Geo.ui', self)
self.__msgBoxErr=QtGui.QMessageBox()
self.__msgBoxErr.setWindowTitle("ERROR")
self.__CargarElipsoides()
self.__tabChanged()
self.__setPrecision()
self.connect(self.pushButton, QtCore.SIGNAL("clicked()"), self.Calcular)
self.connect(self.pushButton_4, QtCore.SIGNAL("clicked()"), self.launch)
self.connect(self.tabWidget, QtCore.SIGNAL("currentChanged (int)"), self.__tabChanged)
self.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), self.AbrirFicheroUTM)
self.connect(self.pushButton_3, QtCore.SIGNAL("clicked()"), self.AbrirFicheroGeo)
self.connect(self.spinBox_2, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_3, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_4, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_5, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
def __CargarElipsoides(self):
'''!
'''
import BasesDeDatos.SQLite.SQLiteManager
try:
db=BasesDeDatos.SQLite.SQLiteManager.SQLiteManager(self.__rutaroot+'/Geodesia/Elipsoides/Elipsoides.db')
Nombres=db.ObtenerColumna('Elipsoides','Nombre')
Nombres=[i[0] for i in Nombres]
Nombres.sort()
self.comboBox.addItems(Nombres)
self.comboBox.setCurrentIndex(28)
self.comboBox_2.addItems(Nombres)
self.comboBox_2.setCurrentIndex(28)
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def Calcular(self):
'''!
'''
putm=None
if self.lineEdit.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la X UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_2.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la Y UTM.")
self.__msgBoxErr.exec_()
return
try:
putm=Geometrias.PuntoUTM.PuntoUTM(self.lineEdit.text(),self.lineEdit_2.text(),huso=self.spinBox.value())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
Sal=None
try:
Sal=Proyecciones.UTM2Geo.UTM2Geo(putm, self.comboBox.currentText())
self.lineEdit_3.setText(str(round(Sal.getLatitud(),self.__pLat)))
self.lineEdit_4.setText(str(round(Sal.getLongitud(),self.__pLon)))
self.lineEdit_5.setText(str(round(putm.getConvergenciaMeridianos(),self.__pw)))
self.lineEdit_6.setText(str(putm.getEscalaLocalPunto()))
try:
self.lineEdit_7.setText(str(round(Geodesia.EGM.CalcularOndulacion.CalcularOndulacion(Sal),self.__pN)))
except:
self.lineEdit_7.setText("")
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def AbrirFicheroUTM(self):
'''!
'''
ruta = QtGui.QFileDialog.getOpenFileName(self, 'Abrir Archivo', '.')
self.lineEdit_9.setText(ruta)
def AbrirFicheroGeo(self):
'''!
'''
ruta = QtGui.QFileDialog.getSaveFileName(self, 'Guadar Archivo', '.')
self.lineEdit_10.setText(ruta)
def launch(self):
'''!
'''
QtCore.QThread(self.CalcularArchivo()).exec_()
def CalcularArchivo(self):
'''!
'''
pd=QtGui.QProgressDialog()
if self.lineEdit_9.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de coordenadas UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_10.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de salida para las coordenadas Geodesicas")
self.__msgBoxErr.exec_()
return
#Formato del fichero de coordenadas Geodesicas.
#ID,X,Y,posY,Huso,helip(opcional)
pd.show()
pd.setLabelText("Tarea 1..2 Procesando el fichero.")
try:
QtGui.QApplication.processEvents()
sal=Proyecciones.UTM2Geo.UTM2GeoFromFile(self.lineEdit_9.text(), self.comboBox_2.currentText())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
pg=QtGui.QProgressBar(pd)
pd.setBar(pg)
pg.setMinimum(0)
pg.setMaximum(len(sal))
g=open(self.lineEdit_10.text(),'w')
pd.setLabelText("Tarea 2..2 Escribiendo nuevo fichero.")
cont=0
pg.show()
for i in sal:
QtGui.QApplication.processEvents()
line=""
line+=i[0]+","
line+=str(round(i[2].getLatitud(),self.__pLat))+","
line+=str(round(i[2].getLongitud(),self.__pLon))+","
h=i[2].getAlturaElipsoidal()
if h==None:
line+=","
else:
|
line+=str(i[1].getHuso())+","
line+=str(round(i[1].getConvergenciaMeridianos(),self.__pw))+","
line+=str(round(i[1].getEscalaLocalPunto(),self.__pw))+","
line+=str(i[1].getZonaUTM())+"\n"
g.write(line)
pg.setValue(cont)
cont+=1
g.close()
pg.hide()
def __setPrecision(self):
'''!
'''
self.__pLat=self.spinBox_2.value()
self.__pLon=self.spinBox_3.value()
self.__pw=self.spinBox_4.value()
self.__pN=self.spinBox_5.value()
def __tabChanged(self):
'''!
'''
if self.tabWidget.currentIndex()==0:
self.setFixedSize ( 319, 490)
elif self.tabWidget.currentIndex()==1:
self.setFixedSize ( 562, 272)
pass
elif self.tabWidget.currentIndex()==2:
self.setFixedSize ( 354, 202)
pass
if __name__ == "__main__":
#arranque del programa.
app = QtGui.QApplication(sys.argv)#requerido en todas las aplicaciones con cuadros de diálogo.
dlg=UTM2Geo()#creo un objeto de nuestro controlador del cuadro.
dlg.show()
## dlg.exec_()
sys.exit(app.exec_())#Requerido. Al cerrar el cuadro termina la aplicación
app.close()
| line+=str(h)+"," | conditional_block |
reverserlist.py |
from linkedlist import SinglyLinkedListNode
def reverseList(head):
tail=None
last=None
tempNode = head
while tempNode is not None:
currentNode, tempNode = tempNode, tempNode.next
currentNode.next = tail
tail = currentNode
return tail
def reverseListKNode(head,k):
tempHead= None
tempTail= None
while head is not None:
tempNode = head
last = None
tk=k
while tempNode is not None and tk > 0:
currentNode,nextNode = tempNode,tempNode.next
currentNode.next = last
last=currentNode
tempNode = nextNode
tk-=1
if tempHead is not None:
tempTail.next = last
head.next = nextNode
else:
tempHead = last
head.next= nextNode
tempTail = head
head=nextNode
return tempHead
def printLinkedList(head):
while head is not None:
print head.data,
head=head.next
print ''
def createList(list):
|
a=(i for i in xrange(1,11))
list = createList(a)
printLinkedList(list)
newList=reverseListKNode(list,2)
printLinkedList(newList)
| lastNode=None
head=None
for i in list:
node= SinglyLinkedListNode(i)
if lastNode == None:
lastNode = node
head = node
else:
lastNode.next = node
lastNode=node
return head | identifier_body |
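`reverseListKNode` reverses the list in consecutive groups of `k` nodes, also reversing a short final group. The file itself is Python 2 (`print` statement, `xrange`); below is a self-contained Python 3 re-sketch of the same algorithm, where the `Node` class is an assumption standing in for the unshown `linkedlist.SinglyLinkedListNode`:

```python
class Node:
    """Stand-in for linkedlist.SinglyLinkedListNode (an assumption)."""
    def __init__(self, data, next=None):
        self.data, self.next = data, next

def reverse_k_group(head, k):
    """Reverse every consecutive group of k nodes (short tails included),
    mirroring reverseListKNode above."""
    new_head = group_tail = None
    while head is not None:
        node, last, n = head, None, k
        while node is not None and n > 0:   # reverse one group in place
            nxt = node.next
            node.next = last
            last, node = node, nxt
            n -= 1
        if new_head is None:                # first group becomes the new head
            new_head = last
        else:                               # stitch the previous group on
            group_tail.next = last
        group_tail = head                   # old group head is now its tail
        group_tail.next = node
        head = node
    return new_head

def from_list(xs):
    head = None
    for x in reversed(xs):
        head = Node(x, head)
    return head

def to_list(head):
    out = []
    while head is not None:
        out.append(head.data)
        head = head.next
    return out

assert to_list(reverse_k_group(from_list([1, 2, 3, 4, 5, 6]), 2)) == [2, 1, 4, 3, 6, 5]
assert to_list(reverse_k_group(from_list([1, 2, 3, 4, 5]), 2)) == [2, 1, 4, 3, 5]
```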
reverserlist.py |
from linkedlist import SinglyLinkedListNode
def reverseList(head):
tail=None
last=None
tempNode = head
while tempNode is not None:
currentNode, tempNode = tempNode, tempNode.next
currentNode.next = tail
tail = currentNode
return tail
def reverseListKNode(head,k):
tempHead= None
tempTail= None
while head is not None:
tempNode = head
last = None
tk=k
while tempNode is not None and tk > 0:
currentNode,nextNode = tempNode,tempNode.next
currentNode.next = last
last=currentNode
tempNode = nextNode
tk-=1
if tempHead is not None:
tempTail.next = last
head.next = nextNode
else:
tempHead = last
head.next= nextNode
tempTail = head
head=nextNode
return tempHead
def printLinkedList(head):
while head is not None:
print head.data,
head=head.next
print ''
def createList(list):
lastNode=None
head=None
for i in list:
node= SinglyLinkedListNode(i)
if lastNode == None:
|
else:
lastNode.next = node
lastNode=node
return head
a=(i for i in xrange(1,11))
list = createList(a)
printLinkedList(list)
newList=reverseListKNode(list,2)
printLinkedList(newList)
| lastNode = node
head = node | conditional_block |
reverserlist.py |
from linkedlist import SinglyLinkedListNode
def reverseList(head):
tail=None
last=None
tempNode = head
while tempNode is not None:
currentNode, tempNode = tempNode, tempNode.next
currentNode.next = tail
tail = currentNode
return tail
def reverseListKNode(head,k):
tempHead= None
tempTail= None
while head is not None:
tempNode = head
last = None
tk=k
while tempNode is not None and tk > 0:
currentNode,nextNode = tempNode,tempNode.next
currentNode.next = last
last=currentNode
tempNode = nextNode
tk-=1
if tempHead is not None:
tempTail.next = last
head.next = nextNode
else:
tempHead = last
head.next= nextNode
tempTail = head
head=nextNode
return tempHead
def printLinkedList(head):
while head is not None:
print head.data,
head=head.next
print ''
def | (list):
lastNode=None
head=None
for i in list:
node= SinglyLinkedListNode(i)
if lastNode == None:
lastNode = node
head = node
else:
lastNode.next = node
lastNode=node
return head
a=(i for i in xrange(1,11))
list = createList(a)
printLinkedList(list)
newList=reverseListKNode(list,2)
printLinkedList(newList)
| createList | identifier_name |
reverserlist.py | from linkedlist import SinglyLinkedListNode
def reverseList(head):
tail=None
last=None
tempNode = head
while tempNode is not None:
currentNode, tempNode = tempNode, tempNode.next
currentNode.next = tail
tail = currentNode
return tail
def reverseListKNode(head,k):
tempHead= None
tempTail= None
while head is not None: | last = None
tk=k
while tempNode is not None and tk > 0:
currentNode,nextNode = tempNode,tempNode.next
currentNode.next = last
last=currentNode
tempNode = nextNode
tk-=1
if tempHead is not None:
tempTail.next = last
head.next = nextNode
else:
tempHead = last
head.next= nextNode
tempTail = head
head=nextNode
return tempHead
def printLinkedList(head):
while head is not None:
print head.data,
head=head.next
print ''
def createList(list):
lastNode=None
head=None
for i in list:
node= SinglyLinkedListNode(i)
if lastNode == None:
lastNode = node
head = node
else:
lastNode.next = node
lastNode=node
return head
a=(i for i in xrange(1,11))
list = createList(a)
printLinkedList(list)
newList=reverseListKNode(list,2)
printLinkedList(newList) | tempNode = head | random_line_split |
config.py | it under the terms of the GNU General Public License, Version 3,
# as published by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import os.path
import optparse, ConfigParser
import snap | from snap.exceptions import ArgError
class ConfigOptions:
"""Container holding all the configuration options available
to the Snap system"""
# modes of operation
RESTORE = 0
BACKUP = 1
def __init__(self):
'''initialize configuration'''
# mode of operation
self.mode = None
# mapping of targets to lists of backends to use when backing up / restoring them
self.target_backends = {}
# mapping of targets to lists of entities to include when backing up
self.target_includes = {}
# mapping of targets to lists of entities to exclude when backing up
self.target_excludes = {}
# output log level
# currently supports 'quiet', 'normal', 'verbose', 'debug'
self.log_level = 'normal'
# output format to backup / restore
self.outputformat = 'snapfile'
# location of the snapfile to backup to / restore from
self.snapfile = None
# Encryption/decryption password to use, if left as None, encryption will be disabled
self.encryption_password = None
# hash of key/value pairs of service-specific options
self.service_options = {}
for backend in SnapshotTarget.BACKENDS:
self.target_backends[backend] = False
self.target_includes[backend] = []
self.target_excludes[backend] = []
def log_level_at_least(self, comparison):
return (comparison == 'quiet') or \
(comparison == 'normal' and self.log_level != 'quiet') or \
(comparison == 'verbose' and (self.log_level == 'verbose' or self.log_level == 'debug')) or \
(comparison == 'debug' and self.log_level == 'debug')
class ConfigFile:
"""Represents the snap config file to be read and parsed"""
parser = None
def __init__(self, config_file):
'''
Initialize the config file, specifying its path
@param file - the path to the file to load
'''
# if config file doesn't exist, just ignore
if not os.path.exists(config_file):
if snap.config.options.log_level_at_least("verbose"):
snap.callback.snapcallback.warn("Config file " + config_file + " not found")
else:
self.parser = ConfigParser.ConfigParser()
self.parser.read(config_file)
self.__parse()
def string_to_bool(string):
'''Static helper to convert a string to a boolean value'''
if string == 'True' or string == 'true' or string == '1':
return True
elif string == 'False' or string == 'false' or string == '0':
return False
return None
string_to_bool = staticmethod(string_to_bool)
def string_to_array(string):
'''Static helper to convert a colon deliminated string to an array of strings'''
return string.split(':')
string_to_array = staticmethod(string_to_array)
def __get_bool(self, key, section='main'):
'''
Retreive the indicated boolean value from the config file
@param key - the string key corresponding to the boolean value to retrieve
@param section - the section to retrieve the value from
@returns - the value or False if not found
'''
try:
return ConfigFile.string_to_bool(self.parser.get(section, key))
except:
return None
def __get_string(self, key, section='main'):
'''
Retreive the indicated string value from the config file
@param key - the string key corresponding to the string value to retrieve
@param section - the section to retrieve the value from
@returns - the value or None if not found
'''
try:
return self.parser.get(section, key)
except:
return None
def __get_array(self, section='main'):
'''return array of key/value pairs from the config file section
@param section - the section which to retrieve the key / values from
@returns - the array of key / value pairs or None if not found
'''
try:
return self.parser.items(section)
except:
return None
def __parse(self):
'''parse configuration out of the config file'''
for backend in SnapshotTarget.BACKENDS:
val = self.__get_bool(backend)
if val is not None:
snap.config.options.target_backends[backend] = val
else:
val = self.__get_string(backend)
if val:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
val = self.__get_bool('no' + backend)
if val:
snap.config.options.target_backends[backend] = False
of = self.__get_string('outputformat')
sf = self.__get_string('snapfile')
ll = self.__get_string('loglevel')
enp = self.__get_string('encryption_password')
if of != None:
snap.config.options.outputformat = of
if sf != None:
snap.config.options.snapfile = sf
if ll != None:
snap.config.options.log_level = ll
if enp != None:
snap.config.options.encryption_password = enp
services = self.__get_array('services')
if services:
for k, v in services:
snap.config.options.service_options[k] = v
class Config:
"""The configuration manager, used to set and verify snap config values
from the config file and command line. Primary interface to the
Configuration System"""
configoptions = None
parser = None
# read values from the config files and set them in the target ConfigOptions
def read_config(self):
# add conf stored in resources if running from local checkout
CONFIG_FILES.append(os.path.join(os.path.dirname(__file__), "..", "resources", "snap.conf"))
for config_file in CONFIG_FILES:
ConfigFile(config_file)
def parse_cli(self):
'''
parses the command line an set them in the target ConfigOptions
'''
usage = "usage: %prog [options] arg"
self.parser = optparse.OptionParser(usage, version=SNAP_VERSION)
self.parser.add_option('', '--restore', dest='restore', action='store_true', default=False, help='Restore snapshot')
self.parser.add_option('', '--backup', dest='backup', action='store_true', default=False, help='Take snapshot')
self.parser.add_option('-l', '--log-level', dest='log_level', action='store', default="normal", help='Log level (quiet, normal, verbose, debug)')
self.parser.add_option('-o', '--outputformat', dest='outputformat', action='store', default=None, help='Output file format')
self.parser.add_option('-f', '--snapfile', dest='snapfile', action='store', default=None, help='Snapshot file, use - for stdout')
self.parser.add_option('-p', '--password', dest='encryption_password', action='store', default=None, help='Snapshot File Encryption/Decryption Password')
# FIXME how to permit parameter lists for some of these
for backend in SnapshotTarget.BACKENDS:
self.parser.add_option('', '--' + backend, dest=backend, action='store_true', help='Enable ' + backend + ' snapshots/restoration')
self.parser.add_option('', '--no' + backend, dest=backend, action='store_false', help='Disable ' + backend + ' snapshots/restoration')
(options, args) = self.parser.parse_args()
if options.restore != False:
snap.config.options.mode = ConfigOptions.RESTORE
if options.backup != False:
snap.config.options.mode = ConfigOptions.BACKUP
if options.log_level:
snap.config.options.log_level = options.log_level
if options.outputformat != None:
snap.config.options.outputformat = options.outputformat
if options.snapfile != None:
snap.config.options.snapfile = options.snapfile
if options.encryption_password != None:
snap.config.options.encryption_password = options.encryption_password
for backend in SnapshotTarget.BACKENDS:
val = getattr(options, backend)
if val != None:
if type(val) == str:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
snap.config.options.target_backends[backend] = val
def verify_integrity(self):
'''
verify the integrity of the current option set
@raises - ArgError if the options are invalid
'''
if snap.config.options.mode | from snap.options import *
from snap.snapshottarget import SnapshotTarget | random_line_split |
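`ConfigFile.__parse` in this row treats a colon-delimited backend value as an include list, with a leading `!` marking an exclude, and coerces config strings to booleans via `string_to_bool`. A minimal standalone sketch of those two conventions (the example paths are hypothetical):

```python
def string_to_bool(s):
    """Mirror of ConfigFile.string_to_bool above."""
    if s in ("True", "true", "1"):
        return True
    if s in ("False", "false", "0"):
        return False
    return None

def split_includes(value):
    """Partition a colon-delimited target list; a leading '!' marks an exclude."""
    includes, excludes = [], []
    for item in value.split(":"):
        if item.startswith("!"):
            excludes.append(item[1:])
        else:
            includes.append(item)
    return includes, excludes

assert string_to_bool("true") is True and string_to_bool("0") is False
assert split_includes("/etc:!/etc/shadow") == (["/etc"], ["/etc/shadow"])
```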
config.py | under the terms of the GNU General Public License, Version 3,
# as published by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import os.path
import optparse, ConfigParser
import snap
from snap.options import *
from snap.snapshottarget import SnapshotTarget
from snap.exceptions import ArgError
class ConfigOptions:
"""Container holding all the configuration options available
to the Snap system"""
# modes of operation
RESTORE = 0
BACKUP = 1
def __init__(self):
'''initialize configuration'''
# mode of operation
self.mode = None
# mapping of targets to lists of backends to use when backing up / restoring them
self.target_backends = {}
# mapping of targets to lists of entities to include when backing up
self.target_includes = {}
# mapping of targets to lists of entities to exclude when backing up
self.target_excludes = {}
# output log level
# currently supports 'quiet', 'normal', 'verbose', 'debug'
self.log_level = 'normal'
# output format to backup / restore
self.outputformat = 'snapfile'
# location of the snapfile to backup to / restore from
self.snapfile = None
# Encryption/decryption password to use, if left as None, encryption will be disabled
self.encryption_password = None
# hash of key/value pairs of service-specific options
self.service_options = {}
for backend in SnapshotTarget.BACKENDS:
self.target_backends[backend] = False
self.target_includes[backend] = []
self.target_excludes[backend] = []
def log_level_at_least(self, comparison):
return (comparison == 'quiet') or \
(comparison == 'normal' and self.log_level != 'quiet') or \
(comparison == 'verbose' and (self.log_level == 'verbose' or self.log_level == 'debug')) or \
(comparison == 'debug' and self.log_level == 'debug')
class ConfigFile:
"""Represents the snap config file to be read and parsed"""
parser = None
def __init__(self, config_file):
'''
Initialize the config file, specifying its path
@param file - the path to the file to load
'''
# if config file doesn't exist, just ignore
if not os.path.exists(config_file):
if snap.config.options.log_level_at_least("verbose"):
snap.callback.snapcallback.warn("Config file " + config_file + " not found")
else:
self.parser = ConfigParser.ConfigParser()
self.parser.read(config_file)
self.__parse()
def string_to_bool(string):
'''Static helper to convert a string to a boolean value'''
if string == 'True' or string == 'true' or string == '1':
return True
elif string == 'False' or string == 'false' or string == '0':
return False
return None
string_to_bool = staticmethod(string_to_bool)
def string_to_array(string):
'''Static helper to convert a colon deliminated string to an array of strings'''
return string.split(':')
string_to_array = staticmethod(string_to_array)
def __get_bool(self, key, section='main'):
'''
Retreive the indicated boolean value from the config file
@param key - the string key corresponding to the boolean value to retrieve
@param section - the section to retrieve the value from
@returns - the value or False if not found
'''
try:
return ConfigFile.string_to_bool(self.parser.get(section, key))
except:
return None
def __get_string(self, key, section='main'):
'''
Retreive the indicated string value from the config file
@param key - the string key corresponding to the string value to retrieve
@param section - the section to retrieve the value from
@returns - the value or None if not found
'''
try:
return self.parser.get(section, key)
except:
return None
def | (self, section='main'):
'''return array of key/value pairs from the config file section
@param section - the section which to retrieve the key / values from
@returns - the array of key / value pairs or None if not found
'''
try:
return self.parser.items(section)
except:
return None
def __parse(self):
'''parse configuration out of the config file'''
for backend in SnapshotTarget.BACKENDS:
val = self.__get_bool(backend)
if val is not None:
snap.config.options.target_backends[backend] = val
else:
val = self.__get_string(backend)
if val:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
val = self.__get_bool('no' + backend)
if val:
snap.config.options.target_backends[backend] = False
of = self.__get_string('outputformat')
sf = self.__get_string('snapfile')
ll = self.__get_string('loglevel')
enp = self.__get_string('encryption_password')
if of != None:
snap.config.options.outputformat = of
if sf != None:
snap.config.options.snapfile = sf
if ll != None:
snap.config.options.log_level = ll
if enp != None:
snap.config.options.encryption_password = enp
services = self.__get_array('services')
if services:
for k, v in services:
snap.config.options.service_options[k] = v
class Config:
"""The configuration manager, used to set and verify snap config values
from the config file and command line. Primary interface to the
Configuration System"""
configoptions = None
parser = None
# read values from the config files and set them in the target ConfigOptions
def read_config(self):
# add conf stored in resources if running from local checkout
CONFIG_FILES.append(os.path.join(os.path.dirname(__file__), "..", "resources", "snap.conf"))
for config_file in CONFIG_FILES:
ConfigFile(config_file)
def parse_cli(self):
'''
parses the command line an set them in the target ConfigOptions
'''
usage = "usage: %prog [options] arg"
self.parser = optparse.OptionParser(usage, version=SNAP_VERSION)
self.parser.add_option('', '--restore', dest='restore', action='store_true', default=False, help='Restore snapshot')
self.parser.add_option('', '--backup', dest='backup', action='store_true', default=False, help='Take snapshot')
self.parser.add_option('-l', '--log-level', dest='log_level', action='store', default="normal", help='Log level (quiet, normal, verbose, debug)')
self.parser.add_option('-o', '--outputformat', dest='outputformat', action='store', default=None, help='Output file format')
self.parser.add_option('-f', '--snapfile', dest='snapfile', action='store', default=None, help='Snapshot file, use - for stdout')
self.parser.add_option('-p', '--password', dest='encryption_password', action='store', default=None, help='Snapshot File Encryption/Decryption Password')
# FIXME how to permit parameter lists for some of these
for backend in SnapshotTarget.BACKENDS:
self.parser.add_option('', '--' + backend, dest=backend, action='store_true', help='Enable ' + backend + ' snapshots/restoration')
self.parser.add_option('', '--no' + backend, dest=backend, action='store_false', help='Disable ' + backend + ' snapshots/restoration')
(options, args) = self.parser.parse_args()
if options.restore != False:
snap.config.options.mode = ConfigOptions.RESTORE
if options.backup != False:
snap.config.options.mode = ConfigOptions.BACKUP
if options.log_level:
snap.config.options.log_level = options.log_level
if options.outputformat != None:
snap.config.options.outputformat = options.outputformat
if options.snapfile != None:
snap.config.options.snapfile = options.snapfile
if options.encryption_password != None:
snap.config.options.encryption_password = options.encryption_password
for backend in SnapshotTarget.BACKENDS:
val = getattr(options, backend)
if val != None:
if type(val) == str:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
snap.config.options.target_backends[backend] = val
def verify_integrity(self):
'''
verify the integrity of the current option set
@raises - ArgError if the options are invalid
'''
if snap.config | __get_array | identifier_name |
config.py | under the terms of the GNU General Public License, Version 3,
# as published by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import os.path
import optparse, ConfigParser
import snap
from snap.options import *
from snap.snapshottarget import SnapshotTarget
from snap.exceptions import ArgError
class ConfigOptions:
"""Container holding all the configuration options available
to the Snap system"""
# modes of operation
RESTORE = 0
BACKUP = 1
def __init__(self):
'''initialize configuration'''
# mode of operation
self.mode = None
# mapping of targets to lists of backends to use when backing up / restoring them
self.target_backends = {}
# mapping of targets to lists of entities to include when backing up
self.target_includes = {}
# mapping of targets to lists of entities to exclude when backing up
self.target_excludes = {}
# output log level
# currently supports 'quiet', 'normal', 'verbose', 'debug'
self.log_level = 'normal'
# output format to backup / restore
self.outputformat = 'snapfile'
# location of the snapfile to backup to / restore from
self.snapfile = None
# Encryption/decryption password to use, if left as None, encryption will be disabled
self.encryption_password = None
# hash of key/value pairs of service-specific options
self.service_options = {}
for backend in SnapshotTarget.BACKENDS:
self.target_backends[backend] = False
self.target_includes[backend] = []
self.target_excludes[backend] = []
def log_level_at_least(self, comparison):
return (comparison == 'quiet') or \
(comparison == 'normal' and self.log_level != 'quiet') or \
(comparison == 'verbose' and (self.log_level == 'verbose' or self.log_level == 'debug')) or \
(comparison == 'debug' and self.log_level == 'debug')
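# e.g. with log_level set to 'verbose':
# log_level_at_least('normal') is True, log_level_at_least('debug') is False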
class ConfigFile:
"""Represents the snap config file to be read and parsed"""
parser = None
def __init__(self, config_file):
'''
Initialize the config file, specifying its path
@param file - the path to the file to load
'''
# if config file doesn't exist, just ignore
if not os.path.exists(config_file):
if snap.config.options.log_level_at_least("verbose"):
snap.callback.snapcallback.warn("Config file " + config_file + " not found")
else:
self.parser = ConfigParser.ConfigParser()
self.parser.read(config_file)
self.__parse()
def string_to_bool(string):
'''Static helper to convert a string to a boolean value'''
if string == 'True' or string == 'true' or string == '1':
return True
elif string == 'False' or string == 'false' or string == '0':
return False
return None
string_to_bool = staticmethod(string_to_bool)
def string_to_array(string):
'''Static helper to convert a colon-delimited string to an array of strings'''
return string.split(':')
string_to_array = staticmethod(string_to_array)
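# e.g. ConfigFile.string_to_array('/etc:!/etc/shadow') yields
# ['/etc', '!/etc/shadow']; entries with a leading '!' become excludes in
# __parse below (the paths here are illustrative only)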
def __get_bool(self, key, section='main'):
'''
Retrieve the indicated boolean value from the config file
@param key - the string key corresponding to the boolean value to retrieve
@param section - the section to retrieve the value from
@returns - the value or None if not found
'''
try:
return ConfigFile.string_to_bool(self.parser.get(section, key))
except:
return None
def __get_string(self, key, section='main'):
'''
Retrieve the indicated string value from the config file
@param key - the string key corresponding to the string value to retrieve
@param section - the section to retrieve the value from
@returns - the value or None if not found
'''
try:
return self.parser.get(section, key)
except:
return None
def __get_array(self, section='main'):
'''return array of key/value pairs from the config file section
@param section - the section from which to retrieve the key/value pairs
@returns - the array of key / value pairs or None if not found
'''
try:
return self.parser.items(section)
except:
return None
def __parse(self):
'''parse configuration out of the config file'''
for backend in SnapshotTarget.BACKENDS:
val = self.__get_bool(backend)
if val is not None:
snap.config.options.target_backends[backend] = val
else:
val = self.__get_string(backend)
if val:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
val = self.__get_bool('no' + backend)
if val:
snap.config.options.target_backends[backend] = False
of = self.__get_string('outputformat')
sf = self.__get_string('snapfile')
ll = self.__get_string('loglevel')
enp = self.__get_string('encryption_password')
if of != None:
snap.config.options.outputformat = of
if sf != None:
snap.config.options.snapfile = sf
if ll != None:
snap.config.options.log_level = ll
if enp != None:
snap.config.options.encryption_password = enp
services = self.__get_array('services')
if services:
for k, v in services:
snap.config.options.service_options[k] = v
class Config:
"""The configuration manager, used to set and verify snap config values
from the config file and command line. Primary interface to the
Configuration System"""
configoptions = None
parser = None
# read values from the config files and set them in the target ConfigOptions
def read_config(self):
# add conf stored in resources if running from local checkout
CONFIG_FILES.append(os.path.join(os.path.dirname(__file__), "..", "resources", "snap.conf"))
for config_file in CONFIG_FILES:
ConfigFile(config_file)
def parse_cli(self):
| if options.backup != False:
snap.config.options.mode = ConfigOptions.BACKUP
if options.log_level:
snap.config.options.log_level = options.log_level
if options.outputformat != None:
snap.config.options.outputformat = options.outputformat
if options.snapfile != None:
snap.config.options.snapfile = options.snapfile
if options.encryption_password != None:
snap.config.options.encryption_password = options.encryption_password
for backend in SnapshotTarget.BACKENDS:
val = getattr(options, backend)
if val != None:
if type(val) == str:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
snap.config.options.target_backends[backend] = val
def verify_integrity(self):
'''
verify the integrity of the current option set
@raises - ArgError if the options are invalid
'''
if snap.config.options.mode | '''
parses the command line and sets the options in the target ConfigOptions
'''
usage = "usage: %prog [options] arg"
self.parser = optparse.OptionParser(usage, version=SNAP_VERSION)
self.parser.add_option('', '--restore', dest='restore', action='store_true', default=False, help='Restore snapshot')
self.parser.add_option('', '--backup', dest='backup', action='store_true', default=False, help='Take snapshot')
self.parser.add_option('-l', '--log-level', dest='log_level', action='store', default="normal", help='Log level (quiet, normal, verbose, debug)')
self.parser.add_option('-o', '--outputformat', dest='outputformat', action='store', default=None, help='Output file format')
self.parser.add_option('-f', '--snapfile', dest='snapfile', action='store', default=None, help='Snapshot file, use - for stdout')
self.parser.add_option('-p', '--password', dest='encryption_password', action='store', default=None, help='Snapshot File Encryption/Decryption Password')
# FIXME how to permit parameter lists for some of these
for backend in SnapshotTarget.BACKENDS:
self.parser.add_option('', '--' + backend, dest=backend, action='store_true', help='Enable ' + backend + ' snapshots/restoration')
self.parser.add_option('', '--no' + backend, dest=backend, action='store_false', help='Disable ' + backend + ' snapshots/restoration')
(options, args) = self.parser.parse_args()
if options.restore != False:
snap.config.options.mode = ConfigOptions.RESTORE | identifier_body |
config.py | under the terms of the GNU General Public License, Version 3,
# as published by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import os.path
import optparse, ConfigParser
import snap
from snap.options import *
from snap.snapshottarget import SnapshotTarget
from snap.exceptions import ArgError
class ConfigOptions:
"""Container holding all the configuration options available
to the Snap system"""
# modes of operation
RESTORE = 0
BACKUP = 1
def __init__(self):
'''initialize configuration'''
# mode of operation
self.mode = None
# mapping of targets to lists of backends to use when backing up / restoring them
self.target_backends = {}
# mapping of targets to lists of entities to include when backing up
self.target_includes = {}
# mapping of targets to lists of entities to exclude when backing up
self.target_excludes = {}
# output log level
# currently supports 'quiet', 'normal', 'verbose', 'debug'
self.log_level = 'normal'
# output format to backup / restore
self.outputformat = 'snapfile'
# location of the snapfile to backup to / restore from
self.snapfile = None
# Encryption/decryption password to use, if left as None, encryption will be disabled
self.encryption_password = None
# hash of key/value pairs of service-specific options
self.service_options = {}
for backend in SnapshotTarget.BACKENDS:
self.target_backends[backend] = False
self.target_includes[backend] = []
self.target_excludes[backend] = []
def log_level_at_least(self, comparison):
return (comparison == 'quiet') or \
(comparison == 'normal' and self.log_level != 'quiet') or \
(comparison == 'verbose' and (self.log_level == 'verbose' or self.log_level == 'debug')) or \
(comparison == 'debug' and self.log_level == 'debug')
class ConfigFile:
"""Represents the snap config file to be read and parsed"""
parser = None
def __init__(self, config_file):
'''
Initialize the config file, specifying its path
@param file - the path to the file to load
'''
# if config file doesn't exist, just ignore
if not os.path.exists(config_file):
if snap.config.options.log_level_at_least("verbose"):
snap.callback.snapcallback.warn("Config file " + config_file + " not found")
else:
self.parser = ConfigParser.ConfigParser()
self.parser.read(config_file)
self.__parse()
def string_to_bool(string):
'''Static helper to convert a string to a boolean value'''
if string == 'True' or string == 'true' or string == '1':
return True
elif string == 'False' or string == 'false' or string == '0':
return False
return None
string_to_bool = staticmethod(string_to_bool)
def string_to_array(string):
'''Static helper to convert a colon-delimited string to an array of strings'''
return string.split(':')
string_to_array = staticmethod(string_to_array)
def __get_bool(self, key, section='main'):
'''
Retrieve the indicated boolean value from the config file
@param key - the string key corresponding to the boolean value to retrieve
@param section - the section to retrieve the value from
@returns - the value or None if not found
'''
try:
return ConfigFile.string_to_bool(self.parser.get(section, key))
except:
return None
def __get_string(self, key, section='main'):
'''
Retrieve the indicated string value from the config file
@param key - the string key corresponding to the string value to retrieve
@param section - the section to retrieve the value from
@returns - the value or None if not found
'''
try:
return self.parser.get(section, key)
except:
return None
def __get_array(self, section='main'):
'''return array of key/value pairs from the config file section
@param section - the section from which to retrieve the key/value pairs
@returns - the array of key / value pairs or None if not found
'''
try:
return self.parser.items(section)
except:
return None
def __parse(self):
'''parse configuration out of the config file'''
for backend in SnapshotTarget.BACKENDS:
val = self.__get_bool(backend)
if val is not None:
snap.config.options.target_backends[backend] = val
else:
val = self.__get_string(backend)
if val:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
val = self.__get_bool('no' + backend)
if val:
snap.config.options.target_backends[backend] = False
of = self.__get_string('outputformat')
sf = self.__get_string('snapfile')
ll = self.__get_string('loglevel')
enp = self.__get_string('encryption_password')
if of != None:
snap.config.options.outputformat = of
if sf != None:
snap.config.options.snapfile = sf
if ll != None:
snap.config.options.log_level = ll
if enp != None:
snap.config.options.encryption_password = enp
services = self.__get_array('services')
if services:
for k, v in services:
snap.config.options.service_options[k] = v
class Config:
"""The configuration manager, used to set and verify snap config values
from the config file and command line. Primary interface to the
Configuration System"""
configoptions = None
parser = None
# read values from the config files and set them in the target ConfigOptions
def read_config(self):
# add conf stored in resources if running from local checkout
CONFIG_FILES.append(os.path.join(os.path.dirname(__file__), "..", "resources", "snap.conf"))
for config_file in CONFIG_FILES:
ConfigFile(config_file)
def parse_cli(self):
'''
parses the command line and sets the options in the target ConfigOptions
'''
usage = "usage: %prog [options] arg"
self.parser = optparse.OptionParser(usage, version=SNAP_VERSION)
self.parser.add_option('', '--restore', dest='restore', action='store_true', default=False, help='Restore snapshot')
self.parser.add_option('', '--backup', dest='backup', action='store_true', default=False, help='Take snapshot')
self.parser.add_option('-l', '--log-level', dest='log_level', action='store', default="normal", help='Log level (quiet, normal, verbose, debug)')
self.parser.add_option('-o', '--outputformat', dest='outputformat', action='store', default=None, help='Output file format')
self.parser.add_option('-f', '--snapfile', dest='snapfile', action='store', default=None, help='Snapshot file, use - for stdout')
self.parser.add_option('-p', '--password', dest='encryption_password', action='store', default=None, help='Snapshot File Encryption/Decryption Password')
# FIXME how to permit parameter lists for some of these
for backend in SnapshotTarget.BACKENDS:
self.parser.add_option('', '--' + backend, dest=backend, action='store_true', help='Enable ' + backend + ' snapshots/restoration')
self.parser.add_option('', '--no' + backend, dest=backend, action='store_false', help='Disable ' + backend + ' snapshots/restoration')
(options, args) = self.parser.parse_args()
if options.restore != False:
snap.config.options.mode = ConfigOptions.RESTORE
if options.backup != False:
snap.config.options.mode = ConfigOptions.BACKUP
if options.log_level:
snap.config.options.log_level = options.log_level
if options.outputformat != None:
snap.config.options.outputformat = options.outputformat
if options.snapfile != None:
snap.config.options.snapfile = options.snapfile
if options.encryption_password != None:
snap.config.options.encryption_password = options.encryption_password
for backend in SnapshotTarget.BACKENDS:
val = getattr(options, backend)
if val != None:
|
def verify_integrity(self):
'''
verify the integrity of the current option set
@raises - ArgError if the options are invalid
'''
if snap.config.options | if type(val) == str:
snap.config.options.target_backends[backend] = True
val = ConfigFile.string_to_array(val)
for include in val:
if include[0] == '!':
snap.config.options.target_excludes[backend].append(include[1:])
else:
snap.config.options.target_includes[backend].append(include)
else:
snap.config.options.target_backends[backend] = val | conditional_block |
bam.js | {
obj = undefined;
}
if (typeof obj === 'object') {
return map(objectKeys(obj), function(k) {
var ks = encodeURIComponent(stringifyPrimitive(k)) + eq;
if (isArray(obj[k])) {
return map(obj[k], function(v) {
return ks + encodeURIComponent(stringifyPrimitive(v));
}).join(sep);
} else {
return ks + encodeURIComponent(stringifyPrimitive(obj[k]));
}
}).join(sep);
}
if (!name) return '';
return encodeURIComponent(stringifyPrimitive(name)) + eq +
encodeURIComponent(stringifyPrimitive(obj));
};
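// e.g. stringify({a: 1, b: ['x', 'y']}) === 'a=1&b=x&b=y'
// (array values expand into one key=value pair per element)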
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};
function map (xs, f) |
var objectKeys = Object.keys || function (obj) {
var res = [];
for (var key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key);
}
return res;
};
},{}],4:[function(require,module,exports){
'use strict';
exports.decode = exports.parse = require('./decode');
exports.encode = exports.stringify = require('./encode');
},{"./decode":2,"./encode":3}],5:[function(require,module,exports){
module.exports = require('backbone');
},{"backbone":1}],6:[function(require,module,exports){
var Backbone, Collection,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
Collection = (function(_super) {
__extends(Collection, _super);
function Collection() {
return Collection.__super__.constructor.apply(this, arguments);
}
/*
Returns the model at the index immediately before the passed in model
instance. If the model instance is the first model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.before = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === 0) {
return null;
}
return this.at(index - 1);
};
/*
Returns the model at the index immediately after the passed in model
instance. If the model instance is the last model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.after = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === this.length - 1) {
return null;
}
return this.at(index + 1);
};
/*
Convenience function for getting an array of all the models in a
collection
*/
Collection.prototype.all = function() {
return this.models.slice();
};
return Collection;
})(Backbone.Collection);
module.exports = Collection;
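// A minimal usage sketch (the model data is hypothetical):
// var list = new Collection([{id: 1}, {id: 2}, {id: 3}]);
// var second = list.at(1);
// list.before(second); // model with id 1
// list.after(second); // model with id 3
// list.all(); // a shallow copy of the models array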
},{"backbone":1}],7:[function(require,module,exports){
var Bam;
module.exports = Bam = {
Backbone: require('./backbone'),
Router: require('./router'),
View: require('./view'),
Model: require('./model'),
Collection: require('./collection')
};
},{"./backbone":5,"./collection":6,"./model":8,"./router":9,"./view":10}],8:[function(require,module,exports){
var Backbone, DEFAULT_CASTS, Model, any, map, _ref,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
_ref = require('underscore'), map = _ref.map, any = _ref.any;
DEFAULT_CASTS = {
string: function(v) {
return v + '';
},
int: function(v) {
return Math.floor(+v);
},
number: function(v) {
return +v;
},
date: function(v) {
return new Date(v);
},
boolean: function(v) {
return !!v;
}
};
Model = (function(_super) {
__extends(Model, _super);
function Model() {
return Model.__super__.constructor.apply(this, arguments);
}
/*
Allows derived get values. The format is:
derived:
foo:
deps: ['bar', 'baz']
value: (bar, baz) -> bar + ' ' + baz
Your deps define which properties will be passed to the value function and
in what order. They're also used to trigger change events for derived values
i.e., if a dep changes, the derived value will trigger a change too.
*/
Model.prototype.derived = {};
/*
Allows casting specific keys. The format is:
cast:
timestamp: (v) -> moment(v)
bar: 'string'
baz: 'int'
You can either provide your own function or use a provided basic cast. These
include:
* `'string'`: `(v) -> v + ''`
* `'int'`: `(v) -> Math.floor(+v)`
* `'number'`: `(v) -> +v`
* `'date'`: `(v) -> new Date(v)`
* `'boolean'`: (v) -> !!v
Doesn't cast derived or null values.
*/
Model.prototype.cast = {};
/*
Returns the model after this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.next = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.after(this) : void 0;
};
/*
Returns the model before this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.prev = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.before(this) : void 0;
};
/*
Returns a clone of the attributes object.
*/
Model.prototype.getAttributes = function() {
return Backbone.$.extend(true, {}, this.attributes);
};
/*
Override get to allow default value and derived values.
*/
Model.prototype.get = function(key, defaultValue) {
var ret;
if (this.derived[key]) {
ret = this._derive(this.derived[key]);
} else {
ret = Model.__super__.get.call(this, key);
}
if (ret === void 0) {
return defaultValue;
} else {
return ret;
}
};
/*
Derive a value from a definition
*/
Model.prototype._derive = function(definition) {
var args;
args = map(definition.deps, (function(_this) {
return function(key) {
return _this.get(key);
};
})(this));
return definition.value.apply(definition, args);
};
/*
Override the set method to allow for casting as data comes in.
*/
Model.prototype.set = function(key, val, options) {
var attrs, changed, definition, derived, ret, _ref1;
if (typeof key === 'object') {
attrs = key;
options = val;
} else {
attrs = {};
attrs[key] = val;
}
for (key in attrs) {
val = attrs[key];
if (val === null) {
continue;
}
if (this.cast[key]) {
attrs[key] = this._cast(val, this.cast[key]);
}
}
ret = Model.__super__.set.call(this, attrs, options);
_ref1 = this.derived;
for (derived in _ref1) {
definition = _ref1[derived];
changed = map(definition.deps, function(key) {
return attrs.hasOwnProperty(key);
});
if (any(changed)) {
this.trigger("change:" + derived, this._derive(definition));
}
}
return ret;
};
/*
Take a value, and a casting definition and perform the cast
*/
Model.prototype._cast = function(value, cast) {
var error;
try {
return value = this._getCastFunc(cast)(value);
} catch (_error) {
error = _error;
return value = null;
} finally {
return value;
}
};
/*
Given a casting definition, return a function that should perform the cast
*/
Model.prototype._getCastFunc = function(cast) {
var _ref1;
if (typeof cast === 'function') {
return cast;
}
return (_ref1 = DEFAULT_CASTS[cast]) != null ? _ref1 : function(v) {
return v;
};
};
return Model;
})(Backbone.Model);
module | {
if (xs.map) return xs.map(f);
var res = [];
for (var i = 0; i < xs.length; i++) {
res.push(f(xs[i], i));
}
return res;
} | identifier_body |
bam.js | return encodeURIComponent(stringifyPrimitive(name)) + eq +
encodeURIComponent(stringifyPrimitive(obj));
};
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};
function map (xs, f) {
if (xs.map) return xs.map(f);
var res = [];
for (var i = 0; i < xs.length; i++) {
res.push(f(xs[i], i));
}
return res;
}
var objectKeys = Object.keys || function (obj) {
var res = [];
for (var key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key);
}
return res;
};
},{}],4:[function(require,module,exports){
'use strict';
exports.decode = exports.parse = require('./decode');
exports.encode = exports.stringify = require('./encode');
},{"./decode":2,"./encode":3}],5:[function(require,module,exports){
module.exports = require('backbone');
},{"backbone":1}],6:[function(require,module,exports){
var Backbone, Collection,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
Collection = (function(_super) {
__extends(Collection, _super);
function Collection() {
return Collection.__super__.constructor.apply(this, arguments);
}
/*
Returns the model at the index immediately before the passed in model
instance. If the model instance is the first model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.before = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === 0) {
return null;
}
return this.at(index - 1);
};
/*
Returns the model at the index immediately after the passed in model
instance. If the model instance is the last model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.after = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === this.length - 1) {
return null;
}
return this.at(index + 1);
};
/*
Convenience function for getting an array of all the models in a
collection
*/
Collection.prototype.all = function() {
return this.models.slice();
};
return Collection;
})(Backbone.Collection);
module.exports = Collection;
},{"backbone":1}],7:[function(require,module,exports){
var Bam;
module.exports = Bam = {
Backbone: require('./backbone'),
Router: require('./router'),
View: require('./view'),
Model: require('./model'),
Collection: require('./collection')
};
},{"./backbone":5,"./collection":6,"./model":8,"./router":9,"./view":10}],8:[function(require,module,exports){
var Backbone, DEFAULT_CASTS, Model, any, map, _ref,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
_ref = require('underscore'), map = _ref.map, any = _ref.any;
DEFAULT_CASTS = {
string: function(v) {
return v + '';
},
int: function(v) {
return Math.floor(+v);
},
number: function(v) {
return +v;
},
date: function(v) {
return new Date(v);
},
boolean: function(v) {
return !!v;
}
};
Model = (function(_super) {
__extends(Model, _super);
function Model() {
return Model.__super__.constructor.apply(this, arguments);
}
/*
Allows derived get values. The format is:
derived:
foo:
deps: ['bar', 'baz']
value: (bar, baz) -> bar + ' ' + baz
Your deps define which properties will be passed to the value function and
in what order. They're also used to trigger change events for derived values
i.e., if a dep changes, the derived value will trigger a change too.
*/
Model.prototype.derived = {};
/*
Allows casting specific keys. The format is:
cast:
timestamp: (v) -> moment(v)
bar: 'string'
baz: 'int'
You can either provide your own function or use a provided basic cast. These
include:
* `'string'`: `(v) -> v + ''`
* `'int'`: `(v) -> Math.floor(+v)`
* `'number'`: `(v) -> +v`
* `'date'`: `(v) -> new Date(v)`
* `'boolean'`: (v) -> !!v
Doesn't cast derived or null values.
*/
Model.prototype.cast = {};
/*
Returns the model after this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.next = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.after(this) : void 0;
};
/*
Returns the model before this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.prev = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.before(this) : void 0;
};
/*
Returns a clone of the attributes object.
*/
Model.prototype.getAttributes = function() {
return Backbone.$.extend(true, {}, this.attributes);
};
/*
Override get to allow default value and derived values.
*/
Model.prototype.get = function(key, defaultValue) {
var ret;
if (this.derived[key]) {
ret = this._derive(this.derived[key]);
} else {
ret = Model.__super__.get.call(this, key);
}
if (ret === void 0) {
return defaultValue;
} else {
return ret;
}
};
/*
Derive a value from a definition
*/
Model.prototype._derive = function(definition) {
var args;
args = map(definition.deps, (function(_this) {
return function(key) {
return _this.get(key);
};
})(this));
return definition.value.apply(definition, args);
};
/*
Override the set method to allow for casting as data comes in.
*/
Model.prototype.set = function(key, val, options) {
var attrs, changed, definition, derived, ret, _ref1;
if (typeof key === 'object') {
attrs = key;
options = val;
} else {
attrs = {};
attrs[key] = val;
}
for (key in attrs) {
val = attrs[key];
if (val === null) {
continue;
}
if (this.cast[key]) {
attrs[key] = this._cast(val, this.cast[key]);
}
}
ret = Model.__super__.set.call(this, attrs, options);
_ref1 = this.derived;
for (derived in _ref1) {
definition = _ref1[derived];
changed = map(definition.deps, function(key) {
return attrs.hasOwnProperty(key);
});
if (any(changed)) {
this.trigger("change:" + derived, this._derive(definition));
}
}
return ret;
};
/*
Take a value, and a casting definition and perform the cast
*/
Model.prototype._cast = function(value, cast) {
var error;
try {
return value = this._getCastFunc(cast)(value);
} catch (_error) {
error = _error;
return value = null;
} finally {
return value;
}
};
/*
Given a casting definition, return a function that should perform the cast
*/
Model.prototype._getCastFunc = function(cast) {
var _ref1;
if (typeof cast === 'function') {
return cast;
}
return (_ref1 = DEFAULT_CASTS[cast]) != null ? _ref1 : function(v) {
return v;
};
};
return Model;
})(Backbone.Model);
module.exports = Model;
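// A minimal usage sketch (the attribute names and casts are hypothetical):
// var Person = Model.extend({
// cast: {age: 'int'},
// derived: {
// label: {deps: ['first', 'last'], value: function(f, l) { return f + ' ' + l; }}
// }
// });
// var p = new Person({first: 'Ada', last: 'Lovelace', age: '36.9'});
// p.get('age'); // 36, cast with Math.floor(+v)
// p.get('label'); // 'Ada Lovelace'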
},{"backbone":1,"underscore":1}],9:[function(require,module,exports){
var Backbone, Router, difference, extend, getIndexes, getNames, isFunction, isRegExp, keys, map, object, pluck, process, querystring, sortBy, splice, zip, _,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function | ctor | identifier_name |
|
bam.js | = new ctor(); child.__super__ = parent.prototype; return child; },
__slice = [].slice;
Backbone = require('backbone');
querystring = require('querystring');
_ = require('underscore');
extend = _.extend, object = _.object, isRegExp = _.isRegExp, isFunction = _.isFunction, zip = _.zip, pluck = _.pluck, sortBy = _.sortBy, keys = _.keys;
difference = _.difference, map = _.map;
getNames = function(string) {
var ret;
ret = [];
ret.push.apply(ret, process(string, /(\(\?)?:\w+/g));
ret.push.apply(ret, process(string, /\*\w+/g));
return ret;
};
process = function(string, regex) {
var indexes, matches, _ref;
matches = (_ref = string.match(regex)) != null ? _ref : [];
indexes = getIndexes(string, regex);
return zip(matches, indexes);
};
getIndexes = function(string, regex) {
var ret;
ret = [];
while (regex.test(string)) {
ret.push(regex.lastIndex);
}
return ret;
};
splice = function(source, from, to, replacement) {
if (replacement == null) {
replacement = '';
}
return source.slice(0, from) + replacement + source.slice(to);
};
Router = (function(_super) {
__extends(Router, _super);
/*
Override so our _routes object is unique to each router. I hate this side of
js.
*/
function Router() {
var args;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
this._routes = {};
Router.__super__.constructor.apply(this, args);
}
/*
Override route to perform some subtle tweaks! Namely, storing raw string
routes for reverse routing and passing the name to the buildRequest function
*/
Router.prototype.route = function(route, name, callback) {
if (!isRegExp(route)) {
this._routes[name] = route;
route = this._routeToRegExp(route);
}
if (isFunction(name)) {
callback = name;
name = '';
}
if (!callback) {
callback = this[name];
}
return Backbone.history.route(route, (function(_this) {
return function(fragment) {
var req;
req = _this._buildRequest(route, fragment, name);
_this.execute(callback, req);
_this.trigger.apply(_this, ['route:' + name, req]);
_this.trigger('route', name, req);
return Backbone.history.trigger('route', _this, name, req);
};
})(this));
};
/*
Store names of parameters in a property of route
*/
Router.prototype._routeToRegExp = function(route) {
var names, ret;
ret = Router.__super__._routeToRegExp.call(this, route);
names = getNames(route);
ret.names = map(pluck(sortBy(names, '1'), '0'), function(s) {
return s.slice(1);
});
return ret;
};
/*
Create a request object. It should have the route name, named params as
keys with their values and a query object which is the query params, an
empty object if no query params available.
*/
Router.prototype._buildRequest = function(route, fragment, name) {
var names, query, req, values, _ref, _ref1;
values = this._extractParameters(route, fragment);
query = fragment.split('?').slice(1).join('?');
if (values[values.length - 1] === query) {
values = values.slice(0, -1);
}
names = (_ref = route.names) != null ? _ref : map(values, function(v, i) {
return i;
});
req = {
route: (_ref1 = this._routes[name]) != null ? _ref1 : route,
fragment: fragment,
name: name,
values: values,
params: object(names, values),
query: querystring.parse(query)
};
return req;
};
/*
No-op to stop the routes property from being used
*/
Router.prototype._bindRoutes = function() {};
/*
Rather than the default backbone behaviour of applying the args to the
callback, call the callback with the request object.
*/
Router.prototype.execute = function(callback, req) {
if (callback) {
return callback.call(this, req);
}
};
/*
Reverse a named route with a barebones request object.
*/
Router.prototype.reverse = function(name, req) {
var diff, lastIndex, nameds, names, optional, optionals, params, query, ret, route, segment, value, _i, _j, _len, _len1, _ref, _ref1, _ref2, _ref3, _ref4;
route = this._routes[name];
if (!route) {
return null;
}
ret = route;
params = (_ref = req.params) != null ? _ref : {};
query = (_ref1 = req.query) != null ? _ref1 : {};
names = keys(params);
optionals = process(route, /\((.*?)\)/g).reverse();
for (_i = 0, _len = optionals.length; _i < _len; _i++) {
_ref2 = optionals[_i], optional = _ref2[0], lastIndex = _ref2[1];
nameds = map(pluck(getNames(optional), '0'), function(s) {
return s.slice(1);
});
diff = difference(nameds, names).length;
if (nameds.length === 0 || diff !== 0) {
route = splice(route, lastIndex - optional.length, lastIndex);
} else {
route = splice(route, lastIndex - optional.length, lastIndex, optional.slice(1, -1));
}
}
nameds = getNames(route).reverse();
for (_j = 0, _len1 = nameds.length; _j < _len1; _j++) {
_ref3 = nameds[_j], segment = _ref3[0], lastIndex = _ref3[1];
value = (_ref4 = params[segment.slice(1)]) != null ? _ref4 : null;
if (value !== null) {
route = splice(route, lastIndex - segment.length, lastIndex, params[segment.slice(1)]);
}
}
query = querystring.stringify(query);
if (query) {
route += '?' + query;
}
return route;
};
return Router;
})(Backbone.Router);
module.exports = Router;
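// A minimal reverse-routing sketch (the route string and params are
// hypothetical):
// var r = new Router();
// r.route('posts/:id(/comments)', 'post', function(req) {
// // req.params.id, req.query and req.fragment are available here
// });
// r.reverse('post', {params: {id: 42}, query: {page: 2}});
// // => 'posts/42?page=2' -- the unfilled optional group is dropped and the
// // query object is stringified back onto the fragment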
},{"backbone":1,"querystring":4,"underscore":1}],10:[function(require,module,exports){
var Backbone, View, difference, without, _ref,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
__indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; },
__slice = [].slice;
Backbone = require('backbone');
_ref = require('underscore'), without = _ref.without, difference = _ref.difference;
View = (function(_super) {
__extends(View, _super);
View.prototype.parent = null;
View.prototype.children = null;
View.prototype.namespace = '';
/*
Ensure the className is applied, then set the parent and children if any
are passed in. Runs the normal Backbone constructor and then performs the
first state change.
*/
function View(options) {
var _ref1;
this.children = [];
if (options.className) {
this.className = options.className;
}
if (options.namespace) {
this.namespace = options.namespace;
}
if (options.el) {
this._ensureClass(options.el);
}
if (options.parent) {
this.setParent(options.parent);
}
if ((_ref1 = options.children) != null ? _ref1.length : void 0) {
this.addChildren(options.children);
}
View.__super__.constructor.call(this, options);
}
/*
Used to ensure that the className property of the view is applied to an
el passed in as an option.
*/
View.prototype._ensureClass = function(el, className) {
if (className == null) {
className = this.className;
}
return Backbone.$(el).addClass(className);
};
/*
Adds a list of views as children of this view.
*/
View.prototype.addChildren = function(views) {
var view, _i, _len, _results;
_results = [];
for (_i = 0, _len = views.length; _i < _len; _i++) {
view = views[_i];
_results.push(this.addChild(view));
}
return _results;
};
/*
Adds a view as a child of this view.
*/
| View.prototype.addChild = function(view) { | random_line_split |
|
bam.js | {
obj = undefined;
}
if (typeof obj === 'object') {
return map(objectKeys(obj), function(k) {
var ks = encodeURIComponent(stringifyPrimitive(k)) + eq;
if (isArray(obj[k])) {
return map(obj[k], function(v) {
return ks + encodeURIComponent(stringifyPrimitive(v));
}).join(sep);
} else {
return ks + encodeURIComponent(stringifyPrimitive(obj[k]));
}
}).join(sep);
}
if (!name) return '';
return encodeURIComponent(stringifyPrimitive(name)) + eq +
encodeURIComponent(stringifyPrimitive(obj));
};
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};
function map (xs, f) {
if (xs.map) return xs.map(f);
var res = [];
for (var i = 0; i < xs.length; i++) {
res.push(f(xs[i], i));
}
return res;
}
var objectKeys = Object.keys || function (obj) {
var res = [];
for (var key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key);
}
return res;
};
},{}],4:[function(require,module,exports){
'use strict';
exports.decode = exports.parse = require('./decode');
exports.encode = exports.stringify = require('./encode');
},{"./decode":2,"./encode":3}],5:[function(require,module,exports){
module.exports = require('backbone');
},{"backbone":1}],6:[function(require,module,exports){
var Backbone, Collection,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
Collection = (function(_super) {
__extends(Collection, _super);
function Collection() {
return Collection.__super__.constructor.apply(this, arguments);
}
/*
Returns the model at the index immediately before the passed in model
instance. If the model instance is the first model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.before = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === 0) {
return null;
}
return this.at(index - 1);
};
/*
Returns the model at the index immediately after the passed in model
instance. If the model instance is the last model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.after = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === this.length - 1) {
return null;
}
return this.at(index + 1);
};
/*
Convenience function for getting an array of all the models in a
collection
*/
Collection.prototype.all = function() {
return this.models.slice();
};
return Collection;
})(Backbone.Collection);
module.exports = Collection;
},{"backbone":1}],7:[function(require,module,exports){
var Bam;
module.exports = Bam = {
Backbone: require('./backbone'),
Router: require('./router'),
View: require('./view'),
Model: require('./model'),
Collection: require('./collection')
};
},{"./backbone":5,"./collection":6,"./model":8,"./router":9,"./view":10}],8:[function(require,module,exports){
var Backbone, DEFAULT_CASTS, Model, any, map, _ref,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
_ref = require('underscore'), map = _ref.map, any = _ref.any;
DEFAULT_CASTS = {
string: function(v) {
return v + '';
},
int: function(v) {
return Math.floor(+v);
},
number: function(v) {
return +v;
},
date: function(v) {
return new Date(v);
},
boolean: function(v) {
return !!v;
}
};
Model = (function(_super) {
__extends(Model, _super);
function Model() {
return Model.__super__.constructor.apply(this, arguments);
}
/*
Allows derived get values. The format is:
derived:
foo:
deps: ['bar', 'baz']
value: (bar, baz) -> bar + ' ' + baz
Your deps define which properties will be passed to the value function and
in what order. They're also used to trigger change events for derived values
i.e., if a dep changes, the derived value will trigger a change too.
*/
Model.prototype.derived = {};
/*
Allows casting specific keys. The format is:
cast:
timestamp: (v) -> moment(v)
bar: 'string'
baz: 'int'
You can either provide your own function or use a provided basic cast. These
include:
* `'string'`: `(v) -> v + ''`
* `'int'`: `(v) -> Math.floor(+v)`
* `'number'`: `(v) -> +v`
* `'date'`: `(v) -> new Date(v)`
* `'boolean'`: (v) -> !!v
Doesn't cast derived or null values.
*/
Model.prototype.cast = {};
/*
Returns the model after this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.next = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.after(this) : void 0;
};
/*
Returns the model before this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.prev = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.before(this) : void 0;
};
/*
Returns a clone of the attributes object.
*/
Model.prototype.getAttributes = function() {
return Backbone.$.extend(true, {}, this.attributes);
};
/*
Override get to allow default value and derived values.
*/
Model.prototype.get = function(key, defaultValue) {
var ret;
if (this.derived[key]) {
ret = this._derive(this.derived[key]);
} else {
ret = Model.__super__.get.call(this, key);
}
if (ret === void 0) {
return defaultValue;
} else {
return ret;
}
};
/*
Derive a value from a definition
*/
Model.prototype._derive = function(definition) {
var args;
args = map(definition.deps, (function(_this) {
return function(key) {
return _this.get(key);
};
})(this));
return definition.value.apply(definition, args);
};
/*
Override the set method to allow for casting as data comes in.
*/
Model.prototype.set = function(key, val, options) {
var attrs, changed, definition, derived, ret, _ref1;
if (typeof key === 'object') {
attrs = key;
options = val;
} else {
attrs = {};
attrs[key] = val;
}
for (key in attrs) {
val = attrs[key];
if (val === null) {
continue;
}
if (this.cast[key]) {
attrs[key] = this._cast(val, this.cast[key]);
}
}
ret = Model.__super__.set.call(this, attrs, options);
_ref1 = this.derived;
for (derived in _ref1) {
definition = _ref1[derived];
changed = map(definition.deps, function(key) {
return attrs.hasOwnProperty(key);
});
if (any(changed)) |
}
return ret;
};
/*
Take a value, and a casting definition and perform the cast
*/
Model.prototype._cast = function(value, cast) {
var error;
try {
return value = this._getCastFunc(cast)(value);
} catch (_error) {
error = _error;
return value = null;
} finally {
return value;
}
};
/*
Given a casting definition, return a function that should perform the cast
*/
Model.prototype._getCastFunc = function(cast) {
var _ref1;
if (typeof cast === 'function') {
return cast;
}
return (_ref1 = DEFAULT_CASTS[cast]) != null ? _ref1 : function(v) {
return v;
};
};
return Model;
})(Backbone.Model);
module | {
this.trigger("change:" + derived, this._derive(definition));
} | conditional_block |
main.py | of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# This is an adaptation of the program found at:
# https://gist.github.com/nakagami/7a7d799bd4bd4ad8fcea96135c4af179
import os, sys, random, itertools, time
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append("../../atlastk")
import atlastk
EMPTY = 0
BLACK = -1
WHITE = 1
# http://uguisu.skr.jp/othello/5-1.html
WEIGHT_MATRIX = [
[120, -20, 20, 5, 5, 20, -20, 120],
[-20, -40, -5, -5, -5, -5, -40, -20],
[20, -5, 15, 3, 3, 15, -5, 20],
[5, -5, 3, 3, 3, 3, -5, 5],
[5, -5, 3, 3, 3, 3, -5, 5],
[20, -5, 15, 3, 3, 15, -5, 20],
[-20, -40, -5, -5, -5, -5, -40, -20],
[120, -20, 20, 5, 5, 20, -20, 120],
]
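# A note on the weights (an editorial gloss, not from the original gist):
# corners (120) can never be flipped once taken, the squares adjacent to a
# corner (-20/-40) tend to hand that corner to the opponent, and the
# remaining edge/centre squares carry small positive weights.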
class Reversi:
def reset(self):
self.board = []
for _ in range(8):
self.board.append([EMPTY] * 8)
self.board[3][3] = self.board[4][4] = BLACK
self.board[4][3] = self.board[3][4] = WHITE
def | (self, orig=None):
self.reset()
# copy constructor
if orig:
assert isinstance(orig, Reversi)
for i in range(8):
for j in range(8):
self.board[i][j] = orig.board[i][j]
def count(self, bwe):
"Count pieces or empty spaces in the board"
assert bwe in (BLACK, WHITE, EMPTY)
n = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bwe:
n += 1
return n
def _has_my_piece(self, bw, x, y, delta_x, delta_y):
"There is my piece in the direction of (delta_x, delta_y) from (x, y)."
assert bw in (BLACK, WHITE)
assert delta_x in (-1, 0, 1)
assert delta_y in (-1, 0, 1)
x += delta_x
y += delta_y
if x < 0 or x > 7 or y < 0 or y > 7 or self.board[x][y] == EMPTY:
return False
if self.board[x][y] == bw:
return True
return self._has_my_piece(bw, x, y, delta_x, delta_y)
def reversible_directions(self, bw, x, y):
"Can put piece on (x, y) ? Return list of reversible direction tuple"
assert bw in (BLACK, WHITE)
directions = []
if self.board[x][y] != EMPTY:
return directions
for d in itertools.product([-1, 1, 0], [-1, 1, 0]):
if d == (0, 0):
continue
nx = x + d[0]
ny = y + d[1]
if nx < 0 or nx > 7 or ny < 0 or ny > 7 or self.board[nx][ny] != bw * -1:
continue
if self._has_my_piece(bw, nx, ny, d[0], d[1]):
directions.append(d)
return directions
def _reverse_piece(self, bw, x, y, delta_x, delta_y):
"Reverse pieces in the direction of (delta_x, delta_y) from (x, y) untill bw."
assert bw in (BLACK, WHITE)
x += delta_x
y += delta_y
assert self.board[x][y] in (BLACK, WHITE)
if self.board[x][y] == bw:
return
self.board[x][y] = bw
return self._reverse_piece(bw, x, y, delta_x, delta_y)
def isAllowed(self, x, y, bw):
return len(self.reversible_directions(bw, x, y)) != 0
def put(self, x, y, bw):
"""
True: Put bw's piece on (x, y) and change board status.
False: Can't put bw's piece on (x, y)
"""
assert bw in (BLACK, WHITE)
directions = self.reversible_directions(bw, x, y)
if len(directions) == 0:
return False
self.board[x][y] = bw
for delta in directions:
self._reverse_piece(bw, x, y, delta[0], delta[1])
return True
def _calc_score(self, bw, weight_matrix):
assert bw in (BLACK, WHITE)
my_score = 0
against_score = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bw:
my_score += weight_matrix[i][j]
elif self.board[i][j] == bw * -1:
against_score += weight_matrix[i][j]
return my_score - against_score
def find_best_position(self, bw, weight_matrix):
"Return the best next position."
assert bw in (BLACK, WHITE)
next_positions = {}
for i in range(8):
for j in range(8):
reversi = Reversi(self)
if reversi.put(i, j, bw):
next_positions.setdefault(
reversi._calc_score(bw, weight_matrix), []
).append((i, j))
if next_positions:
next_position = random.choice(next_positions[max(next_positions)])
else:
next_position = None
return next_position
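# find_best_position is a greedy one-ply search: every legal move is scored
# with the weight matrix and one of the best is picked at random.
# A minimal usage sketch of the engine on its own (no Atlas UI involved;
# the coordinates are illustrative):
# game = Reversi()
# if game.isAllowed(2, 4, BLACK):
# game.put(2, 4, BLACK)
# reply = game.find_best_position(WHITE, WEIGHT_MATRIX)
# if reply:
# game.put(reply[0], reply[1], WHITE)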
# -------------------------------------------------------------------------------
def drawBoard(reversi, dom, prefetch=False):
board = atlastk.createHTML("tbody")
for y, row in enumerate(reversi.board):
board.push_tag("tr")
for x, r in enumerate(row):
board.push_tag("td")
board.put_attribute("id", str(x) + str(y))
if (r == EMPTY) and (reversi.isAllowed(y, x, reversi.player)):
board.put_attribute("xdh:onevent", "Play")
if (prefetch == True):
r = reversi.player
board.put_attribute(
"style", "opacity: 0.1; background-color: white;")
board.put_attribute(
"class", {EMPTY: 'none', BLACK: 'black', WHITE: 'white'}[r])
board.pop_tag()
board.pop_tag()
dom.inner("board", board)
dom.set_values({
"black": reversi.count(BLACK),
"white": reversi.count(WHITE)
})
def acConnect(reversi, dom):
reversi.player = BLACK
reversi.weight_matrix = WEIGHT_MATRIX
dom.inner("", open("Main.html").read())
drawBoard(reversi, dom)
dom.alert("Welcome to this Reversi (aka Othello) game made with the Atlas toolkit.\n\nYou play against the computer with the black pieces.")
def acPlay(reversi, dom, id):
xy = [int(id[1]), int(id[0])]
player = reversi.player
weight_matrix = reversi.weight_matrix
if (reversi.put(xy[0], xy[1], player)):
drawBoard(reversi, dom, False)
xy = reversi.find_best_position(player * -1, weight_matrix)
if xy:
reversi.put(xy[0], xy[1], player * -1)
time.sleep(1)
drawBoard(reversi, dom)
if (re | __init__ | identifier_name |
main.py | this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# This is an adaptation of the program found at:
# https://gist.github.com/nakagami/7a7d799bd4bd4ad8fcea96135c4af179
import os, sys, random, itertools, time
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append("../../atlastk")
import atlastk
EMPTY = 0
BLACK = -1
WHITE = 1
# http://uguisu.skr.jp/othello/5-1.html
WEIGHT_MATRIX = [
[120, -20, 20, 5, 5, 20, -20, 120],
[-20, -40, -5, -5, -5, -5, -40, -20],
[20, -5, 15, 3, 3, 15, -5, 20],
[5, -5, 3, 3, 3, 3, -5, 5],
[5, -5, 3, 3, 3, 3, -5, 5],
[20, -5, 15, 3, 3, 15, -5, 20],
[-20, -40, -5, -5, -5, -5, -40, -20],
[120, -20, 20, 5, 5, 20, -20, 120],
]
class Reversi:
def reset(self):
self.board = []
for _ in range(8):
self.board.append([EMPTY] * 8)
self.board[3][3] = self.board[4][4] = BLACK
self.board[4][3] = self.board[3][4] = WHITE
def __init__(self, orig=None):
self.reset()
# copy constructor
if orig:
assert isinstance(orig, Reversi)
for i in range(8):
for j in range(8):
self.board[i][j] = orig.board[i][j]
def count(self, bwe):
"Count pieces or empty spaces in the board"
assert bwe in (BLACK, WHITE, EMPTY)
n = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bwe:
n += 1
return n
def _has_my_piece(self, bw, x, y, delta_x, delta_y):
"There is my piece in the direction of (delta_x, delta_y) from (x, y)."
assert bw in (BLACK, WHITE)
assert delta_x in (-1, 0, 1)
assert delta_y in (-1, 0, 1)
x += delta_x
y += delta_y
if x < 0 or x > 7 or y < 0 or y > 7 or self.board[x][y] == EMPTY:
| def reversible_directions(self, bw, x, y):
"Can put piece on (x, y) ? Return list of reversible direction tuple"
assert bw in (BLACK, WHITE)
directions = []
if self.board[x][y] != EMPTY:
return directions
for d in itertools.product([-1, 1, 0], [-1, 1, 0]):
if d == (0, 0):
continue
nx = x + d[0]
ny = y + d[1]
if nx < 0 or nx > 7 or ny < 0 or ny > 7 or self.board[nx][ny] != bw * -1:
continue
if self._has_my_piece(bw, nx, ny, d[0], d[1]):
directions.append(d)
return directions
def _reverse_piece(self, bw, x, y, delta_x, delta_y):
"Reverse pieces in the direction of (delta_x, delta_y) from (x, y) untill bw."
assert bw in (BLACK, WHITE)
x += delta_x
y += delta_y
assert self.board[x][y] in (BLACK, WHITE)
if self.board[x][y] == bw:
return
self.board[x][y] = bw
return self._reverse_piece(bw, x, y, delta_x, delta_y)
def isAllowed(self, x, y, bw):
return len(self.reversible_directions(bw, x, y)) != 0
def put(self, x, y, bw):
"""
True: Put bw's piece on (x, y) and change board status.
False: Can't put bw's piece on (x, y)
"""
assert bw in (BLACK, WHITE)
directions = self.reversible_directions(bw, x, y)
if len(directions) == 0:
return False
self.board[x][y] = bw
for delta in directions:
self._reverse_piece(bw, x, y, delta[0], delta[1])
return True
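# Note that put() doubles as the legality check: it returns False and leaves
# the board untouched when the move flips nothing, which is how acPlay
# decides whether a click was a valid move.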
def _calc_score(self, bw, weight_matrix):
assert bw in (BLACK, WHITE)
my_score = 0
against_score = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bw:
my_score += weight_matrix[i][j]
elif self.board[i][j] == bw * -1:
against_score += weight_matrix[i][j]
return my_score - against_score
def find_best_position(self, bw, weight_matrix):
"Return the best next position."
assert bw in (BLACK, WHITE)
next_positions = {}
for i in range(8):
for j in range(8):
reversi = Reversi(self)
if reversi.put(i, j, bw):
next_positions.setdefault(
reversi._calc_score(bw, weight_matrix), []
).append((i, j))
if next_positions:
next_position = random.choice(next_positions[max(next_positions)])
else:
next_position = None
return next_position
# -------------------------------------------------------------------------------
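# With prefetch=True every legal square is drawn with the current player's
# piece at opacity 0.1 (a faded move preview); the default call just renders
# the board and refreshes both piece counters.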
def drawBoard(reversi, dom, prefetch=False):
board = atlastk.createHTML("tbody")
for y, row in enumerate(reversi.board):
board.push_tag("tr")
for x, r in enumerate(row):
board.push_tag("td")
board.put_attribute("id", str(x) + str(y))
if (r == EMPTY) and (reversi.isAllowed(y, x, reversi.player)):
board.put_attribute("xdh:onevent", "Play")
if (prefetch == True):
r = reversi.player
board.put_attribute(
"style", "opacity: 0.1; background-color: white;")
board.put_attribute(
"class", {EMPTY: 'none', BLACK: 'black', WHITE: 'white'}[r])
board.pop_tag()
board.pop_tag()
dom.inner("board", board)
dom.set_values({
"black": reversi.count(BLACK),
"white": reversi.count(WHITE)
})
def acConnect(reversi, dom):
reversi.player = BLACK
reversi.weight_matrix = WEIGHT_MATRIX
dom.inner("", open("Main.html").read())
drawBoard(reversi, dom)
dom.alert("Welcome to this Reversi (aka Othello) game made with the Atlas toolkit.\n\nYou play against the computer with the black pieces.")
def acPlay(reversi, dom, id):
xy = [int(id[1]), int(id[0])]
player = reversi.player
weight_matrix = reversi.weight_matrix
if (reversi.put(xy[0], xy[1], player)):
drawBoard(reversi, dom, False)
xy = reversi.find_best_position(player * -1, weight_matrix)
if xy:
reversi.put(xy[0], xy[1], player * -1)
time.sleep(1)
drawBoard(reversi, dom)
if (re | return False
if self.board[x][y] == bw:
return True
return self._has_my_piece(bw, x, y, delta_x, delta_y)
| random_line_split |
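# A minimal iterative sketch of the ray scan that _has_my_piece performs recursively
# above; scan_ray is a hypothetical name, and the board is assumed to use the same
# BLACK = -1, WHITE = 1, EMPTY = 0 encoding on an 8x8 grid.
def scan_ray(board, bw, x, y, dx, dy):
    # Step along (dx, dy); succeed only if the run of non-empty squares ends in bw.
    x, y = x + dx, y + dy
    while 0 <= x <= 7 and 0 <= y <= 7 and board[x][y] != 0:
        if board[x][y] == bw:
            return True
        x, y = x + dx, y + dy
    return False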
main.py | of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# This is the adaptation of the program found on:
# https://gist.github.com/nakagami/7a7d799bd4bd4ad8fcea96135c4af179
import os, sys, random, itertools, time
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append("../../atlastk")
import atlastk
EMPTY = 0
BLACK = -1
WHITE = 1
# http://uguisu.skr.jp/othello/5-1.html
WEIGHT_MATRIX = [
[120, -20, 20, 5, 5, 20, -20, 120],
[-20, -40, -5, -5, -5, -5, -40, -20],
[20, -5, 15, 3, 3, 15, -5, 20],
[5, -5, 3, 3, 3, 3, -5, 5],
[5, -5, 3, 3, 3, 3, -5, 5],
[20, -5, 15, 3, 3, 15, -5, 20],
[-20, -40, -5, -5, -5, -5, -40, -20],
[120, -20, 20, 5, 5, 20, -20, 120],
]
class Reversi:
def reset(self):
self.board = []
for _ in range(8):
self.board.append([EMPTY] * 8)
self.board[3][3] = self.board[4][4] = BLACK
self.board[4][3] = self.board[3][4] = WHITE
def __init__(self, orig=None):
self.reset()
# copy constructor
if orig:
assert isinstance(orig, Reversi)
for i in range(8):
for j in range(8):
self.board[i][j] = orig.board[i][j]
def count(self, bwe):
"Count pieces or empty spaces in the board"
assert bwe in (BLACK, WHITE, EMPTY)
n = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bwe:
n += 1
return n
def _has_my_piece(self, bw, x, y, delta_x, delta_y):
"There is my piece in the direction of (delta_x, delta_y) from (x, y)."
assert bw in (BLACK, WHITE)
assert delta_x in (-1, 0, 1)
assert delta_y in (-1, 0, 1)
x += delta_x
y += delta_y
if x < 0 or x > 7 or y < 0 or y > 7 or self.board[x][y] == EMPTY:
return False
if self.board[x][y] == bw:
return True
return self._has_my_piece(bw, x, y, delta_x, delta_y)
def reversible_directions(self, bw, x, y):
|
def _reverse_piece(self, bw, x, y, delta_x, delta_y):
"Reverse pieces in the direction of (delta_x, delta_y) from (x, y) untill bw."
assert bw in (BLACK, WHITE)
x += delta_x
y += delta_y
assert self.board[x][y] in (BLACK, WHITE)
if self.board[x][y] == bw:
return
self.board[x][y] = bw
return self._reverse_piece(bw, x, y, delta_x, delta_y)
def isAllowed(self, x, y, bw):
return len(self.reversible_directions(bw, x, y)) != 0
def put(self, x, y, bw):
"""
True: Put bw's piece on (x, y) and change board status.
False: Can't put bw's piece on (x, y)
"""
assert bw in (BLACK, WHITE)
directions = self.reversible_directions(bw, x, y)
if len(directions) == 0:
return False
self.board[x][y] = bw
for delta in directions:
self._reverse_piece(bw, x, y, delta[0], delta[1])
return True
def _calc_score(self, bw, weight_matrix):
assert bw in (BLACK, WHITE)
my_score = 0
against_score = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bw:
my_score += weight_matrix[i][j]
elif self.board[i][j] == bw * -1:
against_score += weight_matrix[i][j]
return my_score - against_score
def find_best_position(self, bw, weight_matrix):
"Return the best next position."
assert bw in (BLACK, WHITE)
next_positions = {}
for i in range(8):
for j in range(8):
reversi = Reversi(self)
if reversi.put(i, j, bw):
next_positions.setdefault(
reversi._calc_score(bw, weight_matrix), []
).append((i, j))
if next_positions:
next_position = random.choice(next_positions[max(next_positions)])
else:
next_position = None
return next_position
# -------------------------------------------------------------------------------
def drawBoard(reversi, dom, prefetch=False):
board = atlastk.createHTML("tbody")
for y, row in enumerate(reversi.board):
board.push_tag("tr")
for x, r in enumerate(row):
board.push_tag("td")
board.put_attribute("id", str(x) + str(y))
if (r == EMPTY) and (reversi.isAllowed(y, x, reversi.player)):
board.put_attribute("xdh:onevent", "Play")
if (prefetch == True):
r = reversi.player
board.put_attribute(
"style", "opacity: 0.1; background-color: white;")
board.put_attribute(
"class", {EMPTY: 'none', BLACK: 'black', WHITE: 'white'}[r])
board.pop_tag()
board.pop_tag()
dom.inner("board", board)
dom.set_values({
"black": reversi.count(BLACK),
"white": reversi.count(WHITE)
})
def acConnect(reversi, dom):
reversi.player = BLACK
reversi.weight_matrix = WEIGHT_MATRIX
dom.inner("", open("Main.html").read())
drawBoard(reversi, dom)
dom.alert("Welcome to this Reversi (aka Othello) game made with the Atlas toolkit.\n\nYou play against the computer with the black pieces.")
def acPlay(reversi, dom, id):
xy = [int(id[1]), int(id[0])]
player = reversi.player
weight_matrix = reversi.weight_matrix
if (reversi.put(xy[0], xy[1], player)):
drawBoard(reversi, dom, False)
xy = reversi.find_best_position(player * -1, weight_matrix)
if xy:
reversi.put(xy[0], xy[1], player * -1)
time.sleep(1)
drawBoard(reversi, dom)
if (revers | "Can a piece be put on (x, y)? Return a list of reversible direction tuples"
assert bw in (BLACK, WHITE)
directions = []
if self.board[x][y] != EMPTY:
return directions
for d in itertools.product([-1, 1, 0], [-1, 1, 0]):
if d == (0, 0):
continue
nx = x + d[0]
ny = y + d[1]
if nx < 0 or nx > 7 or ny < 0 or ny > 7 or self.board[nx][ny] != bw * -1:
continue
if self._has_my_piece(bw, nx, ny, d[0], d[1]):
directions.append(d)
return directions | identifier_body |
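# Hedged usage sketch for reversible_directions, assuming the Reversi class and the
# BLACK constant defined above. From the opening position set up in reset(), BLACK
# should have exactly four legal replies under this encoding.
r = Reversi()
legal = [(x, y) for x in range(8) for y in range(8)
         if r.reversible_directions(BLACK, x, y)]
assert sorted(legal) == [(2, 4), (3, 5), (4, 2), (5, 3)]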
main.py | of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# This is the adaptation of the program found on:
# https://gist.github.com/nakagami/7a7d799bd4bd4ad8fcea96135c4af179
import os, sys, random, itertools, time
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append("../../atlastk")
import atlastk
EMPTY = 0
BLACK = -1
WHITE = 1
# http://uguisu.skr.jp/othello/5-1.html
WEIGHT_MATRIX = [
[120, -20, 20, 5, 5, 20, -20, 120],
[-20, -40, -5, -5, -5, -5, -40, -20],
[20, -5, 15, 3, 3, 15, -5, 20],
[5, -5, 3, 3, 3, 3, -5, 5],
[5, -5, 3, 3, 3, 3, -5, 5],
[20, -5, 15, 3, 3, 15, -5, 20],
[-20, -40, -5, -5, -5, -5, -40, -20],
[120, -20, 20, 5, 5, 20, -20, 120],
]
class Reversi:
def reset(self):
self.board = []
for _ in range(8):
self.board.append([EMPTY] * 8)
self.board[3][3] = self.board[4][4] = BLACK
self.board[4][3] = self.board[3][4] = WHITE
def __init__(self, orig=None):
self.reset()
# copy constructor
if orig:
assert isinstance(orig, Reversi)
for i in range(8):
for j in range(8):
self.board[i][j] = orig.board[i][j]
def count(self, bwe):
"Count pieces or empty spaces in the board"
assert bwe in (BLACK, WHITE, EMPTY)
n = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bwe:
n += 1
return n
def _has_my_piece(self, bw, x, y, delta_x, delta_y):
"There is my piece in the direction of (delta_x, delta_y) from (x, y)."
assert bw in (BLACK, WHITE)
assert delta_x in (-1, 0, 1)
assert delta_y in (-1, 0, 1)
x += delta_x
y += delta_y
if x < 0 or x > 7 or y < 0 or y > 7 or self.board[x][y] == EMPTY:
return False
if self.board[x][y] == bw:
return True
return self._has_my_piece(bw, x, y, delta_x, delta_y)
def reversible_directions(self, bw, x, y):
"Can put piece on (x, y) ? Return list of reversible direction tuple"
assert bw in (BLACK, WHITE)
directions = []
if self.board[x][y] != EMPTY:
return directions
for d in itertools.product([-1, 1, 0], [-1, 1, 0]):
if d == (0, 0):
continue
nx = x + d[0]
ny = y + d[1]
if nx < 0 or nx > 7 or ny < 0 or ny > 7 or self.board[nx][ny] != bw * -1:
continue
if self._has_my_piece(bw, nx, ny, d[0], d[1]):
directions.append(d)
return directions
def _reverse_piece(self, bw, x, y, delta_x, delta_y):
"Reverse pieces in the direction of (delta_x, delta_y) from (x, y) untill bw."
assert bw in (BLACK, WHITE)
x += delta_x
y += delta_y
assert self.board[x][y] in (BLACK, WHITE)
if self.board[x][y] == bw:
return
self.board[x][y] = bw
return self._reverse_piece(bw, x, y, delta_x, delta_y)
def isAllowed(self, x, y, bw):
return len(self.reversible_directions(bw, x, y)) != 0
def put(self, x, y, bw):
"""
True: Put bw's piece on (x, y) and change board status.
False: Can't put bw's piece on (x, y)
"""
assert bw in (BLACK, WHITE)
directions = self.reversible_directions(bw, x, y)
if len(directions) == 0:
return False
self.board[x][y] = bw
for delta in directions:
self._reverse_piece(bw, x, y, delta[0], delta[1])
return True
def _calc_score(self, bw, weight_matrix):
assert bw in (BLACK, WHITE)
my_score = 0
against_score = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bw:
my_score += weight_matrix[i][j]
elif self.board[i][j] == bw * -1:
against_score += weight_matrix[i][j]
return my_score - against_score
def find_best_position(self, bw, weight_matrix):
"Return the best next position."
assert bw in (BLACK, WHITE)
next_positions = {}
for i in range(8):
for j in range(8):
reversi = Reversi(self)
if reversi.put(i, j, bw):
next_positions.setdefault(
reversi._calc_score(bw, weight_matrix), []
).append((i, j))
if next_positions:
next_position = random.choice(next_positions[max(next_positions)])
else:
next_position = None
return next_position
# -------------------------------------------------------------------------------
def drawBoard(reversi, dom, prefetch=False):
board = atlastk.createHTML("tbody")
for y, row in enumerate(reversi.board):
board.push_tag("tr")
for x, r in enumerate(row):
board.push_tag("td")
board.put_attribute("id", str(x) + str(y))
if (r == EMPTY) and (reversi.isAllowed(y, x, reversi.player)):
board.put_attribute("xdh:onevent", "Play")
if (prefetch == True):
|
board.put_attribute(
"class", {EMPTY: 'none', BLACK: 'black', WHITE: 'white'}[r])
board.pop_tag()
board.pop_tag()
dom.inner("board", board)
dom.set_values({
"black": reversi.count(BLACK),
"white": reversi.count(WHITE)
})
def acConnect(reversi, dom):
reversi.player = BLACK
reversi.weight_matrix = WEIGHT_MATRIX
dom.inner("", open("Main.html").read())
drawBoard(reversi, dom)
dom.alert("Welcome to this Reversi (aka Othello) game made with the Atlas toolkit.\n\nYou play against the computer with the black pieces.")
def acPlay(reversi, dom, id):
xy = [int(id[1]), int(id[0])]
player = reversi.player
weight_matrix = reversi.weight_matrix
if (reversi.put(xy[0], xy[1], player)):
drawBoard(reversi, dom, False)
xy = reversi.find_best_position(player * -1, weight_matrix)
if xy:
reversi.put(xy[0], xy[1], player * -1)
time.sleep(1)
drawBoard(reversi, dom)
if (re | r = reversi.player
board.put_attribute(
"style", "opacity: 0.1; background-color: white;") | conditional_block |
Signature.d.ts | declare namespace jsrsasign.KJUR.crypto {
/**
* Signature class which is very similar to java.security.Signature class
* @param params parameters for constructor
* @description
* As for params of the constructor's argument, the following attributes can be specified:
* - alg - signature algorithm name (ex. {MD5,SHA1,SHA224,SHA256,SHA384,SHA512,RIPEMD160}with{RSA,ECDSA,DSA})
* - provider - currently 'cryptojs/jsrsa' only
* <h4>SUPPORTED ALGORITHMS AND PROVIDERS</h4>
* This Signature class supports following signature algorithm and provider names:
* - MD5withRSA - cryptojs/jsrsa
* - SHA1withRSA - cryptojs/jsrsa
* - SHA224withRSA - cryptojs/jsrsa
* - SHA256withRSA - cryptojs/jsrsa
* - SHA384withRSA - cryptojs/jsrsa
* - SHA512withRSA - cryptojs/jsrsa
* - RIPEMD160withRSA - cryptojs/jsrsa
* - MD5withECDSA - cryptojs/jsrsa
* - SHA1withECDSA - cryptojs/jsrsa
* - SHA224withECDSA - cryptojs/jsrsa
* - SHA256withECDSA - cryptojs/jsrsa
* - SHA384withECDSA - cryptojs/jsrsa
* - SHA512withECDSA - cryptojs/jsrsa
* - RIPEMD160withECDSA - cryptojs/jsrsa
* - MD5withRSAandMGF1 - cryptojs/jsrsa
* - SHA1withRSAandMGF1 - cryptojs/jsrsa
* - SHA224withRSAandMGF1 - cryptojs/jsrsa
* - SHA256withRSAandMGF1 - cryptojs/jsrsa
* - SHA384withRSAandMGF1 - cryptojs/jsrsa
* - SHA512withRSAandMGF1 - cryptojs/jsrsa
* - RIPEMD160withRSAandMGF1 - cryptojs/jsrsa
* - SHA1withDSA - cryptojs/jsrsa
* - SHA224withDSA - cryptojs/jsrsa
* - SHA256withDSA - cryptojs/jsrsa
* Here are supported elliptic cryptographic curve names and their aliases for ECDSA:
* - secp256k1
* - secp256r1, NIST P-256, P-256, prime256v1
* - secp384r1, NIST P-384, P-384
* NOTE1: DSA signing algorithm is also supported since crypto 1.1.5.
* <h4>EXAMPLES</h4>
* @example
* // RSA signature generation
* var sig = new KJUR.crypto.Signature({"alg": "SHA1withRSA"});
* sig.init(prvKeyPEM);
* sig.updateString('aaa');
* var hSigVal = sig.sign();
*
* // DSA signature validation
* var sig2 = new KJUR.crypto.Signature({"alg": "SHA1withDSA"});
* sig2.init(certPEM);
* sig.updateString('aaa');
* var isValid = sig2.verify(hSigVal);
*
* // ECDSA signing
* var sig = new KJUR.crypto.Signature({'alg':'SHA1withECDSA'});
* sig.init(prvKeyPEM);
* sig.updateString('aaa');
* var sigValueHex = sig.sign();
*
* // ECDSA verifying
* var sig2 = new KJUR.crypto.Signature({'alg':'SHA1withECDSA'});
* sig.init(certPEM);
* sig.updateString('aaa');
* var isValid = sig.verify(sigValueHex);
*/
class | {
/** Current state of this signature object whether 'SIGN', 'VERIFY' or null */
static readonly state: 'SIGN' | 'VERIFY' | null;
constructor(params?: { alg?: string });
/**
* set signature algorithm and provider
* @param alg signature algorithm name
* @param prov provider name
* @example
* md.setAlgAndProvider('SHA1withRSA', 'cryptojs/jsrsa');
*/
setAlgAndProvider(alg: string, prov: string): void;
/**
* Initialize this object for signing or verifying depends on key
* @param key specifying public or private key as plain/encrypted PKCS#5/8 PEM file, certificate PEM or `RSAKey`, `KJUR.crypto.DSA` or `KJUR.crypto.ECDSA` object
* @param pass (OPTION) passcode for encrypted private key
* @description
* This method is a very useful initialization method for the Signature class since
* you just specify a key and this method will automatically initialize it
* using the `KEYUTIL.getKey` method.
* As for 'key', the following argument types are supported:
* __signing__
* - PEM formatted PKCS#8 encrypted RSA/ECDSA private key concluding "BEGIN ENCRYPTED PRIVATE KEY"
* - PEM formatted PKCS#5 encrypted RSA/DSA private key concluding "BEGIN RSA/DSA PRIVATE KEY" and ",ENCRYPTED"
* - PEM formatted PKCS#8 plain RSA/ECDSA private key concluding "BEGIN PRIVATE KEY"
* - PEM formatted PKCS#5 plain RSA/DSA private key concluding "BEGIN RSA/DSA PRIVATE KEY" without ",ENCRYPTED"
* - RSAKey object of private key
* - KJUR.crypto.ECDSA object of private key
* - KJUR.crypto.DSA object of private key
*
* __verification__
* - PEM formatted PKCS#8 RSA/EC/DSA public key concluding "BEGIN PUBLIC KEY"
* - PEM formatted X.509 certificate with RSA/EC/DSA public key concluding
* "BEGIN CERTIFICATE", "BEGIN X509 CERTIFICATE" or "BEGIN TRUSTED CERTIFICATE".
* - RSAKey object of public key
* - KJUR.crypto.ECDSA object of public key
* - KJUR.crypto.DSA object of public key
* @example
* sig.init(sCertPEM)
*/
init(key: string | RSAKey | DSA | ECDSA | ECCPrivateKey, pass?: string): void;
/**
* Updates the data to be signed or verified by a string
* @param str string to use for the update
* @example
* sig.updateString('aaa')
*/
updateString(str: string): void;
/**
* Updates the data to be signed or verified by a hexadecimal string
* @param hex hexadecimal string to use for the update
* @example
* sig.updateHex('1f2f3f')
*/
updateHex(hex: string): void;
/**
* Returns the signature bytes of all data updates as a hexadecimal string
* @return the signature bytes as a hexadecimal string
* @example
* var hSigValue = sig.sign()
*/
sign(): string;
/**
* performs a final update on the signature using a string, then returns the signature bytes of all data updates as a hexadecimal string
* @param str string to final update
* @return the signature bytes of a hexadecimal string
* @example
* var hSigValue = sig.signString('aaa')
*/
signString(str: string): string;
/**
* performs a final update on the signature using a hexadecimal string, then returns the signature bytes of all data updates as a hexadecimal string
* @param hex hexadecimal string to final update
* @return the signature bytes of a hexadecimal string
* @example
* var hSigValue = sig.signHex('1fdc33')
*/
signHex(hex: string): string;
/**
* verifies the passed-in signature.
* @param hSigVal string to final update
* @return true if the signature was verified, otherwise false
* @example
* var isValid = sig.verify('1fbcefdca4823a7(snip)')
*/
verify(hSigVal: string): boolean;
}
}
| Signature | identifier_name |
Signature.d.ts | declare namespace jsrsasign.KJUR.crypto {
/**
* Signature class which is very similar to java.security.Signature class
* @param params parameters for constructor
* @description
* As for params of the constructor's argument, the following attributes can be specified:
* - alg - signature algorithm name (ex. {MD5,SHA1,SHA224,SHA256,SHA384,SHA512,RIPEMD160}with{RSA,ECDSA,DSA})
* - provider - currently 'cryptojs/jsrsa' only
* <h4>SUPPORTED ALGORITHMS AND PROVIDERS</h4>
* This Signature class supports following signature algorithm and provider names:
* - MD5withRSA - cryptojs/jsrsa
* - SHA1withRSA - cryptojs/jsrsa
* - SHA224withRSA - cryptojs/jsrsa
* - SHA256withRSA - cryptojs/jsrsa
* - SHA384withRSA - cryptojs/jsrsa
* - SHA512withRSA - cryptojs/jsrsa
* - RIPEMD160withRSA - cryptojs/jsrsa
* - MD5withECDSA - cryptojs/jsrsa
* - SHA1withECDSA - cryptojs/jsrsa
* - SHA224withECDSA - cryptojs/jsrsa
* - SHA256withECDSA - cryptojs/jsrsa
* - SHA384withECDSA - cryptojs/jsrsa
* - SHA512withECDSA - cryptojs/jsrsa
* - RIPEMD160withECDSA - cryptojs/jsrsa
* - MD5withRSAandMGF1 - cryptojs/jsrsa
* - SHA1withRSAandMGF1 - cryptojs/jsrsa
* - SHA224withRSAandMGF1 - cryptojs/jsrsa
* - SHA256withRSAandMGF1 - cryptojs/jsrsa
* - SHA384withRSAandMGF1 - cryptojs/jsrsa
* - SHA512withRSAandMGF1 - cryptojs/jsrsa
* - RIPEMD160withRSAandMGF1 - cryptojs/jsrsa
* - SHA1withDSA - cryptojs/jsrsa
* - SHA224withDSA - cryptojs/jsrsa
* - SHA256withDSA - cryptojs/jsrsa
* Here are supported elliptic cryptographic curve names and their aliases for ECDSA:
* - secp256k1
* - secp256r1, NIST P-256, P-256, prime256v1
* - secp384r1, NIST P-384, P-384
* NOTE1: DSA signing algorithm is also supported since crypto 1.1.5.
* <h4>EXAMPLES</h4>
* @example
* // RSA signature generation | * sig.init(prvKeyPEM);
* sig.updateString('aaa');
* var hSigVal = sig.sign();
*
* // DSA signature validation
* var sig2 = new KJUR.crypto.Signature({"alg": "SHA1withDSA"});
* sig2.init(certPEM);
* sig.updateString('aaa');
* var isValid = sig2.verify(hSigVal);
*
* // ECDSA signing
* var sig = new KJUR.crypto.Signature({'alg':'SHA1withECDSA'});
* sig.init(prvKeyPEM);
* sig.updateString('aaa');
* var sigValueHex = sig.sign();
*
* // ECDSA verifying
* var sig2 = new KJUR.crypto.Signature({'alg':'SHA1withECDSA'});
* sig.init(certPEM);
* sig.updateString('aaa');
* var isValid = sig.verify(sigValueHex);
*/
class Signature {
/** Current state of this signature object whether 'SIGN', 'VERIFY' or null */
static readonly state: 'SIGN' | 'VERIFY' | null;
constructor(params?: { alg?: string });
/**
* set signature algorithm and provider
* @param alg signature algorithm name
* @param prov provider name
* @example
* md.setAlgAndProvider('SHA1withRSA', 'cryptojs/jsrsa');
*/
setAlgAndProvider(alg: string, prov: string): void;
/**
* Initialize this object for signing or verifying depends on key
* @param key specifying public or private key as plain/encrypted PKCS#5/8 PEM file, certificate PEM or `RSAKey`, `KJUR.crypto.DSA` or `KJUR.crypto.ECDSA` object
* @param pass (OPTION) passcode for encrypted private key
* @description
* This method is a very useful initialization method for the Signature class since
* you just specify a key and this method will automatically initialize it
* using the `KEYUTIL.getKey` method.
* As for 'key', the following argument types are supported:
* __signing__
* - PEM formatted PKCS#8 encrypted RSA/ECDSA private key concluding "BEGIN ENCRYPTED PRIVATE KEY"
* - PEM formatted PKCS#5 encrypted RSA/DSA private key concluding "BEGIN RSA/DSA PRIVATE KEY" and ",ENCRYPTED"
* - PEM formatted PKCS#8 plain RSA/ECDSA private key concluding "BEGIN PRIVATE KEY"
* - PEM formatted PKCS#5 plain RSA/DSA private key concluding "BEGIN RSA/DSA PRIVATE KEY" without ",ENCRYPTED"
* - RSAKey object of private key
* - KJUR.crypto.ECDSA object of private key
* - KJUR.crypto.DSA object of private key
*
* __verification__
* - PEM formatted PKCS#8 RSA/EC/DSA public key concluding "BEGIN PUBLIC KEY"
* - PEM formatted X.509 certificate with RSA/EC/DSA public key concluding
* "BEGIN CERTIFICATE", "BEGIN X509 CERTIFICATE" or "BEGIN TRUSTED CERTIFICATE".
* - RSAKey object of public key
* - KJUR.crypto.ECDSA object of public key
* - KJUR.crypto.DSA object of public key
* @example
* sig.init(sCertPEM)
*/
init(key: string | RSAKey | DSA | ECDSA | ECCPrivateKey, pass?: string): void;
/**
* Updates the data to be signed or verified by a string
* @param str string to use for the update
* @example
* sig.updateString('aaa')
*/
updateString(str: string): void;
/**
* Updates the data to be signed or verified by a hexadecimal string
* @param hex hexadecimal string to use for the update
* @example
* sig.updateHex('1f2f3f')
*/
updateHex(hex: string): void;
/**
* Returns the signature bytes of all data updates as a hexadecimal string
* @return the signature bytes as a hexadecimal string
* @example
* var hSigValue = sig.sign()
*/
sign(): string;
/**
* performs a final update on the signature using a string, then returns the signature bytes of all data updates as a hexadecimal string
* @param str string to final update
* @return the signature bytes of a hexadecimal string
* @example
* var hSigValue = sig.signString('aaa')
*/
signString(str: string): string;
/**
* performs a final update on the signature using a hexadecimal string, then returns the signature bytes of all data updates as a hexadecimal string
* @param hex hexadecimal string to final update
* @return the signature bytes of a hexadecimal string
* @example
* var hSigValue = sig.signHex('1fdc33')
*/
signHex(hex: string): string;
/**
* verifies the passed-in signature.
* @param hSigVal string to final update
* @return true if the signature was verified, otherwise false
* @example
* var isValid = sig.verify('1fbcefdca4823a7(snip)')
*/
verify(hSigVal: string): boolean;
}
} | * var sig = new KJUR.crypto.Signature({"alg": "SHA1withRSA"}); | random_line_split |
common.py | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
import os
import traceback
from pysollib.mygettext import _
from pysollib.settings import TITLE
from pysollib.settings import VERSION
from pysollib.settings import TOOLKIT, USE_TILE
from pysollib.settings import DEBUG
from pysollib.mfxutil import print_err
if TOOLKIT == 'tk':
if USE_TILE:
from pysollib.tile import ttk
def init_tile(app, top):
# load available themes
d = os.path.join(app.dataloader.dir, 'themes')
if os.path.isdir(d):
top.tk.eval('global auto_path; lappend auto_path {%s}' % d)
for t in os.listdir(d):
if os.path.exists(os.path.join(d, t, 'pkgIndex.tcl')):
try:
top.tk.eval('package require ttk::theme::'+t)
# print 'load theme:', t
except Exception:
traceback.print_exc()
pass
def set_theme(app, top, theme):
# set theme
style = ttk.Style(top)
try:
style.theme_use(theme)
except Exception:
print_err(_('invalid theme name: ') + theme)
style.theme_use(app.opt.default_tile_theme)
def | (font):
# create font name
# i.e. "helvetica 12" -> ("helvetica", 12, "roman", "normal")
if (TOOLKIT == 'kivy'):
return "helvetica 12"
from six.moves.tkinter_font import Font
font_name = None
try:
f = Font(font=font)
except Exception:
print_err(_('invalid font name: ') + font)
if DEBUG:
traceback.print_exc()
else:
fa = f.actual()
font_name = (fa['family'],
fa['size'],
fa['slant'],
fa['weight'])
return font_name
def base_init_root_window(root, app):
# root.wm_group(root)
root.wm_title(TITLE + ' ' + VERSION)
root.wm_iconname(TITLE + ' ' + VERSION)
# set minsize
sw, sh = (root.winfo_screenwidth(), root.winfo_screenheight())
if sw < 640 or sh < 480:
root.wm_minsize(400, 300)
else:
root.wm_minsize(520, 360)
if TOOLKIT == 'gtk':
pass
if TOOLKIT == 'kivy':
pass
elif USE_TILE:
theme = app.opt.tile_theme
init_tile(app, root)
set_theme(app, root, theme)
else:
pass
class BaseTkSettings:
canvas_padding = (0, 0)
horizontal_toolbar_padding = (0, 0)
vertical_toolbar_padding = (0, 1)
toolbar_button_padding = (2, 2)
toolbar_label_padding = (4, 4)
if USE_TILE:
toolbar_relief = 'flat'
toolbar_borderwidth = 0
else:
toolbar_relief = 'raised'
toolbar_button_relief = 'flat'
toolbar_separator_relief = 'sunken'
toolbar_borderwidth = 1
toolbar_button_borderwidth = 1
| get_font_name | identifier_name |
common.py | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
import os
import traceback
from pysollib.mygettext import _
from pysollib.settings import TITLE
from pysollib.settings import VERSION
from pysollib.settings import TOOLKIT, USE_TILE
from pysollib.settings import DEBUG
from pysollib.mfxutil import print_err
if TOOLKIT == 'tk':
if USE_TILE:
from pysollib.tile import ttk
def init_tile(app, top):
# load available themes
d = os.path.join(app.dataloader.dir, 'themes')
if os.path.isdir(d):
top.tk.eval('global auto_path; lappend auto_path {%s}' % d)
for t in os.listdir(d):
if os.path.exists(os.path.join(d, t, 'pkgIndex.tcl')):
try:
top.tk.eval('package require ttk::theme::'+t)
# print 'load theme:', t
except Exception:
traceback.print_exc()
pass
def set_theme(app, top, theme):
# set theme
style = ttk.Style(top)
try:
style.theme_use(theme)
except Exception:
print_err(_('invalid theme name: ') + theme)
style.theme_use(app.opt.default_tile_theme)
def get_font_name(font):
# create font name
# i.e. "helvetica 12" -> ("helvetica", 12, "roman", "normal")
if (TOOLKIT == 'kivy'):
return "helvetica 12"
from six.moves.tkinter_font import Font
font_name = None
try:
f = Font(font=font)
except Exception:
print_err(_('invalid font name: ') + font)
if DEBUG:
traceback.print_exc()
else:
fa = f.actual()
font_name = (fa['family'],
fa['size'],
fa['slant'],
fa['weight'])
return font_name |
def base_init_root_window(root, app):
# root.wm_group(root)
root.wm_title(TITLE + ' ' + VERSION)
root.wm_iconname(TITLE + ' ' + VERSION)
# set minsize
sw, sh = (root.winfo_screenwidth(), root.winfo_screenheight())
if sw < 640 or sh < 480:
root.wm_minsize(400, 300)
else:
root.wm_minsize(520, 360)
if TOOLKIT == 'gtk':
pass
if TOOLKIT == 'kivy':
pass
elif USE_TILE:
theme = app.opt.tile_theme
init_tile(app, root)
set_theme(app, root, theme)
else:
pass
class BaseTkSettings:
canvas_padding = (0, 0)
horizontal_toolbar_padding = (0, 0)
vertical_toolbar_padding = (0, 1)
toolbar_button_padding = (2, 2)
toolbar_label_padding = (4, 4)
if USE_TILE:
toolbar_relief = 'flat'
toolbar_borderwidth = 0
else:
toolbar_relief = 'raised'
toolbar_button_relief = 'flat'
toolbar_separator_relief = 'sunken'
toolbar_borderwidth = 1
toolbar_button_borderwidth = 1 | random_line_split |
|
common.py | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
import os
import traceback
from pysollib.mygettext import _
from pysollib.settings import TITLE
from pysollib.settings import VERSION
from pysollib.settings import TOOLKIT, USE_TILE
from pysollib.settings import DEBUG
from pysollib.mfxutil import print_err
if TOOLKIT == 'tk':
if USE_TILE:
from pysollib.tile import ttk
def init_tile(app, top):
# load available themes
d = os.path.join(app.dataloader.dir, 'themes')
if os.path.isdir(d):
top.tk.eval('global auto_path; lappend auto_path {%s}' % d)
for t in os.listdir(d):
if os.path.exists(os.path.join(d, t, 'pkgIndex.tcl')):
try:
top.tk.eval('package require ttk::theme::'+t)
# print 'load theme:', t
except Exception:
traceback.print_exc()
pass
def set_theme(app, top, theme):
# set theme
style = ttk.Style(top)
try:
style.theme_use(theme)
except Exception:
print_err(_('invalid theme name: ') + theme)
style.theme_use(app.opt.default_tile_theme)
def get_font_name(font):
# create font name
# i.e. "helvetica 12" -> ("helvetica", 12, "roman", "normal")
if (TOOLKIT == 'kivy'):
return "helvetica 12"
from six.moves.tkinter_font import Font
font_name = None
try:
f = Font(font=font)
except Exception:
print_err(_('invalid font name: ') + font)
if DEBUG:
traceback.print_exc()
else:
fa = f.actual()
font_name = (fa['family'],
fa['size'],
fa['slant'],
fa['weight'])
return font_name
def base_init_root_window(root, app):
# root.wm_group(root)
root.wm_title(TITLE + ' ' + VERSION)
root.wm_iconname(TITLE + ' ' + VERSION)
# set minsize
sw, sh = (root.winfo_screenwidth(), root.winfo_screenheight())
if sw < 640 or sh < 480:
root.wm_minsize(400, 300)
else:
root.wm_minsize(520, 360)
if TOOLKIT == 'gtk':
pass
if TOOLKIT == 'kivy':
pass
elif USE_TILE:
theme = app.opt.tile_theme
init_tile(app, root)
set_theme(app, root, theme)
else:
pass
class BaseTkSettings:
| canvas_padding = (0, 0)
horizontal_toolbar_padding = (0, 0)
vertical_toolbar_padding = (0, 1)
toolbar_button_padding = (2, 2)
toolbar_label_padding = (4, 4)
if USE_TILE:
toolbar_relief = 'flat'
toolbar_borderwidth = 0
else:
toolbar_relief = 'raised'
toolbar_button_relief = 'flat'
toolbar_separator_relief = 'sunken'
toolbar_borderwidth = 1
toolbar_button_borderwidth = 1 | identifier_body |
|
common.py | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
import os
import traceback
from pysollib.mygettext import _
from pysollib.settings import TITLE
from pysollib.settings import VERSION
from pysollib.settings import TOOLKIT, USE_TILE
from pysollib.settings import DEBUG
from pysollib.mfxutil import print_err
if TOOLKIT == 'tk':
if USE_TILE:
from pysollib.tile import ttk
def init_tile(app, top):
# load available themes
d = os.path.join(app.dataloader.dir, 'themes')
if os.path.isdir(d):
top.tk.eval('global auto_path; lappend auto_path {%s}' % d)
for t in os.listdir(d):
if os.path.exists(os.path.join(d, t, 'pkgIndex.tcl')):
try:
top.tk.eval('package require ttk::theme::'+t)
# print 'load theme:', t
except Exception:
traceback.print_exc()
pass
def set_theme(app, top, theme):
# set theme
style = ttk.Style(top)
try:
style.theme_use(theme)
except Exception:
print_err(_('invalid theme name: ') + theme)
style.theme_use(app.opt.default_tile_theme)
def get_font_name(font):
# create font name
# i.e. "helvetica 12" -> ("helvetica", 12, "roman", "normal")
if (TOOLKIT == 'kivy'):
|
from six.moves.tkinter_font import Font
font_name = None
try:
f = Font(font=font)
except Exception:
print_err(_('invalid font name: ') + font)
if DEBUG:
traceback.print_exc()
else:
fa = f.actual()
font_name = (fa['family'],
fa['size'],
fa['slant'],
fa['weight'])
return font_name
def base_init_root_window(root, app):
# root.wm_group(root)
root.wm_title(TITLE + ' ' + VERSION)
root.wm_iconname(TITLE + ' ' + VERSION)
# set minsize
sw, sh = (root.winfo_screenwidth(), root.winfo_screenheight())
if sw < 640 or sh < 480:
root.wm_minsize(400, 300)
else:
root.wm_minsize(520, 360)
if TOOLKIT == 'gtk':
pass
if TOOLKIT == 'kivy':
pass
elif USE_TILE:
theme = app.opt.tile_theme
init_tile(app, root)
set_theme(app, root, theme)
else:
pass
class BaseTkSettings:
canvas_padding = (0, 0)
horizontal_toolbar_padding = (0, 0)
vertical_toolbar_padding = (0, 1)
toolbar_button_padding = (2, 2)
toolbar_label_padding = (4, 4)
if USE_TILE:
toolbar_relief = 'flat'
toolbar_borderwidth = 0
else:
toolbar_relief = 'raised'
toolbar_button_relief = 'flat'
toolbar_separator_relief = 'sunken'
toolbar_borderwidth = 1
toolbar_button_borderwidth = 1
| return "helvetica 12" | conditional_block |
Zinser_Assignment8.py | intended letter as key, and observed letters with frequencies as value
emis_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in letters:
emis_freq[i] = {}
for j in letters:
emis_freq[i][j] = 0
#Transition dictionary
#Dictionary that stores the first letter (t) as the key, and second letter (t+1) as the second key with frequencies as value
tran_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in (letters+"_"):
tran_freq[i] = {}
for j in (letters+"_"):
tran_freq[i][j] = 0
#Initial dictionary
#Dictionary to store frequency that a letter occurs in the first col (hidden, actual)
init_freq = {}
#Fill dictionary with letter entries (init to 0)
for i in (letters+"_"):
init_freq[i] = 0
#Open the file
with open(name,"r") as data_in:
#Store the last char
last_char = ""
#Bool to see if this is the first char
first_char = True
#Iterate through the file line by line
for i in data_in.readlines():
#Initial
#Increment the first col character's frequency in the initial dict
init_freq[i[0]] += 1
#Transition
#Make sure this isn't the first
if first_char:
first_char = False
#Otherwise add to the transition frequency dict
else:
tran_freq[last_char][i[0]] += 1
#Set the last char to be the current first col char that we have added to the dict
last_char = i[0]
#Check if this line is a separation between words ("_")
if i[0] == "_":
#Append word to list of words
first_col.append(word1)
second_col.append(word2)
#Reset temporary word storage
word1 = ""
word2 = ""
#Otherwise line is letter
else:
#Append letters to their temporary storage containers
word1 += i[0]
word2 += i[2]
if i[2] in emis_freq[i[0]]:
emis_freq[i[0]][i[2]] += 1
else:
emis_freq[i[0]][i[2]] = 1
#Cleanup since data file doesn't end in a "_ _" line
first_col.append(word1)
second_col.append(word2)
'''Emission Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in emis_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
tot += emis_freq[i][j]
#Add 'tot' entry to dict
emis_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
emis_prob = {}
#Iterate through keys (actual letters)
for i in emis_freq:
#Create dictionary for this actual letter in new dict
emis_prob[i] = {}
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
#Add one to the numerator and 26 (num of letters) to the denominator
emis_prob[i][j] = (emis_freq[i][j]+1)/(emis_freq[i]["tot"]+26)
#Add the very small, basically 0 chance of a "_" getting in the mix (chance is 0 in reality)
emis_prob[i]["_"] = 1/(emis_freq[i]["tot"]+26)
#Remove 'tot' key from probability dict
del emis_prob[i]["tot"]
'''Spaces are immutable, incorruptible beasts, and have an emission probability of 1. They are not counted'''
emis_prob['_'] = {}
emis_prob['_']['_'] = 0.9999999999999999
for i in letters:
emis_prob['_'][i] = 0.0000000000000001
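# Hedged worked example of the add-one (Laplace) smoothing applied above, using
# made-up counts: if hidden 'a' was observed as 'a' 90 times and as 's' 10 times
# (100 observations total), the smoothed probabilities come out as below.
toy_counts = {"a": 90, "s": 10}
toy_total = sum(toy_counts.values())
p_seen = (toy_counts["a"] + 1) / (toy_total + 26)  # 91/126, about 0.722
p_unseen = (0 + 1) / (toy_total + 26)              # 1/126, about 0.008, never zero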
'''Transition Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in tran_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
tot += tran_freq[i][j]
#Add 'tot' entry to dict
tran_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
tran_prob = {}
#Iterate through keys (actual letters)
for i in tran_freq:
#Create dictionary for this actual letter in new dict
tran_prob[i] = {}
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
#Add one to the numerator and 27 (num of letters + '_') to the denominator
tran_prob[i][j] = (tran_freq[i][j]+1)/(tran_freq[i]["tot"]+27)
#Remove 'tot' key from probability dict
del tran_prob[i]["tot"]
'''Initial Calculations'''
#Count the total number of characters in the first col (hidden)
tot = 0
for i in init_freq:
tot += init_freq[i]
#Dict that stores the probabilities of each letter
init_prob = {}
for i in init_freq:
init_prob[i] = (init_freq[i]/tot)#(init_freq[i]/len("_".join(first_col)))
#Return both lists and the probability dictionaries
return first_col,second_col,emis_prob,tran_prob,init_prob
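# Hedged sketch of the two-column file format the parser above expects: i[0] is the
# intended (hidden) letter, i[2] the observed letter, and "_ _" lines separate
# words. The sample content below is made up for illustration:
#
#   t t
#   h b
#   e e
#
# Parsing it would give first_col == ["the"], second_col == ["tbe"], plus the
# smoothed emission, transition, and initial dictionaries returned above.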
#Viterbi algorithm, returns final prob of getting to end and likely route (sequence of letters)
#Takes in: Evid (observed state sequence, one giant string with underscores for spaces), hidd (list of hidden states, eg. list of possible letters), star (dict of starting probabilities), tran (transition probability dict), emis (emission probability dict)
#Tran must be in format tran[prev][cur]
#Emis must be in format emis[hidden][observed]
def | (evid, hidd, star, tran, emis):
'''Spaces have a 1.0 emission prob, since they are uncorrupted'''
'''Use the math library's log2 to convert to log base 2 for math. Convert back with the math library's pow(2, num) if desired'''
'''Log2 can still use max. log2(0.8) > log2(0.2)'''
#Create list that uses the time as the index and the value is a dict to store probability
P = [{}]
#Create a dict for the path
path = {}
#Create dict for t(0) (seed dict with initial entries)
#Iterate through start dict (Contains all states that sequence can start with)
for i in star:
#Calculate probability with start[letter]*emission (add instead of multiply with log numbers)
P[0][i] = log2(star[i])+log2(emis[i][evid[0]])
path[i] = [i]
#Run for t > 1, start at second letter
for i in range(1,len(evid)):
#Create new dict at end of list of dicts (dict for each time value)
P.append({})
#Dict to temporarily store path for this iteration
temp_path = {}
#Iterate through all possible states that are connected to the previous state chosen
for j in hidd:
#Use list comprehension to iterate through states, calculate trans*emis*P[t-1] for each possible state, find max and store that in path
(prob, state) = max((P[i-1][k] + log2(tran[k][j]) + log2(emis[j][evid[i]]), k) for k in hidd)
P[i][j] = prob
temp_path[j] = path[state] + [j]
# Don't need to remember the old paths
path = temp_path
#Find max prob in the last iteration of the list of dicts (P)
n = len(evid)-1
(prob, state) = max((P[n][y], y) for y in hidd)
#Return the probability for the best last state and the path for it as a list of 1 char strings
return prob,path[state]
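# Hedged mini-check of the log-domain trick used above: log2 turns a product of
# probabilities into a sum, so maximising the sum of logs picks the same path as
# maximising the product itself.
from math import log2, isclose
assert isclose(log2(0.6 * 0.9), log2(0.6) + log2(0.9))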
#Function that takes in 2 strings of equal length and returns the error percent. String 1 is the correct string, string 2 is checked for errors
def error_rate(correct, check):
errors = 0
for i in range(0,len(correct)):
if correct[i] != check[i]:
| furby | identifier_name |
Zinser_Assignment8.py | the intended letter as key, and observed letters with frequencies as value
emis_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in letters:
emis_freq[i] = {}
for j in letters:
emis_freq[i][j] = 0
#Transition dictionary
#Dictionary that stores the first letter (t) as the key, and second letter (t+1) as the second key with frequencies as value
tran_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in (letters+"_"):
tran_freq[i] = {}
for j in (letters+"_"):
tran_freq[i][j] = 0
#Initial dictionary
#Dictionary to store frequency that a letter occurs in the first col (hidden, actual)
init_freq = {}
#Fill dictionary with letter entries (init to 0)
for i in (letters+"_"):
init_freq[i] = 0
#Open the file
with open(name,"r") as data_in:
#Store the last char
last_char = ""
#Bool to see if this is the first char
first_char = True
#Iterate through the file line by line
for i in data_in.readlines():
#Initial
#Increment the first col character's frequency in the initial dict
init_freq[i[0]] += 1
#Transition
#Make sure this isn't the first
if first_char:
first_char = False
#Otherwise add to the transition frequency dict
else:
tran_freq[last_char][i[0]] += 1
#Set the last char to be the current first col char that we have added to the dict
last_char = i[0]
#Check if this line is a separation between words ("_")
if i[0] == "_":
#Append word to list of words
first_col.append(word1)
second_col.append(word2)
#Reset temporary word storage
word1 = ""
word2 = ""
#Otherwise line is letter
else:
#Append letters to their temporary storage containers
word1 += i[0]
word2 += i[2]
if i[2] in emis_freq[i[0]]:
emis_freq[i[0]][i[2]] += 1
else:
emis_freq[i[0]][i[2]] = 1
#Cleanup since data file doesn't end in a "_ _" line
first_col.append(word1)
second_col.append(word2)
'''Emission Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in emis_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
tot += emis_freq[i][j]
#Add 'tot' entry to dict
emis_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
emis_prob = {}
#Iterate through keys (actual letters)
for i in emis_freq:
#Create dictionary for this actual letter in new dict
emis_prob[i] = {}
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
#Add one to the numerator and 26 (num of letters) to the denominator
emis_prob[i][j] = (emis_freq[i][j]+1)/(emis_freq[i]["tot"]+26)
#Add the very small, basically 0 chance of a "_" getting in the mix (chance is 0 in reality)
emis_prob[i]["_"] = 1/(emis_freq[i]["tot"]+26)
#Remove 'tot' key from probability dict
del emis_prob[i]["tot"]
'''Spaces are immutable, incorruptible beasts, and have an emission probability of 1. They are not counted'''
emis_prob['_'] = {}
emis_prob['_']['_'] = 0.9999999999999999
for i in letters:
emis_prob['_'][i] = 0.0000000000000001
'''Transition Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in tran_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
tot += tran_freq[i][j]
#Add 'tot' entry to dict
tran_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
tran_prob = {}
#Iterate through keys (actual letters)
for i in tran_freq:
#Create dictionary for this actual letter in new dict
tran_prob[i] = {}
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
#Add one to the numerator and 27 (num of letters + '_') to the denominator
tran_prob[i][j] = (tran_freq[i][j]+1)/(tran_freq[i]["tot"]+27)
#Remove 'tot' key from probability dict
del tran_prob[i]["tot"]
'''Initial Calculations'''
#Count the total number of characters in the first col (hidden)
tot = 0
for i in init_freq:
tot += init_freq[i]
#Dict that stores the probabilities of each letter
init_prob = {}
for i in init_freq:
init_prob[i] = (init_freq[i]/tot)#(init_freq[i]/len("_".join(first_col)))
#Return both lists and the probability dictionaries
return first_col,second_col,emis_prob,tran_prob,init_prob
#Viterbi algorithm, returns final prob of getting to end and likely route (sequence of letters)
#Takes in: Evid (observed state sequence, one giant string with underscores for spaces), hidd (list of hidden states, eg. list of possible letters), star (dict of starting probabilities), tran (transition probability dict), emis (emission probability dict)
#Tran must be in format tran[prev][cur]
#Emis must be in format emis[hidden][observed]
def furby(evid, hidd, star, tran, emis):
| temp_path = {}
#Iterate through all possible states that are connected to the previous state chosen
for j in hidd:
#Use list comprehension to iterate through states, calculate trans*emis*P[t-1] for each possible state, find max and store that in path
(prob, state) = max((P[i-1][k] + log2(tran[k][j]) + log2(emis[j][evid[i]]), k) for k in hidd)
P[i][j] = prob
temp_path[j] = path[state] + [j]
# Don't need to remember the old paths
path = temp_path
#Find max prob in the last iteration of the list of dicts (P)
n = len(evid)-1
(prob, state) = max((P[n][y], y) for y in hidd)
#Return the probability for the best last state and the path for it as a list of 1 char strings
return prob,path[state]
#Function that takes in 2 strings of equal length and returns the error percent. String 1 is the correct string, string 2 is checked for errors
def error_rate(correct, check):
errors = 0
for i in range(0,len(correct)):
if correct[i] != check[i]:
errors | '''Spaces have a 1.0 emission prob, since they are uncorrupted'''
'''Use the math library's log2 to convert to log base 2 for math. Convert back with the math library's pow(2, num) if desired'''
'''Log2 can still use max. log2(0.8) > log2(0.2)'''
#Create list that uses the time as the index and the value is a dict to store probability
P = [{}]
#Create a dict for the path
path = {}
#Create dict for t(0) (seed dict with initial entries)
#Iterate through start dict (Contains all states that sequence can start with)
for i in star:
#Calculate probability with start[letter]*emission (add instead of multiply with log numbers)
P[0][i] = log2(star[i])+log2(emis[i][evid[0]])
path[i] = [i]
#Run for t > 1, start at second letter
for i in range(1,len(evid)):
#Create new dict at end of list of dicts (dict for each time value)
P.append({})
#Dict to temporarily store path for this iteration | identifier_body |
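# Hedged completion sketch: error_rate is cut off above, but its header comment says
# it returns the error percent of two equal-length strings, so a faithful version
# presumably finishes along these lines (error_rate_sketch is a hypothetical name):
def error_rate_sketch(correct, check):
    errors = 0
    for i in range(0, len(correct)):
        if correct[i] != check[i]:
            errors += 1
    return errors / len(correct)  # e.g. error_rate_sketch("hello", "hxllo") == 0.2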
Zinser_Assignment8.py | intended letter as key, and observed letters with frequencies as value
emis_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in letters:
emis_freq[i] = {}
for j in letters:
emis_freq[i][j] = 0
#Transition dictionary
#Dictionary that stores the first letter (t) as the key, and second letter (t+1) as the second key with frequencies as value
tran_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in (letters+"_"):
tran_freq[i] = {}
for j in (letters+"_"):
tran_freq[i][j] = 0
#Initial dictionary
#Dictionary to store frequency that a letter occurs in the first col (hidden, actual)
init_freq = {}
#Fill dictionary with letter entries (init to 0)
for i in (letters+"_"):
init_freq[i] = 0
#Open the file
with open(name,"r") as data_in:
#Store the last char
last_char = ""
#Bool to see if this is the first char
first_char = True
#Iterate through the file line by line
for i in data_in.readlines():
#Initial
#Increment the first col character's frequency in the initial dict
init_freq[i[0]] += 1
#Transition
#Make sure this isn't the first
if first_char:
first_char = False
#Otherwise add to the transition frequency dict
else:
tran_freq[last_char][i[0]] += 1
#Set the last char to be the current first col char that we have added to the dict
last_char = i[0]
#Check if this line is a separation between words ("_")
if i[0] == "_":
#Append word to list of words
first_col.append(word1)
second_col.append(word2)
#Reset temporary word storage
word1 = ""
word2 = ""
#Otherwise line is letter
else:
#Append letters to their temporary storage containers
word1 += i[0]
word2 += i[2]
if i[2] in emis_freq[i[0]]:
emis_freq[i[0]][i[2]] += 1
else:
emis_freq[i[0]][i[2]] = 1
#Cleanup since data file doesn't end in a "_ _" line
first_col.append(word1)
second_col.append(word2)
'''Emission Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in emis_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
tot += emis_freq[i][j]
#Add 'tot' entry to dict
emis_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
emis_prob = {}
#Iterate through keys (actual letters)
for i in emis_freq:
#Create dictionary for this actual letter in new dict
emis_prob[i] = {}
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
#Add one to the numerator and 26 (num of letters) to the denominator
emis_prob[i][j] = (emis_freq[i][j]+1)/(emis_freq[i]["tot"]+26)
#Add the very small, basically 0 chance of a "_" getting in the mix (chance is 0 in reality)
emis_prob[i]["_"] = 1/(emis_freq[i]["tot"]+26)
#Remove 'tot' key from probability dict
del emis_prob[i]["tot"]
'''Spaces are immutable, incorruptible beasts, and have an emission probability of 1. They are not counted'''
emis_prob['_'] = {}
emis_prob['_']['_'] = 0.9999999999999999
for i in letters:
emis_prob['_'][i] = 0.0000000000000001
'''Transition Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in tran_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
tot += tran_freq[i][j]
#Add 'tot' entry to dict
tran_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
tran_prob = {}
#Iterate through keys (actual letters)
for i in tran_freq:
#Create dictionary for this actual letter in new dict
tran_prob[i] = {}
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
#Add one to the numerator and 27 (num of letters + '_') to the denominator
tran_prob[i][j] = (tran_freq[i][j]+1)/(tran_freq[i]["tot"]+27)
#Remove 'tot' key from probability dict
del tran_prob[i]["tot"]
'''Initial Calculations'''
#Count the total number of characters in the first col (hidden)
tot = 0
for i in init_freq:
tot += init_freq[i]
#Dict that stores the probabilities of each letter
init_prob = {}
for i in init_freq:
init_prob[i] = (init_freq[i]/tot)#(init_freq[i]/len("_".join(first_col)))
#Return both lists and the probability dictionaries
return first_col,second_col,emis_prob,tran_prob,init_prob
#Viterbi algorithm, returns final prob of getting to end and likely route (sequence of letters)
#Takes in: Evid (observed state sequence, one giant string with underscores for spaces), hidd (list of hidden states, eg. list of possible letters), star (dict of starting probabilities), tran (transition probability dict), emis (emission probability dict)
#Tran must be in format tran[prev][cur]
#Emis must be in format emis[hidden][observed]
def furby(evid, hidd, star, tran, emis):
'''Spaces have a 1.0 emission prob, since they are uncorrupted'''
'''Use the math library's log2 to convert to log base 2 for math. Convert back with the math library's pow(2, num) if desired'''
'''Log2 can still use max. log2(0.8) > log2(0.2)'''
#Create list that uses the time as the index and the value is a dict to store probability
P = [{}]
#Create a dict for the path
path = {} | path[i] = [i]
#Run for t > 1, start at second letter
for i in range(1,len(evid)):
#Create new dict at end of list of dicts (dict for each time value)
P.append({})
#Dict to temporarily store path for this iteration
temp_path = {}
#Iterate through all possible states that are connected to the previous state chosen
for j in hidd:
#Use list comprehension to iterate through states, calculate trans*emis*P[t-1] for each possible state, find max and store that in path
(prob, state) = max((P[i-1][k] + log2(tran[k][j]) + log2(emis[j][evid[i]]), k) for k in hidd)
P[i][j] = prob
temp_path[j] = path[state] + [j]
# Don't need to remember the old paths
path = temp_path
#Find max prob in the last iteration of the list of dicts (P)
n = len(evid)-1
(prob, state) = max((P[n][y], y) for y in hidd)
#Return the probability for the best last state and the path for it as a list of 1 char strings
return prob,path[state]
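# Hedged toy decode for the Viterbi routine above, assembling the split pieces of
# furby shown in this file; every number and name here is made up. All starting
# probabilities are kept nonzero, since log2(0) would raise a ValueError when seeding.
toy_hidd = ['a', 'b']
toy_star = {'a': 0.6, 'b': 0.4}
toy_tran = {'a': {'a': 0.7, 'b': 0.3}, 'b': {'a': 0.4, 'b': 0.6}}
toy_emis = {'a': {'a': 0.9, 'b': 0.1}, 'b': {'a': 0.2, 'b': 0.8}}
log_prob, decoded = furby("ab", toy_hidd, toy_star, toy_tran, toy_emis)
# Expected: decoded == ['a', 'b'] with log_prob of roughly -2.95 (log base 2).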
#Function that takes in 2 strings of equal length and returns the error percent. String 1 is the correct string, string 2 is checked for errors
def error_rate(correct, check):
errors = 0
for i in range(0,len(correct)):
if correct[i] != check[i]:
errors | #Create dict for t(0) (seed dict with inital entries)
#Iterate through start dict (Contains all states that sequence can start with)
for i in star:
#Calculate probability with start[letter]*emission (add instead of multiply with log numbers)
P[0][i] = log2(star[i])+log2(emis[i][evid[0]]) | random_line_split |
Zinser_Assignment8.py | intended letter as key, and observed letters with frequencies as value
emis_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in letters:
emis_freq[i] = {}
for j in letters:
emis_freq[i][j] = 0
#Transition dictionary
#Dictionary that stores the first letter (t) as the key, and second letter (t+1) as the second key with frequencies as value
tran_freq = {}
#Fill dictionary with dictionaries, and those with letter entries (init to 0)
for i in (letters+"_"):
tran_freq[i] = {}
for j in (letters+"_"):
tran_freq[i][j] = 0
#Initial dictionary
#Dictionary to store frequency that a letter occurs in the first col (hidden, actual)
init_freq = {}
#Fill dictionary with letter entries (init to 0)
for i in (letters+"_"):
|
#Open the file
with open(name,"r") as data_in:
#Store the last char
last_char = ""
#Bool to see if this is the first char
first_char = True
#Iterate through the file line by line
for i in data_in.readlines():
#Initial
#Increment the first col characters frequency in the intial dict
init_freq[i[0]] += 1
#Transition
#Make sure this isn't the first
if first_char:
first_char = False
#Otherwise add to the transition frequency dict
else:
tran_freq[last_char][i[0]] += 1
#Set the last char to be the current first col char that we have added to the dict
last_char = i[0]
#Check if this line is a separation between words ("_")
if i[0] == "_":
#Append word to list of words
first_col.append(word1)
second_col.append(word2)
#Reset temporary word storage
word1 = ""
word2 = ""
#Otherwise line is letter
else:
#Append letters to their temporary storage containers
word1 += i[0]
word2 += i[2]
if i[2] in emis_freq[i[0]]:
emis_freq[i[0]][i[2]] += 1
else:
emis_freq[i[0]][i[2]] = 1
#Cleanup since data file doesn't end in a "_ _" line
first_col.append(word1)
second_col.append(word2)
'''Emission Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in emis_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
tot += emis_freq[i][j]
#Add 'tot' entry to dict
emis_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
emis_prob = {}
#Iterate through keys (actual letters)
for i in emis_freq:
#Create dictionary for this actual letter in new dict
emis_prob[i] = {}
#Iterate through evidence keys for letter i
for j in emis_freq[i]:
#Add one to the numerator and 26 (num of letters) to the denominator
emis_prob[i][j] = (emis_freq[i][j]+1)/(emis_freq[i]["tot"]+26)
#Add the very small, basically 0 chance of a "_" getting in the mix (chance is 0 in reality)
emis_prob[i]["_"] = 1/(emis_freq[i]["tot"]+26)
#Remove 'tot' key from probability dict
del emis_prob[i]["tot"]
'''Spaces are immutable, uncorruptable beasts, and have an emission probability of 1. They are not counted'''
emis_prob['_'] = {}
emis_prob['_']['_'] = 0.9999999999999999
for i in letters:
emis_prob['_'][i] = 0.0000000000000001
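#Sketch of what this smoothing buys us, with hypothetical counts: if 'e' was
#observed 500 times and corrupted to 'r' 20 of those times, then
#emis_prob['e']['r'] = (20+1)/(500+26) ~= 0.040, and a corruption never seen
#in training still gets 1/526 rather than zero. The '_' entries above are
#pinned to ~1.0 and ~0.0 (not exactly, since log2(0) is undefined) because
#spaces are assumed to pass through uncorrupted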
'''Transition Calculations'''
#Add entry to dict 'tot' that holds the total number of times the letter appears
#Iterate through keys (actual letters)
for i in tran_freq:
#Reset total
tot = 0
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
tot += tran_freq[i][j]
#Add 'tot' entry to dict
tran_freq[i]["tot"] = tot
#Now take this data (total) and create a probability dictionary
tran_prob = {}
#Iterate through keys (actual letters)
for i in tran_freq:
#Create dictionary for this actual letter in new dict
tran_prob[i] = {}
#Iterate through evidence keys for letter i
for j in tran_freq[i]:
#Add one to the numerator and 27 (num of letters + '_') to the denominator
tran_prob[i][j] = (tran_freq[i][j]+1)/(tran_freq[i]["tot"]+27)
#Remove 'tot' key from probability dict
del tran_prob[i]["tot"]
'''Initial Calculations'''
#Count the total number of characters in the first col (hidden)
tot = 0
for i in init_freq:
tot += init_freq[i]
#Dict that stores the probabilities of each letter
init_prob = {}
for i in init_freq:
init_prob[i] = (init_freq[i]/tot)#(init_freq[i]/len("_".join(first_col)))
#Return both lists and the probability dictionaries
return first_col,second_col,emis_prob,tran_prob,init_prob
#Viterbi algorithm, returns final prob of getting to end and likely route (sequence of letters)
#Takes in: Evid (observed state sequence, one giant string with underscores for spaces), hidd (list of hidden states, e.g. list of possible letters), star (dict of starting probabilities), tran (transition probability dict), emis (emission probability dict)
#Tran must be in format tran[prev][cur]
#Emis must be in format emis[hidden][observed]
def furby(evid, hidd, star, tran, emis):
'''Spaces have a 1.0 emission prob, since they are uncorrupted'''
'''Use the math library's log2 to convert to log base 2 for math. Convert back with the math library's pow(2, num) if desired'''
'''Log2 can still use max. log2(0.8) > log2(0.2)'''
#Create list that uses the time as the index and the value is a dict to store probability
P = [{}]
#Create a dict for the path
path = {}
#Create dict for t(0) (seed dict with initial entries)
#Iterate through start dict (Contains all states that the sequence can start with)
for i in star:
#Calculate probability with start[letter]*emission (add instead of multiply with log numbers)
P[0][i] = log2(star[i])+log2(emis[i][evid[0]])
path[i] = [i]
#Run for t > 1, start at second letter
for i in range(1,len(evid)):
#Create new dict at end of list of dicts (dict for each time value)
P.append({})
#Dict to temporarily store path for this iteration
temp_path = {}
#Iterate through all possible states that are connected to the previous state chosen
for j in hidd:
#Use list comprehension to iterate through states, calculate trans*emis*P[t-1] for each possible state, find max and store that in path
(prob, state) = max((P[i-1][k] + log2(tran[k][j]) + log2(emis[j][evid[i]]), k) for k in hidd)
P[i][j] = prob
temp_path[j] = path[state] + [j]
# Don't need to remember the old paths
path = temp_path
#Find max prob in the last iteration of the list of dicts (P)
n = len(evid)-1
(prob, state) = max((P[n][y], y) for y in hidd)
#Return the probability for the best last state and the path for it as a list of 1 char strings
return prob,path[state]
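#End-to-end sketch of how the pieces fit together (the function and file
#names here are hypothetical, shown only to illustrate the call order):
# hidden, observed, emis_p, tran_p, init_p = train("typos.data")
# evidence = "_".join(observed)
# truth = "_".join(hidden)
# states = list("abcdefghijklmnopqrstuvwxyz") + ["_"]
# log_p, path = furby(evidence, states, init_p, tran_p, emis_p)
# print(error_rate(truth, "".join(path)))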
#Function that takes in 2 strings of equal length and returns the error percent. String 1 is the correct string, string 2 is checked for errors
def error_rate(correct, check):
errors = 0
for i in range(0,len(correct)):
if correct[i] != check[i]:
errors | init_freq[i] = 0 | conditional_block |
ly_proxy_test.py | # Author: Jason Lu
import urllib.request
from bs4 import BeautifulSoup
import time
req_header = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#'Accept-Language': 'en-US,en;q=0.8,zh-Hans-CN;q=0.5,zh-Hans;q=0.3',
'Accept-Charset':'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding':'en-us',
'Connection':'keep-alive',
'Referer':'http://www.baidu.com/'
}
req_timeout = 5
testUrl = "http://www.baidu.com/"
testStr = "wahaha"
file1 = open('proxy.txt' , 'w')
# url = ""
# req = urllib2.Request(url,None,req_header)
# jsondatas = urllib2.urlopen(req,None,req_timeout).read()
# cookies = urllib2.HTTPCookieProcessor()
# Keep the login state alive across requests by handling cookies
import http.cookiejar
# Use http.cookiejar.CookieJar() to create a CookieJar object
cjar = http.cookiejar.CookieJar()
cookies = urllib.request.HTTPCookieProcessor(cjar)
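# The CookieJar above keeps session cookies across requests once it is wired
# into an opener. A minimal sketch of the pattern used in the loop below
# (the URL here is just a placeholder):
# opener = urllib.request.build_opener(cookies)
# body = opener.open('http://example.com/', timeout=req_timeout).read()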
checked_num = 0
grasp_num = 0
for page in range(1, 3):
# req = urllib2.Request('http://www.xici.net.co/nn/' + str(page), None, req_header)
# html_doc = urllib2.urlopen(req, None, req_timeout).read()
req = urllib.request.Request('http://www.xici.net.co/nn/' + str(page))
req.add_header('User-Agent',
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1")
html_doc = urllib.request.urlopen(req).read().decode('utf-8')
# html_doc = urllib2.urlopen('http://www.xici.net.co/nn/' + str(page)).read()
soup = BeautifulSoup(html_doc)
trs = soup.find('table', id='ip_list').find_all('tr')
print(trs)
for tr in trs[1:]:
tds = tr.find_all('td')
ip = tds[1].text.strip()
port = tds[2].text.strip()
protocol = tds[5].text.strip()
if protocol == 'HTTP' or protocol == 'HTTPS':
#of.write('%s=%s:%s\n' % (protocol, ip, port))
print('%s=%s:%s' % (protocol, ip, port))
grasp_num +=1
proxyHandler = urllib.request.ProxyHandler({"http": r'http://%s:%s' % (ip, port)})
opener = urllib.request.build_opener(cookies, proxyHandler)
opener.addheaders = [('User-Agent',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36')]
t1 = time.time()
try:
req = opener.open(testUrl, timeout=req_timeout)
result = req.read()
timeused = time.time() - t1
pos = result.find(testStr)
if pos > 1:
file1.write(protocol+"\t"+ip+"\t"+port+"\n")
checked_num+=1
print(checked_num, grasp_num)
else:
continue
except Exception as e | print(str(e))
continue
file1.close()
print(checked_num,grasp_num) | :
| conditional_block |
ly_proxy_test.py | # Author: Jason Lu
import urllib.request
from bs4 import BeautifulSoup
import time
req_header = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#'Accept-Language': 'en-US,en;q=0.8,zh-Hans-CN;q=0.5,zh-Hans;q=0.3',
'Accept-Charset':'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding':'en-us',
'Connection':'keep-alive',
'Referer':'http://www.baidu.com/'
}
req_timeout = 5
testUrl = "http://www.baidu.com/"
testStr = "wahaha"
file1 = open('proxy.txt' , 'w')
# url = ""
# req = urllib2.Request(url,None,req_header)
# jsondatas = urllib2.urlopen(req,None,req_timeout).read()
# cookies = urllib2.HTTPCookieProcessor()
# Keep the login state alive across requests by handling cookies
import http.cookiejar
# Use http.cookiejar.CookieJar() to create a CookieJar object
cjar = http.cookiejar.CookieJar()
cookies = urllib.request.HTTPCookieProcessor(cjar)
checked_num = 0
grasp_num = 0
for page in range(1, 3):
# req = urllib2.Request('http://www.xici.net.co/nn/' + str(page), None, req_header)
# html_doc = urllib2.urlopen(req, None, req_timeout).read()
req = urllib.request.Request('http://www.xici.net.co/nn/' + str(page))
req.add_header('User-Agent',
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1")
html_doc = urllib.request.urlopen(req).read().decode('utf-8')
# html_doc = urllib2.urlopen('http://www.xici.net.co/nn/' + str(page)).read()
soup = BeautifulSoup(html_doc)
trs = soup.find('table', id='ip_list').find_all('tr')
print(trs)
for tr in trs[1:]:
tds = tr.find_all('td')
ip = tds[1].text.strip()
port = tds[2].text.strip()
protocol = tds[5].text.strip() | proxyHandler = urllib.request.ProxyHandler({"http": r'http://%s:%s' % (ip, port)})
opener = urllib.request.build_opener(cookies, proxyHandler)
opener.addheaders = [('User-Agent',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36')]
t1 = time.time()
try:
req = opener.open(testUrl, timeout=req_timeout)
result = req.read()
timeused = time.time() - t1
pos = result.find(testStr)
if pos > 1:
file1.write(protocol+"\t"+ip+"\t"+port+"\n")
checked_num+=1
print(checked_num, grasp_num)
else:
continue
except Exception as e:
print(str(e))
continue
file1.close()
print(checked_num,grasp_num) | if protocol == 'HTTP' or protocol == 'HTTPS':
#of.write('%s=%s:%s\n' % (protocol, ip, port))
print('%s=%s:%s' % (protocol, ip, port))
grasp_num +=1 | random_line_split |
__init__.py | import socket
from .attrtree import AttrTree
from .checks import Checks
config = AttrTree()
# the list of checks
config.install_attr('checks', Checks())
# This is the base granularity (in seconds) for polling
# Each check may then individually be configured to run every N * tick
config.install_attr('base_tick', 60)
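# In other words, a check's effective polling period is every * base_tick
# seconds; e.g. with the default base_tick of 60, a hypothetical check
# configured with every=5 would run every 300 seconds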
# Default "every" check parameter, can be overridden on a per-check basis
config.install_attr('default_every', 1)
# Default "error_every" (how often we retry checks that are in error) parameter
# -1 disables the feature (same as regular "every"), can also be overridden
config.install_attr('default_error_every', -1)
| # Verbosity level (one of CRITICAL, ERROR, WARNING, INFO, DEBUG)
config.install_attr('verb_level', 'INFO')
# Email addresses to send to when an alert is triggered
config.install_attr('emails.to', [])
# The From: address
config.install_attr('emails.addr_from',
'Picomon <picomon@%s>' % socket.getfqdn())
# The SMTP host, with optional :port suffix
config.install_attr('emails.smtp_host', 'localhost:25')
# The inactive timeout after which to close the SMTP connection
config.install_attr('emails.smtp_keepalive_timeout', 60)
# Timeout after which to retry sending emails after a failure
config.install_attr('emails.smtp_retry_timeout', 60)
# Interval in seconds between global reports when some checks are in error
# 0 disables reports
config.install_attr('emails.report.every', 0)
# Subject template for state change email notifications
# available substitutions:
# - state ("Problem" or "OK")
# - check (check's name, like "CheckDNSRec6")
# - dest (the target of the check ie. an IP or a Host's 'name'
# parameter)
config.install_attr('emails.subject_tpl',
'[DOMAIN] {state}: {check} on {dest}')
# reports email subject
config.install_attr('emails.report.subject', '[DOMAIN] Picomon error report')
# watchdog error email subject
config.install_attr('emails.watchdog_subject', '[DOMAIN] Picomon stopped') | random_line_split |
|
dis.rs | processor jumps to the code segment and offset specified with the
// > target operand. Here the target operand specifies an absolute far
// > address either directly with a pointer (ptr16:16 or ptr16:32) or
// > indirectly with a memory location (m16:16 or m16:32). With the
// > pointer method, the segment and address of the called procedure is
// > encoded in the instruction, using a 4-byte (16-bit operand size) or
// > 6-byte (32-bit operand size) far address immediate.
// TODO: do something intelligent with the segment.
Ok(Some(op.ptr.offset as u64))
}
pub fn get_immediate_operand_xref(
module: &Module,
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<VA>> {
if op.imm.is_relative {
// the operand is an immediate constant relative to $PC.
// destination = $pc + immediate + insn.len
//
// see doctest: [test relative immediate operand]()
let imm = if op.imm.is_signed {
util::u64_i64(op.imm.value)
} else {
op.imm.value as i64
};
let dst = match util::va_add_signed(va + insn.length as u64, imm) {
None => return Ok(None),
Some(dst) => dst,
};
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
} else {
// the operand is an immediate absolute address.
let dst = if op.imm.is_signed {
let imm = util::u64_i64(op.imm.value);
if imm < 0 {
// obviously this isn't an address if negative.
return Ok(None);
}
imm as u64
} else {
op.imm.value
};
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
}
}
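// Worked example of the relative-immediate case above (hypothetical bytes):
// an `E8 FB 01 00 00` (CALL rel32) at va 0x1000 is 5 bytes long and carries
// the signed immediate 0x1FB, so the destination is
// 0x1000 + 5 + 0x1FB = 0x1200, i.e. the `va + insn.length + imm` sum that
// `util::va_add_signed` computes above.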
pub fn get_operand_xref(
module: &Module,
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<Target>> {
match op.ty {
// like: .text:0000000180001041 FF 15 D1 78 07 00 call cs:__imp_RtlVirtualUnwind_0
// 0x0000000000001041: call [0x0000000000079980]
zydis::OperandType::MEMORY => match get_memory_operand_ptr(va, insn, op) {
Ok(Some(ptr)) => Ok(Some(Target::Indirect(ptr))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
// like: EA 33 D2 B9 60 80 40 jmp far ptr 4080h:60B9D233h
// "ptr": {
// "segment": 16512,
// "offset": 1622790707
// },
zydis::OperandType::POINTER => match get_pointer_operand_xref(op) {
Ok(Some(ptr)) => Ok(Some(Target::Indirect(ptr))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
zydis::OperandType::IMMEDIATE => match get_immediate_operand_xref(module, va, insn, op) {
Ok(Some(va)) => Ok(Some(Target::Direct(va))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
// like: CALL [rax]
// which cannot be resolved without emulation.
zydis::OperandType::REGISTER => Ok(Some(Target::Indirect(0x0))),
zydis::OperandType::UNUSED => Ok(None),
}
}
#[cfg(test)]
mod tests {
use crate::{analysis::dis::*, rsrc::*, test::*};
#[test]
fn test_get_memory_operand_ptr() {
//```
// .text:00000001800134D4 call cs:KernelBaseGetGlobalData
//```
//
// this should result in a call flow to IAT entry 0x1800773F0
let buf = get_buf(Rsrc::K32);
let pe = crate::loader::pe::PE::from_bytes(&buf).unwrap();
let insn = read_insn(&pe.module, 0x1800134D4);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_ptr(0x1800134D4, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x1800773F0);
}
#[test]
fn test_get_memory_operand_xref_simple() {
// 0: ff 25 06 00 00 00 +-> jmp DWORD PTR ds:0x6
// 6: 00 00 00 00 +-- dw 0x0
let module = load_shellcode32(b"\xFF\x25\x06\x00\x00\x00\x00\x00\x00\x00");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x0);
}
#[test]
fn test_get_memory_operand_xref_rip_relative() {
// FF 15 00 00 00 00 CALL $+5
// 00 00 00 00 00 00 00 00 dq 0x0
let module = load_shellcode64(b"\xFF\x15\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x0);
}
#[test]
fn test_get_pointer_operand_xref() {
// this is a far ptr jump from addr 0x0 to itself:
// JMP FAR PTR 0:00000000
// [ EA ] [ 00 00 00 00 ] [ 00 00 ]
// opcode ptr segment
let module = load_shellcode32(b"\xEA\x00\x00\x00\x00\x00\x00");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_pointer_operand_xref(&op).unwrap();
assert_eq!(xref.is_some(), true, "has pointer operand xref");
assert_eq!(xref.unwrap(), 0x0, "correct pointer operand xref");
}
#[test]
fn test_get_immediate_operand_xref() | {
// this is a jump from addr 0x0 to itself:
// JMP $+0;
let module = load_shellcode32(b"\xEB\xFE");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_immediate_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true, "has immediate operand");
assert_eq!(xref.unwrap(), 0x0, "correct immediate operand");
// this is a jump from addr 0x0 to -1, which is unmapped
// JMP $-1;
let module = load_shellcode32(b"\xEB\xFD");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_immediate_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), false, "does not have immediate operand");
} | identifier_body |
|
dis.rs | };
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
} else {
// the operand is an immediate absolute address.
let dst = if op.imm.is_signed {
let imm = util::u64_i64(op.imm.value);
if imm < 0 {
// obviously this isn't an address if negative.
return Ok(None);
}
imm as u64
} else {
op.imm.value
};
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
}
}
pub fn get_operand_xref(
module: &Module,
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<Target>> {
match op.ty {
// like: .text:0000000180001041 FF 15 D1 78 07 00 call cs:__imp_RtlVirtualUnwind_0
// 0x0000000000001041: call [0x0000000000079980]
zydis::OperandType::MEMORY => match get_memory_operand_ptr(va, insn, op) {
Ok(Some(ptr)) => Ok(Some(Target::Indirect(ptr))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
// like: EA 33 D2 B9 60 80 40 jmp far ptr 4080h:60B9D233h
// "ptr": {
// "segment": 16512,
// "offset": 1622790707
// },
zydis::OperandType::POINTER => match get_pointer_operand_xref(op) {
Ok(Some(ptr)) => Ok(Some(Target::Indirect(ptr))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
zydis::OperandType::IMMEDIATE => match get_immediate_operand_xref(module, va, insn, op) {
Ok(Some(va)) => Ok(Some(Target::Direct(va))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
// like: CALL [rax]
// which cannot be resolved without emulation.
zydis::OperandType::REGISTER => Ok(Some(Target::Indirect(0x0))),
zydis::OperandType::UNUSED => Ok(None),
}
}
#[cfg(test)]
mod tests {
use crate::{analysis::dis::*, rsrc::*, test::*};
#[test]
fn test_get_memory_operand_ptr() {
//```
// .text:00000001800134D4 call cs:KernelBaseGetGlobalData
//```
//
// this should result in a call flow to IAT entry 0x1800773F0
let buf = get_buf(Rsrc::K32);
let pe = crate::loader::pe::PE::from_bytes(&buf).unwrap();
let insn = read_insn(&pe.module, 0x1800134D4);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_ptr(0x1800134D4, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x1800773F0);
}
#[test]
fn test_get_memory_operand_xref_simple() {
// 0: ff 25 06 00 00 00 +-> jmp DWORD PTR ds:0x6
// 6: 00 00 00 00 +-- dw 0x0
let module = load_shellcode32(b"\xFF\x25\x06\x00\x00\x00\x00\x00\x00\x00");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x0);
}
#[test]
fn test_get_memory_operand_xref_rip_relative() {
// FF 15 00 00 00 00 CALL $+5
// 00 00 00 00 00 00 00 00 dq 0x0
let module = load_shellcode64(b"\xFF\x15\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x0);
}
#[test]
fn test_get_pointer_operand_xref() {
// this is a far ptr jump from addr 0x0 to itself:
// JMP FAR PTR 0:00000000
// [ EA ] [ 00 00 00 00 ] [ 00 00 ]
// opcode ptr segment
let module = load_shellcode32(b"\xEA\x00\x00\x00\x00\x00\x00");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_pointer_operand_xref(&op).unwrap();
assert_eq!(xref.is_some(), true, "has pointer operand xref");
assert_eq!(xref.unwrap(), 0x0, "correct pointer operand xref");
}
#[test]
fn test_get_immediate_operand_xref() {
// this is a jump from addr 0x0 to itself:
// JMP $+0;
let module = load_shellcode32(b"\xEB\xFE");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_immediate_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true, "has immediate operand");
assert_eq!(xref.unwrap(), 0x0, "correct immediate operand");
// this is a jump from addr 0x0 to -1, which is unmapped
// JMP $-1;
let module = load_shellcode32(b"\xEB\xFD");
let insn = read_insn(&module, 0x0);
let op = get_first_operand(&insn).unwrap();
let xref = get_immediate_operand_xref(&module, 0x0, &insn, &op).unwrap();
assert_eq!(xref.is_some(), false, "does not have immediate operand");
}
#[test]
fn test_format_insn() {
use crate::analysis::dis::zydis;
let buf = get_buf(Rsrc::K32);
let pe = crate::loader::pe::PE::from_bytes(&buf).unwrap();
let mut formatter = zydis::Formatter::new(zydis::FormatterStyle::INTEL).unwrap();
struct UserData {
names: std::collections::BTreeMap<VA, String>,
orig_print_address_abs: Option<zydis::Hook>,
}
let mut userdata = Box::new(UserData {
names: Default::default(),
orig_print_address_abs: None,
});
let orig = formatter
.set_print_address_abs(Box::new(
|formatter: &zydis::Formatter,
buf: &mut zydis::FormatterBuffer,
ctx: &mut zydis::FormatterContext,
userdata: Option<&mut dyn core::any::Any>|
-> zydis::Result<()> {
// programming error: userdata must be provided.
// TODO: enforce via types.
let userdata = userdata.expect("no userdata");
// programming error: userdata must be a Box<UserData>.
// TODO: enforce via types.
let userdata = userdata.downcast_ref::<Box<UserData>>().expect("incorrect userdata"); |
let absolute_address = unsafe {
// safety: the insn and operands come from zydis, so we assume they contain
// valid data.
let insn: &zydis::DecodedInstruction = &*ctx.instruction; | random_line_split |
|
dis.rs | 401000]`
// fetch the pointer, rather than the dest,
// so like `0x401000`.
#[allow(clippy::if_same_then_else)]
pub fn get_memory_operand_ptr(
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<VA>> {
if op.mem.base == zydis::Register::NONE
&& op.mem.index == zydis::Register::NONE
&& op.mem.scale == 0
&& op.mem.disp.has_displacement
{
// the operand is a deref of a memory address.
// for example: JMP [0x0]
// this means: read the ptr from 0x0, and then jump to it.
//
// we'll have to make some assumptions here:
// - the ptr doesn't change (can detect via mem segment perms)
// - the ptr is fixed up (TODO)
//
// see doctest: [test simple memory ptr operand]()
if op.mem.disp.displacement < 0 {
Ok(None)
} else {
Ok(Some(op.mem.disp.displacement as VA))
}
} else if op.mem.base == zydis::Register::RIP
// only valid on x64
&& op.mem.index == zydis::Register::NONE
&& op.mem.scale == 0
&& op.mem.disp.has_displacement
{
// this is RIP-relative addressing.
// it works like a relative immediate,
// that is: dst = *(rva + displacement + instruction len)
match util::va_add_signed(va + insn.length as u64, op.mem.disp.displacement as i64) {
None => Ok(None),
Some(ptr) => Ok(Some(ptr)),
}
} else if op.mem.base != zydis::Register::NONE {
// this is something like `CALL [eax+4]`
// can't resolve without emulation
// TODO: add test
Ok(None)
} else if op.mem.scale > 0 {
// this is something like `JMP [0x1000+eax*4]` (32-bit)
Ok(None)
} else {
println!("{:#x}: get mem op xref", va);
print_op(op);
panic!("not supported");
}
}
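// Two hypothetical examples of the branches above: `JMP [0x403000]` has no
// base, index, or scale, so the displacement 0x403000 is returned as the
// pointer directly, while a 64-bit `CALL [rip+0x200]` at va 0x1000 with an
// instruction length of 6 yields the pointer 0x1000 + 6 + 0x200 = 0x1206.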
// for a memory operand, like `mov eax, [0x401000]`
// fetch what the pointer points to,
// which is *not* `0x401000` in this example.
#[allow(clippy::if_same_then_else)]
pub fn get_memory_operand_xref(
module: &Module,
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<VA>> {
if let Some(ptr) = get_memory_operand_ptr(va, insn, op)? {
let dst = match module.read_va_at_va(ptr) {
Ok(dst) => dst,
Err(_) => return Ok(None),
};
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
// this is the happy path!
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
} else {
Ok(None)
}
}
pub fn get_pointer_operand_xref(op: &zydis::DecodedOperand) -> Result<Option<VA>> {
// ref: https://c9x.me/x86/html/file_module_x86_id_147.html
//
// > Far Jumps in Real-Address or Virtual-8086 Mode.
// > When executing a far jump in real address or virtual-8086 mode,
// > the processor jumps to the code segment and offset specified with the
// > target operand. Here the target operand specifies an absolute far
// > address either directly with a pointer (ptr16:16 or ptr16:32) or
// > indirectly with a memory location (m16:16 or m16:32). With the
// > pointer method, the segment and address of the called procedure is
// > encoded in the instruction, using a 4-byte (16-bit operand size) or
// > 6-byte (32-bit operand size) far address immediate.
// TODO: do something intelligent with the segment.
Ok(Some(op.ptr.offset as u64))
}
pub fn get_immediate_operand_xref(
module: &Module,
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<VA>> {
if op.imm.is_relative {
// the operand is an immediate constant relative to $PC.
// destination = $pc + immediate + insn.len
//
// see doctest: [test relative immediate operand]()
let imm = if op.imm.is_signed {
util::u64_i64(op.imm.value)
} else {
op.imm.value as i64
};
let dst = match util::va_add_signed(va + insn.length as u64, imm) {
None => return Ok(None),
Some(dst) => dst,
};
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
} else {
// the operand is an immediate absolute address.
let dst = if op.imm.is_signed {
let imm = util::u64_i64(op.imm.value);
if imm < 0 {
// obviously this isn't an address if negative.
return Ok(None);
}
imm as u64
} else {
op.imm.value
};
// must be mapped
if module.probe_va(dst, Permissions::RWX) {
Ok(Some(dst))
} else {
// invalid address
Ok(None)
}
}
}
pub fn get_operand_xref(
module: &Module,
va: VA,
insn: &zydis::DecodedInstruction,
op: &zydis::DecodedOperand,
) -> Result<Option<Target>> {
match op.ty {
// like: .text:0000000180001041 FF 15 D1 78 07 00 call cs:__imp_RtlVirtualUnwind_0
// 0x0000000000001041: call [0x0000000000079980]
zydis::OperandType::MEMORY => match get_memory_operand_ptr(va, insn, op) {
Ok(Some(ptr)) => Ok(Some(Target::Indirect(ptr))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
// like: EA 33 D2 B9 60 80 40 jmp far ptr 4080h:60B9D233h
// "ptr": {
// "segment": 16512,
// "offset": 1622790707
// },
zydis::OperandType::POINTER => match get_pointer_operand_xref(op) {
Ok(Some(ptr)) => Ok(Some(Target::Indirect(ptr))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
zydis::OperandType::IMMEDIATE => match get_immediate_operand_xref(module, va, insn, op) {
Ok(Some(va)) => Ok(Some(Target::Direct(va))),
Ok(None) => Ok(None),
Err(e) => Err(e),
},
// like: CALL [rax]
// which cannot be resolved without emulation.
zydis::OperandType::REGISTER => Ok(Some(Target::Indirect(0x0))),
zydis::OperandType::UNUSED => Ok(None),
}
}
#[cfg(test)]
mod tests {
use crate::{analysis::dis::*, rsrc::*, test::*};
#[test]
fn test_get_memory_operand_ptr() {
//```
// .text:00000001800134D4 call cs:KernelBaseGetGlobalData
//```
//
// this should result in a call flow to IAT entry 0x1800773F0
let buf = get_buf(Rsrc::K32);
let pe = crate::loader::pe::PE::from_bytes(&buf).unwrap();
let insn = read_insn(&pe.module, 0x1800134D4);
let op = get_first_operand(&insn).unwrap();
let xref = get_memory_operand_ptr(0x1800134D4, &insn, &op).unwrap();
assert_eq!(xref.is_some(), true);
assert_eq!(xref.unwrap(), 0x1800773F0);
}
#[test]
fn | test_get_memory_operand_xref_simple | identifier_name |
|
utils.js | 'use strict';
var fs = require('fs'),
request = require('request'),
wikiParser = require('./wikiparser'),
_ = require('lodash');
var fns = {
getSlugNameFor: function(str) {
var slug = str.replace(/\s+/g, '-').toLowerCase();
return slug;
},
extendWithJSONs: function(series) {
for (var ser in series) {
var fileName = './cache/' + fns.getSlugNameFor(ser) + '.json';
if (!fs.existsSync(fileName)) {
console.error('json for "', ser, '" not exists. Filename should be: ', fileName);
continue;
}
series[ser].jsonData = require('./.' + fileName);
}
},
writeJSON: function(fileName, jsonData) {
return new Promise(function (fulfill, reject) {
fs.writeFile(fileName, JSON.stringify(jsonData, null, 2), function(err) {
if (err) {
reject()
} else {
fulfill();
}
});
});
},
findSeriesByTitle: function(title, series) {
var hit = _.find(series, function(value, key, fullObj) {
return key === title;
});
if (!hit) {
console.error('Couldn\'t find this show in the db:', title);
return '';
}
return hit;
},
loadUrl: function(url) {
return new Promise(function (fulfill, reject) {
request(url, function (error, response, body) {
if (error || response.statusCode != 200) {
throw new Error(error);
}
fulfill(body);
});
});
},
updateCacheFromWiki: function(seriesData) {
return fns.loadUrl(seriesData.wikiUrl)
.then(function(body) {
return wikiParser.extractDataFromWikiPage(seriesData, body);
});
},
populateEpisodes: function(series, date, beforeDays, afterDays) {
var showsBefore = [], showsAfter = [],
beforeDate = date.getTime() - beforeDays * 1000 * 24 * 60 * 60,
afterDate = date.getTime() + afterDays * 1000 * 24 * 60 * 60,
nowDate = date.getTime();
_.each(series, function(data, key) {
if (data.jsonData) {
_.each(data.jsonData, function(season) {
_.each(season.episodes, function(episode) {
episode.dateObj = new Date(episode.date);
if (episode.dateObj.getTime() > beforeDate && episode.dateObj.getTime() < nowDate) | else if (episode.dateObj.getTime() > nowDate && episode.dateObj.getTime() < afterDate) {
showsAfter.push({ series: key, season: season.season, episode: episode});
}
});
});
}
});
return {
showsBefore: _.sortBy(showsBefore, function(n) {
return n.episode.dateObj.getTime();
}),
showsAfter: _.sortBy(showsAfter, function(n) {
return n.episode.dateObj.getTime();
})
};
},
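// Example of the window arithmetic above (dates are made up): with
// date = 2015-06-10, beforeDays = 7 and afterDays = 7, beforeDate falls on
// 2015-06-03 and afterDate on 2015-06-17, so an episode aired 2015-06-08
// lands in showsBefore and one airing 2015-06-12 in showsAfter, each list
// sorted by air date.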
formatDate: function(date) {
var month = date.getMonth() + 1,
day = date.getDay(),
dayText = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'][day];
return date.getFullYear() + '-' +
(month < 10 ? '0' + month : month) + '-' +
(date.getDate() < 10 ? '0' + date.getDate() : date.getDate()) +
' (' + dayText + ')';
}
}
module.exports = fns;
| {
showsBefore.push({ series: key, season: season.season, episode: episode});
} | conditional_block |
utils.js | 'use strict';
var fs = require('fs'),
request = require('request'),
wikiParser = require('./wikiparser'),
_ = require('lodash');
var fns = {
getSlugNameFor: function(str) {
var slug = str.replace(/\s+/g, '-').toLowerCase();
return slug;
},
extendWithJSONs: function(series) {
for (var ser in series) {
var fileName = './cache/' + fns.getSlugNameFor(ser) + '.json';
if (!fs.existsSync(fileName)) {
console.error('json for "', ser, '" not exists. Filename should be: ', fileName);
continue;
}
series[ser].jsonData = require('./.' + fileName);
} | writeJSON: function(fileName, jsonData) {
return new Promise(function (fulfill, reject) {
fs.writeFile(fileName, JSON.stringify(jsonData, null, 2), function(err) {
if (err) {
reject()
} else {
fulfill();
}
});
});
},
findSeriesByTitle: function(title, series) {
var hit = _.find(series, function(value, key, fullObj) {
return key === title;
});
if (!hit) {
console.error('Couldn\'t find this show in the db:', title);
return '';
}
return hit;
},
loadUrl: function(url) {
return new Promise(function (fulfill, reject) {
request(url, function (error, response, body) {
if (error || response.statusCode != 200) {
throw new Error(error);
}
fulfill(body);
});
});
},
updateCacheFromWiki: function(seriesData) {
return fns.loadUrl(seriesData.wikiUrl)
.then(function(body) {
return wikiParser.extractDataFromWikiPage(seriesData, body);
});
},
populateEpisodes: function(series, date, beforeDays, afterDays) {
var showsBefore = [], showsAfter = [],
beforeDate = date.getTime() - beforeDays * 1000 * 24 * 60 * 60,
afterDate = date.getTime() + afterDays * 1000 * 24 * 60 * 60,
nowDate = date.getTime();
_.each(series, function(data, key) {
if (data.jsonData) {
_.each(data.jsonData, function(season) {
_.each(season.episodes, function(episode) {
episode.dateObj = new Date(episode.date);
if (episode.dateObj.getTime() > beforeDate && episode.dateObj.getTime() < nowDate) {
showsBefore.push({ series: key, season: season.season, episode: episode});
} else if (episode.dateObj.getTime() > nowDate && episode.dateObj.getTime() < afterDate) {
showsAfter.push({ series: key, season: season.season, episode: episode});
}
});
});
}
});
return {
showsBefore: _.sortBy(showsBefore, function(n) {
return n.episode.dateObj.getTime();
}),
showsAfter: _.sortBy(showsAfter, function(n) {
return n.episode.dateObj.getTime();
})
};
},
formatDate: function(date) {
var month = date.getMonth() + 1,
day = date.getDay(),
dayText = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'][day];
return date.getFullYear() + '-' +
(month < 10 ? '0' + month : month) + '-' +
(date.getDate() < 10 ? '0' + date.getDate() : date.getDate()) +
' (' + dayText + ')';
}
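// Example output for a hypothetical date: formatDate(new Date(2015, 5, 7))
// returns '2015-06-07 (Sun)'; the `< 10` checks above are what zero-pad
// the month and the day of the month.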
}
module.exports = fns; | },
| random_line_split |
context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Data needed by the layout thread.
use fnv::FnvHasher;
use gfx::display_list::{WebRenderImageInfo, OpaqueNode};
use gfx::font_cache_thread::FontCacheThread;
use gfx::font_context::FontContext;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use msg::constellation_msg::PipelineId;
use net_traits::image_cache::{CanRequestImages, ImageCache, ImageState};
use net_traits::image_cache::{ImageOrMetadataAvailable, UsePlaceholder};
use opaque_node::OpaqueNodeMethods;
use parking_lot::RwLock;
use script_layout_interface::{PendingImage, PendingImageState};
use script_traits::Painter;
use script_traits::UntrustedNodeAddress;
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::cell::{RefCell, RefMut};
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use std::sync::{Arc, Mutex};
use std::thread;
use style::context::RegisteredSpeculativePainter;
use style::context::SharedStyleContext;
thread_local!(static FONT_CONTEXT_KEY: RefCell<Option<FontContext>> = RefCell::new(None));
pub fn with_thread_local_font_context<F, R>(layout_context: &LayoutContext, f: F) -> R
where F: FnOnce(&mut FontContext) -> R
{
FONT_CONTEXT_KEY.with(|k| {
let mut font_context = k.borrow_mut();
if font_context.is_none() {
let font_cache_thread = layout_context.font_cache_thread.lock().unwrap().clone();
*font_context = Some(FontContext::new(font_cache_thread));
}
f(&mut RefMut::map(font_context, |x| x.as_mut().unwrap()))
})
}
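// Hypothetical call site for the helper above: layout code borrows the
// per-thread FontContext inside a closure, e.g.
// `with_thread_local_font_context(&layout_context, |font_context| { ... })`,
// so the FontContext is created lazily on a thread's first use and then
// reused by every later caller on that thread.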
pub fn malloc_size_of_persistent_local_context(ops: &mut MallocSizeOfOps) -> usize {
FONT_CONTEXT_KEY.with(|r| {
if let Some(ref context) = *r.borrow() {
context.size_of(ops)
} else {
0
}
})
}
/// Layout information shared among all workers. This must be thread-safe.
pub struct LayoutContext<'a> {
/// The pipeline id of this LayoutContext.
pub id: PipelineId,
/// Bits shared by the layout and style system.
pub style_context: SharedStyleContext<'a>,
/// Reference to the script thread image cache.
pub image_cache: Arc<ImageCache>,
/// Interface to the font cache thread.
pub font_cache_thread: Mutex<FontCacheThread>,
/// A cache of WebRender image info.
pub webrender_image_cache: Arc<RwLock<HashMap<(ServoUrl, UsePlaceholder),
WebRenderImageInfo,
BuildHasherDefault<FnvHasher>>>>,
/// Paint worklets
pub registered_painters: &'a RegisteredPainters,
/// A list of in-progress image loads to be shared with the script thread.
/// A None value means that this layout was not initiated by the script thread.
pub pending_images: Option<Mutex<Vec<PendingImage>>>,
/// A list of nodes that have just initiated a CSS transition.
/// A None value means that this layout was not initiated by the script thread.
pub newly_transitioning_nodes: Option<Mutex<Vec<UntrustedNodeAddress>>>,
}
impl<'a> Drop for LayoutContext<'a> {
fn drop(&mut self) {
if !thread::panicking() {
if let Some(ref pending_images) = self.pending_images {
assert!(pending_images.lock().unwrap().is_empty());
}
}
}
}
impl<'a> LayoutContext<'a> {
#[inline(always)]
pub fn | (&self) -> &SharedStyleContext {
&self.style_context
}
pub fn get_or_request_image_or_meta(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<ImageOrMetadataAvailable> {
//XXXjdm For cases where we do not request an image, we still need to
// ensure the node gets another script-initiated reflow or it
// won't be requested at all.
let can_request = if self.pending_images.is_some() {
CanRequestImages::Yes
} else {
CanRequestImages::No
};
// See if the image is already available
let result = self.image_cache.find_image_or_metadata(url.clone(),
use_placeholder,
can_request);
match result {
Ok(image_or_metadata) => Some(image_or_metadata),
// Image failed to load, so just return nothing
Err(ImageState::LoadError) => None,
// Not yet requested - request image or metadata from the cache
Err(ImageState::NotRequested(id)) => {
let image = PendingImage {
state: PendingImageState::Unrequested(url),
node: node.to_untrusted_node_address(),
id: id,
};
self.pending_images.as_ref().unwrap().lock().unwrap().push(image);
None
}
// Image has been requested, is still pending. Return no image for this paint loop.
// When the image loads it will trigger a reflow and/or repaint.
Err(ImageState::Pending(id)) => {
//XXXjdm if self.pending_images is not available, we should make sure that
// this node gets marked dirty again so it gets a script-initiated
// reflow that deals with this properly.
if let Some(ref pending_images) = self.pending_images {
let image = PendingImage {
state: PendingImageState::PendingResponse,
node: node.to_untrusted_node_address(),
id: id,
};
pending_images.lock().unwrap().push(image);
}
None
}
}
}
pub fn get_webrender_image_for_url(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<WebRenderImageInfo> {
if let Some(existing_webrender_image) = self.webrender_image_cache
.read()
.get(&(url.clone(), use_placeholder)) {
return Some((*existing_webrender_image).clone())
}
match self.get_or_request_image_or_meta(node, url.clone(), use_placeholder) {
Some(ImageOrMetadataAvailable::ImageAvailable(image, _)) => {
let image_info = WebRenderImageInfo::from_image(&*image);
if image_info.key.is_none() {
Some(image_info)
} else {
let mut webrender_image_cache = self.webrender_image_cache.write();
webrender_image_cache.insert((url, use_placeholder),
image_info);
Some(image_info)
}
}
None | Some(ImageOrMetadataAvailable::MetadataAvailable(_)) => None,
}
}
}
/// A registered painter
pub trait RegisteredPainter: RegisteredSpeculativePainter + Painter {}
/// A set of registered painters
pub trait RegisteredPainters: Sync {
/// Look up a painter
fn get(&self, name: &Atom) -> Option<&RegisteredPainter>;
}
| shared_context | identifier_name |
context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Data needed by the layout thread.
use fnv::FnvHasher;
use gfx::display_list::{WebRenderImageInfo, OpaqueNode};
use gfx::font_cache_thread::FontCacheThread;
use gfx::font_context::FontContext;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use msg::constellation_msg::PipelineId;
use net_traits::image_cache::{CanRequestImages, ImageCache, ImageState};
use net_traits::image_cache::{ImageOrMetadataAvailable, UsePlaceholder};
use opaque_node::OpaqueNodeMethods;
use parking_lot::RwLock;
use script_layout_interface::{PendingImage, PendingImageState};
use script_traits::Painter;
use script_traits::UntrustedNodeAddress;
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::cell::{RefCell, RefMut};
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use std::sync::{Arc, Mutex};
use std::thread;
use style::context::RegisteredSpeculativePainter;
use style::context::SharedStyleContext;
thread_local!(static FONT_CONTEXT_KEY: RefCell<Option<FontContext>> = RefCell::new(None));
pub fn with_thread_local_font_context<F, R>(layout_context: &LayoutContext, f: F) -> R
where F: FnOnce(&mut FontContext) -> R
{
FONT_CONTEXT_KEY.with(|k| {
let mut font_context = k.borrow_mut();
if font_context.is_none() {
let font_cache_thread = layout_context.font_cache_thread.lock().unwrap().clone();
*font_context = Some(FontContext::new(font_cache_thread));
}
f(&mut RefMut::map(font_context, |x| x.as_mut().unwrap()))
})
}
pub fn malloc_size_of_persistent_local_context(ops: &mut MallocSizeOfOps) -> usize {
FONT_CONTEXT_KEY.with(|r| {
if let Some(ref context) = *r.borrow() {
context.size_of(ops)
} else {
0
}
})
}
/// Layout information shared among all workers. This must be thread-safe.
pub struct LayoutContext<'a> {
/// The pipeline id of this LayoutContext.
pub id: PipelineId,
/// Bits shared by the layout and style system.
pub style_context: SharedStyleContext<'a>,
/// Reference to the script thread image cache.
pub image_cache: Arc<ImageCache>,
/// Interface to the font cache thread.
pub font_cache_thread: Mutex<FontCacheThread>,
/// A cache of WebRender image info. | pub registered_painters: &'a RegisteredPainters,
/// A list of in-progress image loads to be shared with the script thread.
/// A None value means that this layout was not initiated by the script thread.
pub pending_images: Option<Mutex<Vec<PendingImage>>>,
/// A list of nodes that have just initiated a CSS transition.
/// A None value means that this layout was not initiated by the script thread.
pub newly_transitioning_nodes: Option<Mutex<Vec<UntrustedNodeAddress>>>,
}
impl<'a> Drop for LayoutContext<'a> {
fn drop(&mut self) {
if !thread::panicking() {
if let Some(ref pending_images) = self.pending_images {
assert!(pending_images.lock().unwrap().is_empty());
}
}
}
}
impl<'a> LayoutContext<'a> {
#[inline(always)]
pub fn shared_context(&self) -> &SharedStyleContext {
&self.style_context
}
pub fn get_or_request_image_or_meta(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<ImageOrMetadataAvailable> {
//XXXjdm For cases where we do not request an image, we still need to
// ensure the node gets another script-initiated reflow or it
// won't be requested at all.
let can_request = if self.pending_images.is_some() {
CanRequestImages::Yes
} else {
CanRequestImages::No
};
// See if the image is already available
let result = self.image_cache.find_image_or_metadata(url.clone(),
use_placeholder,
can_request);
match result {
Ok(image_or_metadata) => Some(image_or_metadata),
// Image failed to load, so just return nothing
Err(ImageState::LoadError) => None,
// Not yet requested - request image or metadata from the cache
Err(ImageState::NotRequested(id)) => {
let image = PendingImage {
state: PendingImageState::Unrequested(url),
node: node.to_untrusted_node_address(),
id: id,
};
self.pending_images.as_ref().unwrap().lock().unwrap().push(image);
None
}
// Image has been requested, is still pending. Return no image for this paint loop.
// When the image loads it will trigger a reflow and/or repaint.
Err(ImageState::Pending(id)) => {
//XXXjdm if self.pending_images is not available, we should make sure that
// this node gets marked dirty again so it gets a script-initiated
// reflow that deals with this properly.
if let Some(ref pending_images) = self.pending_images {
let image = PendingImage {
state: PendingImageState::PendingResponse,
node: node.to_untrusted_node_address(),
id: id,
};
pending_images.lock().unwrap().push(image);
}
None
}
}
}
pub fn get_webrender_image_for_url(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<WebRenderImageInfo> {
if let Some(existing_webrender_image) = self.webrender_image_cache
.read()
.get(&(url.clone(), use_placeholder)) {
return Some((*existing_webrender_image).clone())
}
match self.get_or_request_image_or_meta(node, url.clone(), use_placeholder) {
Some(ImageOrMetadataAvailable::ImageAvailable(image, _)) => {
let image_info = WebRenderImageInfo::from_image(&*image);
if image_info.key.is_none() {
Some(image_info)
} else {
let mut webrender_image_cache = self.webrender_image_cache.write();
webrender_image_cache.insert((url, use_placeholder),
image_info);
Some(image_info)
}
}
None | Some(ImageOrMetadataAvailable::MetadataAvailable(_)) => None,
}
}
}
/// A registered painter
pub trait RegisteredPainter: RegisteredSpeculativePainter + Painter {}
/// A set of registered painters
pub trait RegisteredPainters: Sync {
/// Look up a painter
fn get(&self, name: &Atom) -> Option<&RegisteredPainter>;
} | pub webrender_image_cache: Arc<RwLock<HashMap<(ServoUrl, UsePlaceholder),
WebRenderImageInfo,
BuildHasherDefault<FnvHasher>>>>,
/// Paint worklets | random_line_split |
context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Data needed by the layout thread.
use fnv::FnvHasher;
use gfx::display_list::{WebRenderImageInfo, OpaqueNode};
use gfx::font_cache_thread::FontCacheThread;
use gfx::font_context::FontContext;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use msg::constellation_msg::PipelineId;
use net_traits::image_cache::{CanRequestImages, ImageCache, ImageState};
use net_traits::image_cache::{ImageOrMetadataAvailable, UsePlaceholder};
use opaque_node::OpaqueNodeMethods;
use parking_lot::RwLock;
use script_layout_interface::{PendingImage, PendingImageState};
use script_traits::Painter;
use script_traits::UntrustedNodeAddress;
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::cell::{RefCell, RefMut};
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use std::sync::{Arc, Mutex};
use std::thread;
use style::context::RegisteredSpeculativePainter;
use style::context::SharedStyleContext;
thread_local!(static FONT_CONTEXT_KEY: RefCell<Option<FontContext>> = RefCell::new(None));
pub fn with_thread_local_font_context<F, R>(layout_context: &LayoutContext, f: F) -> R
where F: FnOnce(&mut FontContext) -> R
{
FONT_CONTEXT_KEY.with(|k| {
let mut font_context = k.borrow_mut();
if font_context.is_none() {
let font_cache_thread = layout_context.font_cache_thread.lock().unwrap().clone();
*font_context = Some(FontContext::new(font_cache_thread));
}
f(&mut RefMut::map(font_context, |x| x.as_mut().unwrap()))
})
}
pub fn malloc_size_of_persistent_local_context(ops: &mut MallocSizeOfOps) -> usize |
/// Layout information shared among all workers. This must be thread-safe.
pub struct LayoutContext<'a> {
/// The pipeline id of this LayoutContext.
pub id: PipelineId,
/// Bits shared by the layout and style system.
pub style_context: SharedStyleContext<'a>,
/// Reference to the script thread image cache.
pub image_cache: Arc<ImageCache>,
/// Interface to the font cache thread.
pub font_cache_thread: Mutex<FontCacheThread>,
/// A cache of WebRender image info.
pub webrender_image_cache: Arc<RwLock<HashMap<(ServoUrl, UsePlaceholder),
WebRenderImageInfo,
BuildHasherDefault<FnvHasher>>>>,
/// Paint worklets
pub registered_painters: &'a RegisteredPainters,
/// A list of in-progress image loads to be shared with the script thread.
/// A None value means that this layout was not initiated by the script thread.
pub pending_images: Option<Mutex<Vec<PendingImage>>>,
/// A list of nodes that have just initiated a CSS transition.
/// A None value means that this layout was not initiated by the script thread.
pub newly_transitioning_nodes: Option<Mutex<Vec<UntrustedNodeAddress>>>,
}
impl<'a> Drop for LayoutContext<'a> {
fn drop(&mut self) {
if !thread::panicking() {
if let Some(ref pending_images) = self.pending_images {
assert!(pending_images.lock().unwrap().is_empty());
}
}
}
}
impl<'a> LayoutContext<'a> {
#[inline(always)]
pub fn shared_context(&self) -> &SharedStyleContext {
&self.style_context
}
pub fn get_or_request_image_or_meta(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<ImageOrMetadataAvailable> {
//XXXjdm For cases where we do not request an image, we still need to
// ensure the node gets another script-initiated reflow or it
// won't be requested at all.
let can_request = if self.pending_images.is_some() {
CanRequestImages::Yes
} else {
CanRequestImages::No
};
// See if the image is already available
let result = self.image_cache.find_image_or_metadata(url.clone(),
use_placeholder,
can_request);
match result {
Ok(image_or_metadata) => Some(image_or_metadata),
// Image failed to load, so just return nothing
Err(ImageState::LoadError) => None,
// Not yet requested - request image or metadata from the cache
Err(ImageState::NotRequested(id)) => {
let image = PendingImage {
state: PendingImageState::Unrequested(url),
node: node.to_untrusted_node_address(),
id: id,
};
self.pending_images.as_ref().unwrap().lock().unwrap().push(image);
None
}
// Image has been requested, is still pending. Return no image for this paint loop.
// When the image loads it will trigger a reflow and/or repaint.
Err(ImageState::Pending(id)) => {
//XXXjdm if self.pending_images is not available, we should make sure that
// this node gets marked dirty again so it gets a script-initiated
// reflow that deals with this properly.
if let Some(ref pending_images) = self.pending_images {
let image = PendingImage {
state: PendingImageState::PendingResponse,
node: node.to_untrusted_node_address(),
id: id,
};
pending_images.lock().unwrap().push(image);
}
None
}
}
}
pub fn get_webrender_image_for_url(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<WebRenderImageInfo> {
if let Some(existing_webrender_image) = self.webrender_image_cache
.read()
.get(&(url.clone(), use_placeholder)) {
return Some((*existing_webrender_image).clone())
}
match self.get_or_request_image_or_meta(node, url.clone(), use_placeholder) {
Some(ImageOrMetadataAvailable::ImageAvailable(image, _)) => {
let image_info = WebRenderImageInfo::from_image(&*image);
if image_info.key.is_none() {
Some(image_info)
} else {
let mut webrender_image_cache = self.webrender_image_cache.write();
webrender_image_cache.insert((url, use_placeholder),
image_info);
Some(image_info)
}
}
None | Some(ImageOrMetadataAvailable::MetadataAvailable(_)) => None,
}
}
}
/// A registered painter
pub trait RegisteredPainter: RegisteredSpeculativePainter + Painter {}
/// A set of registered painters
pub trait RegisteredPainters: Sync {
/// Look up a painter
fn get(&self, name: &Atom) -> Option<&RegisteredPainter>;
}
| {
FONT_CONTEXT_KEY.with(|r| {
if let Some(ref context) = *r.borrow() {
context.size_of(ops)
} else {
0
}
})
} | identifier_body |
context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Data needed by the layout thread.
use fnv::FnvHasher;
use gfx::display_list::{WebRenderImageInfo, OpaqueNode};
use gfx::font_cache_thread::FontCacheThread;
use gfx::font_context::FontContext;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use msg::constellation_msg::PipelineId;
use net_traits::image_cache::{CanRequestImages, ImageCache, ImageState};
use net_traits::image_cache::{ImageOrMetadataAvailable, UsePlaceholder};
use opaque_node::OpaqueNodeMethods;
use parking_lot::RwLock;
use script_layout_interface::{PendingImage, PendingImageState};
use script_traits::Painter;
use script_traits::UntrustedNodeAddress;
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::cell::{RefCell, RefMut};
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use std::sync::{Arc, Mutex};
use std::thread;
use style::context::RegisteredSpeculativePainter;
use style::context::SharedStyleContext;
thread_local!(static FONT_CONTEXT_KEY: RefCell<Option<FontContext>> = RefCell::new(None));
pub fn with_thread_local_font_context<F, R>(layout_context: &LayoutContext, f: F) -> R
where F: FnOnce(&mut FontContext) -> R
{
FONT_CONTEXT_KEY.with(|k| {
let mut font_context = k.borrow_mut();
if font_context.is_none() {
let font_cache_thread = layout_context.font_cache_thread.lock().unwrap().clone();
*font_context = Some(FontContext::new(font_cache_thread));
}
f(&mut RefMut::map(font_context, |x| x.as_mut().unwrap()))
})
}
pub fn malloc_size_of_persistent_local_context(ops: &mut MallocSizeOfOps) -> usize {
FONT_CONTEXT_KEY.with(|r| {
if let Some(ref context) = *r.borrow() | else {
0
}
})
}
/// Layout information shared among all workers. This must be thread-safe.
pub struct LayoutContext<'a> {
/// The pipeline id of this LayoutContext.
pub id: PipelineId,
/// Bits shared by the layout and style system.
pub style_context: SharedStyleContext<'a>,
/// Reference to the script thread image cache.
pub image_cache: Arc<ImageCache>,
/// Interface to the font cache thread.
pub font_cache_thread: Mutex<FontCacheThread>,
/// A cache of WebRender image info.
pub webrender_image_cache: Arc<RwLock<HashMap<(ServoUrl, UsePlaceholder),
WebRenderImageInfo,
BuildHasherDefault<FnvHasher>>>>,
/// Paint worklets
pub registered_painters: &'a RegisteredPainters,
/// A list of in-progress image loads to be shared with the script thread.
/// A None value means that this layout was not initiated by the script thread.
pub pending_images: Option<Mutex<Vec<PendingImage>>>,
/// A list of nodes that have just initiated a CSS transition.
/// A None value means that this layout was not initiated by the script thread.
pub newly_transitioning_nodes: Option<Mutex<Vec<UntrustedNodeAddress>>>,
}
impl<'a> Drop for LayoutContext<'a> {
fn drop(&mut self) {
if !thread::panicking() {
if let Some(ref pending_images) = self.pending_images {
assert!(pending_images.lock().unwrap().is_empty());
}
}
}
}
impl<'a> LayoutContext<'a> {
#[inline(always)]
pub fn shared_context(&self) -> &SharedStyleContext {
&self.style_context
}
pub fn get_or_request_image_or_meta(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<ImageOrMetadataAvailable> {
//XXXjdm For cases where we do not request an image, we still need to
// ensure the node gets another script-initiated reflow or it
// won't be requested at all.
let can_request = if self.pending_images.is_some() {
CanRequestImages::Yes
} else {
CanRequestImages::No
};
// See if the image is already available
let result = self.image_cache.find_image_or_metadata(url.clone(),
use_placeholder,
can_request);
match result {
Ok(image_or_metadata) => Some(image_or_metadata),
// Image failed to load, so just return nothing
Err(ImageState::LoadError) => None,
// Not yet requested - request image or metadata from the cache
Err(ImageState::NotRequested(id)) => {
let image = PendingImage {
state: PendingImageState::Unrequested(url),
node: node.to_untrusted_node_address(),
id: id,
};
self.pending_images.as_ref().unwrap().lock().unwrap().push(image);
None
}
// Image has been requested, is still pending. Return no image for this paint loop.
// When the image loads it will trigger a reflow and/or repaint.
Err(ImageState::Pending(id)) => {
//XXXjdm if self.pending_images is not available, we should make sure that
// this node gets marked dirty again so it gets a script-initiated
// reflow that deals with this properly.
if let Some(ref pending_images) = self.pending_images {
let image = PendingImage {
state: PendingImageState::PendingResponse,
node: node.to_untrusted_node_address(),
id: id,
};
pending_images.lock().unwrap().push(image);
}
None
}
}
}
pub fn get_webrender_image_for_url(&self,
node: OpaqueNode,
url: ServoUrl,
use_placeholder: UsePlaceholder)
-> Option<WebRenderImageInfo> {
if let Some(existing_webrender_image) = self.webrender_image_cache
.read()
.get(&(url.clone(), use_placeholder)) {
return Some((*existing_webrender_image).clone())
}
match self.get_or_request_image_or_meta(node, url.clone(), use_placeholder) {
Some(ImageOrMetadataAvailable::ImageAvailable(image, _)) => {
let image_info = WebRenderImageInfo::from_image(&*image);
if image_info.key.is_none() {
Some(image_info)
} else {
let mut webrender_image_cache = self.webrender_image_cache.write();
webrender_image_cache.insert((url, use_placeholder),
image_info);
Some(image_info)
}
}
None | Some(ImageOrMetadataAvailable::MetadataAvailable(_)) => None,
}
}
}
/// A registered painter
pub trait RegisteredPainter: RegisteredSpeculativePainter + Painter {}
/// A set of registered painters
pub trait RegisteredPainters: Sync {
/// Look up a painter
fn get(&self, name: &Atom) -> Option<&RegisteredPainter>;
}
| {
context.size_of(ops)
} | conditional_block |
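The with_thread_local_font_context helper above is a reusable Rust pattern: lazily construct a per-thread resource on first use, then hand the caller a direct &mut view via RefMut::map. A minimal self-contained sketch of the same pattern, with a hypothetical Cache type standing in for FontContext:
use std::cell::{RefCell, RefMut};
// Hypothetical per-thread resource standing in for FontContext.
struct Cache { hits: u64 }
thread_local!(static CACHE_KEY: RefCell<Option<Cache>> = RefCell::new(None));
fn with_thread_local_cache<F, R>(f: F) -> R
    where F: FnOnce(&mut Cache) -> R
{
    CACHE_KEY.with(|k| {
        let mut cache = k.borrow_mut();
        if cache.is_none() {
            // First use on this thread: build the resource lazily.
            *cache = Some(Cache { hits: 0 });
        }
        // Narrow the RefMut<Option<Cache>> down to the inner Cache.
        f(&mut RefMut::map(cache, |c| c.as_mut().unwrap()))
    })
}
fn main() {
    assert_eq!(with_thread_local_cache(|c| { c.hits += 1; c.hits }), 1);
}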
pretty.rs | use chrono::*;
pub use super::split;
pub fn pretty_short(dur: Duration) -> String {
let components = split::split_duration(dur).as_vec();
let mut components_iter = components.iter().skip_while(|a| a.val() == 0);
let first_component = components_iter.next();
let second_component = components_iter.next();
let final_str = match first_component {
Some(x) => {
match second_component.and_then(has_second) {
Some(y) => {format!("{} and {}", x.to_string(), y.to_string())},
None => {x.to_string()}
}
},
// The duration is 0
None => {split::TimePeriod::Millisecond(0).to_string()},
};
fn | (val: &split::TimePeriod) -> Option<&split::TimePeriod> {
if val.val() == 0 {return None};
return Some(val);
}
return final_str;
}
pub fn pretty_full(dur: Duration) -> String {
let components = split::split_duration(dur);
let mut final_str = String::new();
for (i, component) in components.as_vec().iter().enumerate() {
if i != 0 {
final_str.push_str(", ");
}
final_str.push_str(&component.to_string());
}
return final_str;
}
#[test]
fn test_pretty_full_simple() {
let test_data = vec![
(Duration::days(365), "1 year, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(30), "0 years, 1 month, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::weeks(1), "0 years, 0 months, 1 week, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(1), "0 years, 0 months, 0 weeks, 1 day, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::hours(1), "0 years, 0 months, 0 weeks, 0 days, 1 hour, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::minutes(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 1 minute, 0 seconds, 0 milliseconds"),
(Duration::seconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 1 second, 0 milliseconds"),
(Duration::milliseconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 1 millisecond"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_full(dur), final_str);
}
}
#[test]
fn test_pretty_short() {
let test_data = vec![
(Duration::milliseconds(0), "0 milliseconds"),
(Duration::milliseconds(1), "1 millisecond"),
(-Duration::milliseconds(1), "1 millisecond"),
(Duration::milliseconds(200), "200 milliseconds"),
(Duration::seconds(1) + Duration::milliseconds(200), "1 second and 200 milliseconds"),
(Duration::days(1) + Duration::hours(2), "1 day and 2 hours"),
(Duration::days(1) + Duration::seconds(2), "1 day"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_short(dur), final_str);
}
}
| has_second | identifier_name |
pretty.rs | use chrono::*;
pub use super::split;
pub fn pretty_short(dur: Duration) -> String {
let components = split::split_duration(dur).as_vec();
let mut components_iter = components.iter().skip_while(|a| a.val() == 0);
let first_component = components_iter.next();
let second_component = components_iter.next();
let final_str = match first_component {
Some(x) => | ,
// The duration is 0
None => {split::TimePeriod::Millisecond(0).to_string()},
};
fn has_second(val: &split::TimePeriod) -> Option<&split::TimePeriod> {
if val.val() == 0 {return None};
return Some(val);
}
return final_str;
}
pub fn pretty_full(dur: Duration) -> String {
let components = split::split_duration(dur);
let mut final_str = String::new();
for (i, component) in components.as_vec().iter().enumerate() {
if i != 0 {
final_str.push_str(", ");
}
final_str.push_str(&component.to_string());
}
return final_str;
}
#[test]
fn test_pretty_full_simple() {
let test_data = vec![
(Duration::days(365), "1 year, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(30), "0 years, 1 month, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::weeks(1), "0 years, 0 months, 1 week, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(1), "0 years, 0 months, 0 weeks, 1 day, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::hours(1), "0 years, 0 months, 0 weeks, 0 days, 1 hour, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::minutes(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 1 minute, 0 seconds, 0 milliseconds"),
(Duration::seconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 1 second, 0 milliseconds"),
(Duration::milliseconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 1 millisecond"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_full(dur), final_str);
}
}
#[test]
fn test_pretty_short() {
let test_data = vec![
(Duration::milliseconds(0), "0 milliseconds"),
(Duration::milliseconds(1), "1 millisecond"),
(-Duration::milliseconds(1), "1 millisecond"),
(Duration::milliseconds(200), "200 milliseconds"),
(Duration::seconds(1) + Duration::milliseconds(200), "1 second and 200 milliseconds"),
(Duration::days(1) + Duration::hours(2), "1 day and 2 hours"),
(Duration::days(1) + Duration::seconds(2), "1 day"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_short(dur), final_str);
}
}
| {
match second_component.and_then(has_second) {
Some(y) => {format!("{} and {}", x.to_string(), y.to_string())},
None => {x.to_string()}
}
} | conditional_block |
pretty.rs | use chrono::*;
pub use super::split;
pub fn pretty_short(dur: Duration) -> String {
let components = split::split_duration(dur).as_vec();
let mut components_iter = components.iter().skip_while(|a| a.val() == 0);
let first_component = components_iter.next();
let second_component = components_iter.next();
let final_str = match first_component {
Some(x) => {
match second_component.and_then(has_second) {
Some(y) => {format!("{} and {}", x.to_string(), y.to_string())},
None => {x.to_string()}
}
},
// The duration is 0
None => {split::TimePeriod::Millisecond(0).to_string()},
};
fn has_second(val: &split::TimePeriod) -> Option<&split::TimePeriod> {
if val.val() == 0 {return None};
return Some(val);
}
return final_str;
}
pub fn pretty_full(dur: Duration) -> String {
let components = split::split_duration(dur);
let mut final_str = String::new();
for (i, component) in components.as_vec().iter().enumerate() {
if i != 0 {
final_str.push_str(", ");
}
final_str.push_str(&component.to_string());
}
return final_str;
}
#[test]
fn test_pretty_full_simple() {
let test_data = vec![
(Duration::days(365), "1 year, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(30), "0 years, 1 month, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::weeks(1), "0 years, 0 months, 1 week, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(1), "0 years, 0 months, 0 weeks, 1 day, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::hours(1), "0 years, 0 months, 0 weeks, 0 days, 1 hour, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::minutes(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 1 minute, 0 seconds, 0 milliseconds"),
(Duration::seconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 1 second, 0 milliseconds"),
(Duration::milliseconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 1 millisecond"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_full(dur), final_str);
}
}
#[test]
fn test_pretty_short() {
let test_data = vec![
(Duration::milliseconds(0), "0 milliseconds"),
(Duration::milliseconds(1), "1 millisecond"),
(-Duration::milliseconds(1), "1 millisecond"),
(Duration::milliseconds(200), "200 milliseconds"),
(Duration::seconds(1) + Duration::milliseconds(200), "1 second and 200 milliseconds"),
(Duration::days(1) + Duration::hours(2), "1 day and 2 hours"),
(Duration::days(1) + Duration::seconds(2), "1 day"),
];
for (dur, final_str) in test_data { | assert_eq!(pretty_short(dur), final_str);
}
} | random_line_split |
|
pretty.rs | use chrono::*;
pub use super::split;
pub fn pretty_short(dur: Duration) -> String {
let components = split::split_duration(dur).as_vec();
let mut components_iter = components.iter().skip_while(|a| a.val() == 0);
let first_component = components_iter.next();
let second_component = components_iter.next();
let final_str = match first_component {
Some(x) => {
match second_component.and_then(has_second) {
Some(y) => {format!("{} and {}", x.to_string(), y.to_string())},
None => {x.to_string()}
}
},
// The duration is 0
None => {split::TimePeriod::Millisecond(0).to_string()},
};
fn has_second(val: &split::TimePeriod) -> Option<&split::TimePeriod> {
if val.val() == 0 {return None};
return Some(val);
}
return final_str;
}
pub fn pretty_full(dur: Duration) -> String {
let components = split::split_duration(dur);
let mut final_str = String::new();
for (i, component) in components.as_vec().iter().enumerate() {
if i != 0 {
final_str.push_str(", ");
}
final_str.push_str(&component.to_string());
}
return final_str;
}
#[test]
fn test_pretty_full_simple() {
let test_data = vec![
(Duration::days(365), "1 year, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(30), "0 years, 1 month, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::weeks(1), "0 years, 0 months, 1 week, 0 days, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::days(1), "0 years, 0 months, 0 weeks, 1 day, 0 hours, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::hours(1), "0 years, 0 months, 0 weeks, 0 days, 1 hour, 0 minutes, 0 seconds, 0 milliseconds"),
(Duration::minutes(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 1 minute, 0 seconds, 0 milliseconds"),
(Duration::seconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 1 second, 0 milliseconds"),
(Duration::milliseconds(1), "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 1 millisecond"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_full(dur), final_str);
}
}
#[test]
fn test_pretty_short() | {
let test_data = vec![
(Duration::milliseconds(0), "0 milliseconds"),
(Duration::milliseconds(1), "1 millisecond"),
(-Duration::milliseconds(1), "1 millisecond"),
(Duration::milliseconds(200), "200 milliseconds"),
(Duration::seconds(1) + Duration::milliseconds(200), "1 second and 200 milliseconds"),
(Duration::days(1) + Duration::hours(2), "1 day and 2 hours"),
(Duration::days(1) + Duration::seconds(2), "1 day"),
];
for (dur, final_str) in test_data {
assert_eq!(pretty_short(dur), final_str);
}
} | identifier_body |
|
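A short usage sketch for the two formatters above, assuming pretty_short and pretty_full from this module are in scope; the expected strings follow directly from the tests:
use chrono::Duration;
fn demo() {
    // Only the two most significant non-zero components are kept.
    assert_eq!(pretty_short(Duration::days(1) + Duration::hours(2)), "1 day and 2 hours");
    // pretty_full always prints every component down to milliseconds.
    assert_eq!(pretty_full(Duration::milliseconds(1)),
               "0 years, 0 months, 0 weeks, 0 days, 0 hours, 0 minutes, 0 seconds, 1 millisecond");
}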
api_boilerplate.py | """
This module is responsible for doing all the authentication.
Adapted from the Google API Documentation.
"""
from __future__ import print_function
import os
import httplib2
import apiclient
import oauth2client
try:
import argparse
flags = argparse.ArgumentParser(
parents=[oauth2client.tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'client_secret.json'
# Enter your project name here!!
APPLICATION_NAME = 'API Project'
def | ():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.gdrive-credentials-cache')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'gdrive-credentials.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(
CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = oauth2client.tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = oauth2client.tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
file_service = apiclient.discovery.build('drive', 'v3', http=http).files()
| get_credentials | identifier_name |
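A usage sketch for the file_service handle built above: a minimal Drive v3 listing call. The pageSize and fields values are arbitrary examples:
# List a few files to confirm the credentials work.
results = file_service.list(
    pageSize=10, fields="nextPageToken, files(id, name)").execute()
for item in results.get('files', []):
    print('{0} ({1})'.format(item['name'], item['id']))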
api_boilerplate.py | """
This module is responsible for doing all the authentication.
Adapted from the Google API Documentation.
"""
from __future__ import print_function
import os
import httplib2
import apiclient
import oauth2client
try:
import argparse
flags = argparse.ArgumentParser(
parents=[oauth2client.tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'client_secret.json'
# Enter your project name here!!
APPLICATION_NAME = 'API Project'
def get_credentials():
| flow.user_agent = APPLICATION_NAME
if flags:
credentials = oauth2client.tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = oauth2client.tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
file_service = apiclient.discovery.build('drive', 'v3', http=http).files()
| """Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.gdrive-credentials-cache')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'gdrive-credentials.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(
CLIENT_SECRET_FILE, SCOPES) | identifier_body |
api_boilerplate.py | """
This module is responsible for doing all the authentication.
Adapted from the Google API Documentation.
"""
from __future__ import print_function
import os
import httplib2
import apiclient
import oauth2client
try:
import argparse
flags = argparse.ArgumentParser(
parents=[oauth2client.tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'client_secret.json'
# Enter your project name here!!
APPLICATION_NAME = 'API Project'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.gdrive-credentials-cache')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'gdrive-credentials.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(
CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = oauth2client.tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
|
print('Storing credentials to ' + credential_path)
return credentials
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
file_service = apiclient.discovery.build('drive', 'v3', http=http).files()
| credentials = oauth2client.tools.run(flow, store) | conditional_block |
api_boilerplate.py | """
This module is responsible for doing all the authentication.
Adapted from the Google API Documentation.
"""
from __future__ import print_function
import os
import httplib2
import apiclient
import oauth2client
try:
import argparse
flags = argparse.ArgumentParser(
parents=[oauth2client.tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive' | CLIENT_SECRET_FILE = 'client_secret.json'
# Enter your project name here!!
APPLICATION_NAME = 'API Project'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.gdrive-credentials-cache')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'gdrive-credentials.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(
CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = oauth2client.tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = oauth2client.tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
file_service = apiclient.discovery.build('drive', 'v3', http=http).files() | random_line_split |
|
data-xml-debug.js | Represents the name of the xml root-tag when sending <b>multiple</b> records to the server.</li>
* <li>{Array} records The records being sent to the server, ie: the subject of the write-action being performed. The records parameter will always be an array, even when only a single record is being acted upon.
* Each item within the records array will contain an array of field objects having the following properties:
* <ul>
* <li>{String} name The field-name of the record as defined by your {@link Ext.data.Record#create Ext.data.Record definition}. The "mapping" property will be used if it is defined; otherwise the "name" property is used. Use this parameter to define the XML tag-name of the property.</li>
* <li>{Mixed} value The record value of the field enclosed within XML tags specified by name property above.</li>
* </ul></li>
* <li>{Array} baseParams. The baseParams as defined upon {@link Ext.data.Store#baseParams}. Note that the baseParams have been converted into an array of [{name : "foo", value: "bar"}, ...] pairs in the same manner as the <b>records</b> parameter above. See {@link #documentRoot} and {@link #forceDocumentRoot}.</li>
* </ul>
*/
// Encoding the ? here in case it's being included by some kind of page that will parse it (eg. PHP)
tpl: '<tpl for="."><\u003fxml version="{version}" encoding="{encoding}"\u003f><tpl if="documentRoot"><{documentRoot}><tpl for="baseParams"><tpl for="."><{name}>{value}</{name}></tpl></tpl></tpl><tpl if="records.length>1"><{root}></tpl><tpl for="records"><{parent.record}><tpl for="."><{name}>{value}</{name}></tpl></{parent.record}></tpl><tpl if="records.length>1"></{root}></tpl><tpl if="documentRoot"></{documentRoot}></tpl></tpl>',
/**
* XmlWriter implementation of the final stage of a write action.
* @param {Object} params Transport-proxy's (eg: {@link Ext.Ajax#request}) params-object to write-to.
* @param {Object} baseParams as defined by {@link Ext.data.Store#baseParams}. The baseParams must be encoded by the extending class, eg: {@link Ext.data.JsonWriter}, {@link Ext.data.XmlWriter}.
* @param {Object/Object[]} data Data-object representing the compiled Store-recordset.
*/
render : function(params, baseParams, data) {
baseParams = this.toArray(baseParams);
params.xmlData = this.tpl.applyTemplate({
version: this.xmlVersion,
encoding: this.xmlEncoding,
documentRoot: (baseParams.length > 0 || this.forceDocumentRoot === true) ? this.documentRoot : false,
record: this.meta.record,
root: this.root,
baseParams: baseParams,
records: (Ext.isArray(data[0])) ? data : [data]
});
},
/**
* createRecord
* @protected
* @param {Ext.data.Record} rec
* @return {Array} Array of <tt>name:value</tt> pairs for attributes of the {@link Ext.data.Record}. See {@link Ext.data.DataWriter#toHash}.
*/
createRecord : function(rec) {
return this.toArray(this.toHash(rec));
},
/**
* updateRecord
* @protected
* @param {Ext.data.Record} rec
* @return {Array} Array of {name:value} pairs for attributes of the {@link Ext.data.Record}. See {@link Ext.data.DataWriter#toHash}.
*/
updateRecord : function(rec) {
return this.toArray(this.toHash(rec));
},
/**
* destroyRecord
* @protected
* @param {Ext.data.Record} rec
* @return {Array} Array containing an attribute-object (name/value pair) representing the {@link Ext.data.DataReader#idProperty idProperty}.
*/
destroyRecord : function(rec) {
var data = {};
data[this.meta.idProperty] = rec.id;
return this.toArray(data);
}
});
/*!
* Ext JS Library 3.4.0
* Copyright(c) 2006-2011 Sencha Inc.
* [email protected]
* http://www.sencha.com/license
*/
/**
* @class Ext.data.XmlReader
* @extends Ext.data.DataReader
* <p>Data reader class to create an Array of {@link Ext.data.Record} objects from an XML document
* based on mappings in a provided {@link Ext.data.Record} constructor.</p>
* <p><b>Note</b>: that in order for the browser to parse a returned XML document, the Content-Type
* header in the HTTP response must be set to "text/xml" or "application/xml".</p>
* <p>Example code:</p>
* <pre><code>
var Employee = Ext.data.Record.create([
{name: 'name', mapping: 'name'}, // "mapping" property not needed if it is the same as "name"
{name: 'occupation'} // This field will use "occupation" as the mapping.
]);
var myReader = new Ext.data.XmlReader({
totalProperty: "results", // The element which contains the total dataset size (optional)
record: "row", // The repeated element which contains row information
idProperty: "id", // The element within the row that provides an ID for the record (optional)
messageProperty: "msg" // The element within the response that provides a user-feedback message (optional)
}, Employee);
</code></pre>
* <p>
* This would consume an XML file like this:
* <pre><code>
<?xml version="1.0" encoding="UTF-8"?>
<dataset>
<results>2</results>
<row>
<id>1</id>
<name>Bill</name>
<occupation>Gardener</occupation>
</row>
<row>
<id>2</id>
<name>Ben</name>
<occupation>Horticulturalist</occupation>
</row>
</dataset>
</code></pre>
* @cfg {String} totalProperty The DomQuery path from which to retrieve the total number of records
* in the dataset. This is only needed if the whole dataset is not passed in one go, but is being
* paged from the remote server.
* @cfg {String} record The DomQuery path to the repeated element which contains record information.
* @cfg {String} successProperty The DomQuery path to the success attribute used by forms.
* @cfg {String} idPath The DomQuery path relative from the record element to the element that contains
* a record identifier value.
* @constructor
* Create a new XmlReader.
* @param {Object} meta Metadata configuration options
* @param {Object} recordType Either an Array of field definition objects as passed to
* {@link Ext.data.Record#create}, or a Record constructor object created using {@link Ext.data.Record#create}.
*/
Ext.data.XmlReader = function(meta, recordType){
meta = meta || {};
// backwards compat, convert idPath or id / success
Ext.applyIf(meta, {
idProperty: meta.idProperty || meta.idPath || meta.id,
successProperty: meta.successProperty || meta.success
});
Ext.data.XmlReader.superclass.constructor.call(this, meta, recordType || meta.fields);
};
Ext.extend(Ext.data.XmlReader, Ext.data.DataReader, {
/**
* This method is only used by a DataProxy which has retrieved data from a remote server.
* @param {Object} response The XHR object which contains the parsed XML document. The response is expected
* to contain a property called <tt>responseXML</tt> which refers to an XML document object.
* @return {Object} records A data block which is used by an {@link Ext.data.Store} as
* a cache of Ext.data.Records.
*/
read : function(response){
var doc = response.responseXML;
if(!doc) {
throw {message: "XmlReader.read: XML Document not available"};
}
return this.readRecords(doc);
},
/**
* Create a data block containing Ext.data.Records from an XML document.
* @param {Object} doc A parsed XML document.
* @return {Object} records A data block which is used by an {@link Ext.data.Store} as
* a cache of Ext.data.Records.
*/
readRecords : function(doc){
/**
* After any data loads/reads, the raw XML Document is available for further custom processing.
* @type XMLDocument
*/
this.xmlData = doc;
var root = doc.documentElement || doc,
q = Ext.DomQuery,
totalRecords = 0,
success = true;
if(this.meta.totalProperty){
totalRecords = this.getTotal(root, 0);
}
if(this.meta.successProperty) | {
success = this.getSuccess(root);
} | conditional_block |
|
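To make the XmlWriter template above concrete: for a single created record with fields first/last, no baseParams, and forceDocumentRoot left false, the rendered xmlData would look roughly like this (assuming xmlVersion '1.0', xmlEncoding 'UTF-8', and meta.record 'row'; with more than one record the repeated <row> elements are additionally wrapped in the configured root tag):
<?xml version="1.0" encoding="UTF-8"?><row><first>Fred</first><last>Flintstone</last></row>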
data-xml-debug.js | : baseParams,
records: (Ext.isArray(data[0])) ? data : [data]
});
},
/**
* createRecord
* @protected
* @param {Ext.data.Record} rec
* @return {Array} Array of <tt>name:value</tt> pairs for attributes of the {@link Ext.data.Record}. See {@link Ext.data.DataWriter#toHash}.
*/
createRecord : function(rec) {
return this.toArray(this.toHash(rec));
},
/**
* updateRecord
* @protected
* @param {Ext.data.Record} rec
* @return {Array} Array of {name:value} pairs for attributes of the {@link Ext.data.Record}. See {@link Ext.data.DataWriter#toHash}.
*/
updateRecord : function(rec) {
return this.toArray(this.toHash(rec));
},
/**
* destroyRecord
* @protected
* @param {Ext.data.Record} rec
* @return {Array} Array containing an attribute-object (name/value pair) representing the {@link Ext.data.DataReader#idProperty idProperty}.
*/
destroyRecord : function(rec) {
var data = {};
data[this.meta.idProperty] = rec.id;
return this.toArray(data);
}
});
/*!
* Ext JS Library 3.4.0
* Copyright(c) 2006-2011 Sencha Inc.
* [email protected]
* http://www.sencha.com/license
*/
/**
* @class Ext.data.XmlReader
* @extends Ext.data.DataReader
* <p>Data reader class to create an Array of {@link Ext.data.Record} objects from an XML document
* based on mappings in a provided {@link Ext.data.Record} constructor.</p>
* <p><b>Note</b>: that in order for the browser to parse a returned XML document, the Content-Type
* header in the HTTP response must be set to "text/xml" or "application/xml".</p>
* <p>Example code:</p>
* <pre><code>
var Employee = Ext.data.Record.create([
{name: 'name', mapping: 'name'}, // "mapping" property not needed if it is the same as "name"
{name: 'occupation'} // This field will use "occupation" as the mapping.
]);
var myReader = new Ext.data.XmlReader({
totalProperty: "results", // The element which contains the total dataset size (optional)
record: "row", // The repeated element which contains row information
idProperty: "id", // The element within the row that provides an ID for the record (optional)
messageProperty: "msg" // The element within the response that provides a user-feedback message (optional)
}, Employee);
</code></pre>
* <p>
* This would consume an XML file like this:
* <pre><code>
<?xml version="1.0" encoding="UTF-8"?>
<dataset>
<results>2</results>
<row>
<id>1</id>
<name>Bill</name>
<occupation>Gardener</occupation>
</row>
<row>
<id>2</id>
<name>Ben</name>
<occupation>Horticulturalist</occupation>
</row>
</dataset>
</code></pre>
* @cfg {String} totalProperty The DomQuery path from which to retrieve the total number of records
* in the dataset. This is only needed if the whole dataset is not passed in one go, but is being
* paged from the remote server.
* @cfg {String} record The DomQuery path to the repeated element which contains record information.
* @cfg {String} successProperty The DomQuery path to the success attribute used by forms.
* @cfg {String} idPath The DomQuery path relative from the record element to the element that contains
* a record identifier value.
* @constructor
* Create a new XmlReader.
* @param {Object} meta Metadata configuration options
* @param {Object} recordType Either an Array of field definition objects as passed to
* {@link Ext.data.Record#create}, or a Record constructor object created using {@link Ext.data.Record#create}.
*/
Ext.data.XmlReader = function(meta, recordType){
meta = meta || {};
// backwards compat, convert idPath or id / success
Ext.applyIf(meta, {
idProperty: meta.idProperty || meta.idPath || meta.id,
successProperty: meta.successProperty || meta.success
});
Ext.data.XmlReader.superclass.constructor.call(this, meta, recordType || meta.fields);
};
Ext.extend(Ext.data.XmlReader, Ext.data.DataReader, {
/**
* This method is only used by a DataProxy which has retrieved data from a remote server.
* @param {Object} response The XHR object which contains the parsed XML document. The response is expected
* to contain a property called <tt>responseXML</tt> which refers to an XML document object.
* @return {Object} records A data block which is used by an {@link Ext.data.Store} as
* a cache of Ext.data.Records.
*/
read : function(response){
var doc = response.responseXML;
if(!doc) {
throw {message: "XmlReader.read: XML Document not available"};
}
return this.readRecords(doc);
},
/**
* Create a data block containing Ext.data.Records from an XML document.
* @param {Object} doc A parsed XML document.
* @return {Object} records A data block which is used by an {@link Ext.data.Store} as
* a cache of Ext.data.Records.
*/
readRecords : function(doc){
/**
* After any data loads/reads, the raw XML Document is available for further custom processing.
* @type XMLDocument
*/
this.xmlData = doc;
var root = doc.documentElement || doc,
q = Ext.DomQuery,
totalRecords = 0,
success = true;
if(this.meta.totalProperty){
totalRecords = this.getTotal(root, 0);
}
if(this.meta.successProperty){
success = this.getSuccess(root);
}
var records = this.extractData(q.select(this.meta.record, root), true); // <-- true to return Ext.data.Record[]
// TODO return Ext.data.Response instance. @see #readResponse
return {
success : success,
records : records,
totalRecords : totalRecords || records.length
};
},
/**
* Decode an XML response from server.
* @param {String} action [{@link Ext.data.Api#actions} create|read|update|destroy]
* @param {Object} response HTTP Response object from browser.
* @return {Ext.data.Response} An instance of {@link Ext.data.Response}
*/
readResponse : function(action, response) {
var q = Ext.DomQuery,
doc = response.responseXML,
root = doc.documentElement || doc;
// create general Response instance.
var res = new Ext.data.Response({
action: action,
success : this.getSuccess(root),
message: this.getMessage(root),
data: this.extractData(q.select(this.meta.record, root) || q.select(this.meta.root, root), false),
raw: doc
});
if (Ext.isEmpty(res.success)) {
throw new Ext.data.DataReader.Error('successProperty-response', this.meta.successProperty);
}
// Create actions that respond with HTTP status 200 must return the new primary key
if (action === Ext.data.Api.actions.create) {
var def = Ext.isDefined(res.data);
if (def && Ext.isEmpty(res.data)) {
throw new Ext.data.JsonReader.Error('root-empty', this.meta.root);
}
else if (!def) {
throw new Ext.data.JsonReader.Error('root-undefined-response', this.meta.root);
}
}
return res;
},
getSuccess : function() {
return true;
},
/**
* build response-data extractor functions.
* @private
* @ignore
*/
buildExtractors : function() {
if(this.ef){
return;
}
var s = this.meta,
Record = this.recordType,
f = Record.prototype.fields,
fi = f.items,
fl = f.length;
if(s.totalProperty) {
this.getTotal = this.createAccessor(s.totalProperty);
}
if(s.successProperty) {
this.getSuccess = this.createAccessor(s.successProperty);
}
if (s.messageProperty) {
this.getMessage = this.createAccessor(s.messageProperty);
}
this.getRoot = function(res) {
return (!Ext.isEmpty(res[this.meta.record])) ? res[this.meta.record] : res[this.meta.root];
};
if (s.idPath || s.idProperty) {
var g = this.createAccessor(s.idPath || s.idProperty);
this.getId = function(rec) {
var id = g(rec) || rec.id;
return (id === undefined || id === '') ? null : id;
};
} else {
this.getId = function(){return null;};
}
var ef = [];
for(var i = 0; i < fl; i++){
f = fi[i]; | random_line_split |
||
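A usage sketch wiring the reader into a store (Ext JS 3.x conventions; the URL is an example and reuses the Employee reader from the doc comment above):
var store = new Ext.data.Store({
    url: 'employees.xml', // must be served as text/xml or application/xml
    reader: myReader
});
store.load();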
index.tsx | import * as React from 'react'
import * as classnames from 'classnames'
import { A } from '~/components/Typography'
import { ILocation } from '~/models/location'
import { IRouteConfig } from '~/models/route-config'
const s = require('./style.css')
const classes = (currPath: string) => ({ path }: IRouteConfig) => (
classnames({
[s.headerItem]: true,
[s.headerItemSelected]: currPath.replace('/', '') === path || currPath === path,
})
)
const HeaderItem = (currPath: string) => (route: IRouteConfig, i: number) => (
<li className={classes(currPath)(route)} key={i}>
<A to={route.path}>
{route.title}
</A>
</li>
)
| const Header = ({ routes, location }: { routes: IRouteConfig[], location: ILocation }) => (
<nav className={s.nav}>
<ul>
{routes.map(HeaderItem(location.pathname))}
</ul>
</nav>
)
export { Header } | random_line_split |
|
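A usage sketch for the Header component; the route objects are hypothetical and assume IRouteConfig carries only the path and title fields used above, and that ILocation only needs pathname here:
const routes: IRouteConfig[] = [
  { path: 'home', title: 'Home' },
  { path: 'about', title: 'About' },
]
const view = <Header routes={routes} location={{ pathname: '/home' }} />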
kendo.culture.nso-ZA.js | /**
* Copyright 2015 Telerik AD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function(f, define){
define([], f);
})(function(){
(function( window, undefined ) {
var kendo = window.kendo || (window.kendo = { cultures: {} });
kendo.cultures["nso-ZA"] = {
name: "nso-ZA",
numberFormat: {
pattern: ["-n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
percent: {
pattern: ["-%n","%n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "%"
},
currency: {
pattern: ["$-n","$ n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "R"
}
},
calendars: {
standard: {
days: {
names: ["Lamorena","Mošupologo","Labobedi","Laboraro","Labone","Labohlano","Mokibelo"],
namesAbbr: ["Lam","Moš","Lbb","Lbr","Lbn","Lbh","Mok"],
namesShort: ["L","M","L","L","L","L","M"]
},
months: {
names: ["Pherekgong","Hlakola","Mopitlo","Moranang","Mosegamanye","Ngoatobošego","Phuphu","Phato","Lewedi","Diphalana","Dibatsela","Manthole",""],
namesAbbr: ["Pher","Hlak","Mop","Mor","Mos","Ngwat","Phup","Phat","Lew","Dip","Dib","Man",""]
},
AM: ["AM","am","AM"],
PM: ["PM","pm","PM"],
patterns: {
d: "yyyy/MM/dd",
D: "dd MMMM yyyy",
F: "dd MMMM yyyy hh:mm:ss tt",
g: "yyyy/MM/dd hh:mm tt",
G: "yyyy/MM/dd hh:mm:ss tt",
m: "dd MMMM",
M: "dd MMMM",
s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
t: "hh:mm tt",
T: "hh:mm:ss tt",
u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
y: "MMMM yyyy",
Y: "MMMM yyyy"
},
"/": "/",
":": ":",
firstDay: 0
}
}
}
})(this);
|
}, typeof define == 'function' && define.amd ? define : function(_, f){ f(); }); | return window.kendo; | random_line_split |
build.rs | // Copyright (C) 2016 ParadoxSpiral
//
// This file is part of mpv-rs.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#[cfg(feature = "build_libmpv")]
use std::env;
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
use std::process::Command;
#[cfg(not(feature = "build_libmpv"))]
fn main() {}
#[cfg(all(feature = "build_libmpv", target_os = "windows"))]
fn main() { | let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
println!("cargo:rustc-link-search={}/64/", source);
} else {
println!("cargo:rustc-link-search={}/32/", source);
}
}
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
fn main() {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
let num_threads = env::var("NUM_JOBS").unwrap();
// `target` (in cfg) doesn't really mean target. It means target(host) of build script,
// which is a bit confusing because it means the actual `--target` everywhere else.
#[cfg(target_pointer_width = "64")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "32" {
panic!("Cross-compiling to different arch not yet supported");
}
}
#[cfg(target_pointer_width = "32")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
panic!("Cross-compiling to different arch not yet supported");
}
}
// The mpv build script interprets the TARGET env var, which is set by cargo to e.g.
// x86_64-unknown-linux-gnu, thus the script can't find the compiler.
// TODO: When Cross-compiling to different archs is implemented, this has to be handled.
env::remove_var("TARGET");
let cmd = format!(
"cd {} && echo \"--enable-libmpv-shared\" > {0}/mpv_options \
&& {0}/build -j{}",
source, num_threads
);
Command::new("sh")
.arg("-c")
.arg(&cmd)
.spawn()
.expect("mpv-build build failed")
.wait()
.expect("mpv-build build failed");
println!("cargo:rustc-link-search={}/mpv/build/", source);
} | random_line_split |
|
build.rs | // Copyright (C) 2016 ParadoxSpiral
//
// This file is part of mpv-rs.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#[cfg(feature = "build_libmpv")]
use std::env;
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
use std::process::Command;
#[cfg(not(feature = "build_libmpv"))]
fn main() {}
#[cfg(all(feature = "build_libmpv", target_os = "windows"))]
fn | () {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
println!("cargo:rustc-link-search={}/64/", source);
} else {
println!("cargo:rustc-link-search={}/32/", source);
}
}
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
fn main() {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
let num_threads = env::var("NUM_JOBS").unwrap();
// `target` (in cfg) doesn't really mean target. It means target(host) of build script,
// which is a bit confusing because it means the actual `--target` everywhere else.
#[cfg(target_pointer_width = "64")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "32" {
panic!("Cross-compiling to different arch not yet supported");
}
}
#[cfg(target_pointer_width = "32")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
panic!("Cross-compiling to different arch not yet supported");
}
}
// The mpv build script interprets the TARGET env var, which is set by cargo to e.g.
// x86_64-unknown-linux-gnu, thus the script can't find the compiler.
// TODO: When Cross-compiling to different archs is implemented, this has to be handled.
env::remove_var("TARGET");
let cmd = format!(
"cd {} && echo \"--enable-libmpv-shared\" > {0}/mpv_options \
&& {0}/build -j{}",
source, num_threads
);
Command::new("sh")
.arg("-c")
.arg(&cmd)
.spawn()
.expect("mpv-build build failed")
.wait()
.expect("mpv-build build failed");
println!("cargo:rustc-link-search={}/mpv/build/", source);
}
| main | identifier_name |
build.rs | // Copyright (C) 2016 ParadoxSpiral
//
// This file is part of mpv-rs.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#[cfg(feature = "build_libmpv")]
use std::env;
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
use std::process::Command;
#[cfg(not(feature = "build_libmpv"))]
fn main() {}
#[cfg(all(feature = "build_libmpv", target_os = "windows"))]
fn main() {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
println!("cargo:rustc-link-search={}/64/", source);
} else {
println!("cargo:rustc-link-search={}/32/", source);
}
}
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
fn main() | // x86_64-unknown-linux-gnu, thus the script can't find the compiler.
// TODO: When Cross-compiling to different archs is implemented, this has to be handled.
env::remove_var("TARGET");
let cmd = format!(
"cd {} && echo \"--enable-libmpv-shared\" > {0}/mpv_options \
&& {0}/build -j{}",
source, num_threads
);
Command::new("sh")
.arg("-c")
.arg(&cmd)
.spawn()
.expect("mpv-build build failed")
.wait()
.expect("mpv-build build failed");
println!("cargo:rustc-link-search={}/mpv/build/", source);
}
| {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
let num_threads = env::var("NUM_JOBS").unwrap();
// `target` (in cfg) doesn't really mean target. It means target(host) of build script,
// which is a bit confusing because it means the actual `--target` everywhere else.
#[cfg(target_pointer_width = "64")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "32" {
panic!("Cross-compiling to different arch not yet supported");
}
}
#[cfg(target_pointer_width = "32")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
panic!("Cross-compiling to different arch not yet supported");
}
}
// The mpv build script interprets the TARGET env var, which is set by cargo to e.g. | identifier_body |
build.rs | // Copyright (C) 2016 ParadoxSpiral
//
// This file is part of mpv-rs.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#[cfg(feature = "build_libmpv")]
use std::env;
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
use std::process::Command;
#[cfg(not(feature = "build_libmpv"))]
fn main() {}
#[cfg(all(feature = "build_libmpv", target_os = "windows"))]
fn main() {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
println!("cargo:rustc-link-search={}/64/", source);
} else {
println!("cargo:rustc-link-search={}/32/", source);
}
}
#[cfg(all(feature = "build_libmpv", not(target_os = "windows")))]
fn main() {
let source = env::var("MPV_SOURCE").expect("env var `MPV_SOURCE` not set");
let num_threads = env::var("NUM_JOBS").unwrap();
// `target` (in cfg) doesn't really mean target. It means target(host) of build script,
// which is a bit confusing because it means the actual `--target` everywhere else.
#[cfg(target_pointer_width = "64")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "32" |
}
#[cfg(target_pointer_width = "32")]
{
if env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap() == "64" {
panic!("Cross-compiling to different arch not yet supported");
}
}
// The mpv build script interprets the TARGET env var, which is set by cargo to e.g.
// x86_64-unknown-linux-gnu, thus the script can't find the compiler.
// TODO: When Cross-compiling to different archs is implemented, this has to be handled.
env::remove_var("TARGET");
let cmd = format!(
"cd {} && echo \"--enable-libmpv-shared\" > {0}/mpv_options \
&& {0}/build -j{}",
source, num_threads
);
Command::new("sh")
.arg("-c")
.arg(&cmd)
.spawn()
.expect("mpv-build build failed")
.wait()
.expect("mpv-build build failed");
println!("cargo:rustc-link-search={}/mpv/build/", source);
}
| {
panic!("Cross-compiling to different arch not yet supported");
} | conditional_block |
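A sketch of how the build script is driven; the checkout path is hypothetical. On Windows, MPV_SOURCE must contain the prebuilt 32/64 libmpv directories; elsewhere it must point at an mpv-build checkout:
MPV_SOURCE=$HOME/src/mpv-build cargo build --features build_libmpv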
launchtree_loader.py | #!/usr/bin/env python
import sys
from roslaunch.xmlloader import XmlLoader, loader
from rosgraph.names import get_ros_namespace
from rqt_launchtree.launchtree_context import LaunchtreeContext
class LaunchtreeLoader(XmlLoader):
def _include_tag(self, tag, context, ros_config, default_machine, is_core, verbose):
inc_filename = self.resolve_args(tag.attributes['file'].value, context)
ros_config.push_level(inc_filename, unique=True)
result = super(LaunchtreeLoader, self)._include_tag(tag, context, ros_config, default_machine, is_core, verbose)
ros_config.pop_level()
return result
def | (self, tag, context, ros_config, default_machine, is_test=False, verbose=True):
try:
if is_test:
self._check_attrs(tag, context, ros_config, XmlLoader.TEST_ATTRS)
(name,) = self.opt_attrs(tag, context, ('name',))
test_name, time_limit, retry = self._test_attrs(tag, context)
if not name:
name = test_name
else:
self._check_attrs(tag, context, ros_config, XmlLoader.NODE_ATTRS)
(name,) = self.reqd_attrs(tag, context, ('name',))
except Exception as e:
pass # will be handled in super
ros_config.push_level(name)
result = super(LaunchtreeLoader, self)._node_tag(tag, context, ros_config, default_machine, is_test, verbose)
ros_config.pop_level()
return result
def _rosparam_tag(self, tag, context, ros_config, verbose):
param_file = tag.attributes['file'].value \
if tag.attributes.has_key('file') else ''
if param_file != '':
param_filename = self.resolve_args(param_file, context)
level_name = ros_config.push_level(param_filename, unique=True)
result = super(LaunchtreeLoader, self)._rosparam_tag(tag, context, ros_config, verbose)
if param_file != '':
ros_config.pop_level()
context.add_rosparam(tag.attributes.get('command', 'load'), param_filename, level_name)
return result
def _load_launch(self, launch, ros_config, is_core=False, filename=None, argv=None, verbose=True):
if argv is None:
argv = sys.argv
self._launch_tag(launch, ros_config, filename)
self.root_context = LaunchtreeContext(get_ros_namespace(), filename, config=ros_config)
loader.load_sysargs_into_context(self.root_context, argv)
if len(launch.getElementsByTagName('master')) > 0:
print "WARNING: ignoring defunct <master /> tag"
self._recurse_load(ros_config, launch.childNodes, self.root_context, None, is_core, verbose)
| _node_tag | identifier_name |
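A usage sketch for the customized loader. XmlLoader.load() takes a filename and a roslaunch config object, and the overrides above additionally call push_level()/pop_level(), which the stock ROSLaunchConfig does not provide; the LaunchtreeConfig import below is therefore an assumption about the companion class shipped with rqt_launchtree:
# Hypothetical driver; LaunchtreeConfig is assumed to implement
# push_level()/pop_level() on top of roslaunch's ROSLaunchConfig.
from rqt_launchtree.launchtree_config import LaunchtreeConfig
config = LaunchtreeConfig()
loader = LaunchtreeLoader()
loader.load('/opt/ros/example/demo.launch', config, verbose=False)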
launchtree_loader.py | #!/usr/bin/env python
import sys
from roslaunch.xmlloader import XmlLoader, loader
from rosgraph.names import get_ros_namespace
from rqt_launchtree.launchtree_context import LaunchtreeContext
class LaunchtreeLoader(XmlLoader):
def _include_tag(self, tag, context, ros_config, default_machine, is_core, verbose):
inc_filename = self.resolve_args(tag.attributes['file'].value, context)
ros_config.push_level(inc_filename, unique=True)
result = super(LaunchtreeLoader, self)._include_tag(tag, context, ros_config, default_machine, is_core, verbose)
ros_config.pop_level()
return result
def _node_tag(self, tag, context, ros_config, default_machine, is_test=False, verbose=True):
try:
if is_test:
self._check_attrs(tag, context, ros_config, XmlLoader.TEST_ATTRS)
(name,) = self.opt_attrs(tag, context, ('name',))
test_name, time_limit, retry = self._test_attrs(tag, context)
if not name:
name = test_name
else:
self._check_attrs(tag, context, ros_config, XmlLoader.NODE_ATTRS)
(name,) = self.reqd_attrs(tag, context, ('name',))
except Exception as e:
pass # will be handled in super
ros_config.push_level(name)
result = super(LaunchtreeLoader, self)._node_tag(tag, context, ros_config, default_machine, is_test, verbose)
ros_config.pop_level()
return result
def _rosparam_tag(self, tag, context, ros_config, verbose):
param_file = tag.attributes['file'].value \
if tag.attributes.has_key('file') else ''
if param_file != '':
param_filename = self.resolve_args(param_file, context)
level_name = ros_config.push_level(param_filename, unique=True)
result = super(LaunchtreeLoader, self)._rosparam_tag(tag, context, ros_config, verbose)
if param_file != '':
ros_config.pop_level()
context.add_rosparam(tag.attributes.get('command', 'load'), param_filename, level_name)
return result
def _load_launch(self, launch, ros_config, is_core=False, filename=None, argv=None, verbose=True):
| if argv is None:
argv = sys.argv
self._launch_tag(launch, ros_config, filename)
self.root_context = LaunchtreeContext(get_ros_namespace(), filename, config=ros_config)
loader.load_sysargs_into_context(self.root_context, argv)
if len(launch.getElementsByTagName('master')) > 0:
print "WARNING: ignoring defunct <master /> tag"
self._recurse_load(ros_config, launch.childNodes, self.root_context, None, is_core, verbose) | identifier_body |
|
launchtree_loader.py | #!/usr/bin/env python
import sys
from roslaunch.xmlloader import XmlLoader, loader
from rosgraph.names import get_ros_namespace
from rqt_launchtree.launchtree_context import LaunchtreeContext
class LaunchtreeLoader(XmlLoader):
def _include_tag(self, tag, context, ros_config, default_machine, is_core, verbose):
inc_filename = self.resolve_args(tag.attributes['file'].value, context)
ros_config.push_level(inc_filename, unique=True)
result = super(LaunchtreeLoader, self)._include_tag(tag, context, ros_config, default_machine, is_core, verbose)
ros_config.pop_level()
return result
def _node_tag(self, tag, context, ros_config, default_machine, is_test=False, verbose=True):
try:
if is_test:
self._check_attrs(tag, context, ros_config, XmlLoader.TEST_ATTRS)
(name,) = self.opt_attrs(tag, context, ('name',))
test_name, time_limit, retry = self._test_attrs(tag, context)
if not name:
name = test_name
else:
self._check_attrs(tag, context, ros_config, XmlLoader.NODE_ATTRS) | except Exception as e:
pass # will be handled in super
ros_config.push_level(name)
result = super(LaunchtreeLoader, self)._node_tag(tag, context, ros_config, default_machine, is_test, verbose)
ros_config.pop_level()
return result
def _rosparam_tag(self, tag, context, ros_config, verbose):
param_file = tag.attributes['file'].value \
if tag.attributes.has_key('file') else ''
if param_file != '':
param_filename = self.resolve_args(param_file, context)
level_name = ros_config.push_level(param_filename, unique=True)
result = super(LaunchtreeLoader, self)._rosparam_tag(tag, context, ros_config, verbose)
if param_file != '':
ros_config.pop_level()
context.add_rosparam(tag.attributes.get('command', 'load'), param_filename, level_name)
return result
def _load_launch(self, launch, ros_config, is_core=False, filename=None, argv=None, verbose=True):
if argv is None:
argv = sys.argv
self._launch_tag(launch, ros_config, filename)
self.root_context = LaunchtreeContext(get_ros_namespace(), filename, config=ros_config)
loader.load_sysargs_into_context(self.root_context, argv)
if len(launch.getElementsByTagName('master')) > 0:
print "WARNING: ignoring defunct <master /> tag"
self._recurse_load(ros_config, launch.childNodes, self.root_context, None, is_core, verbose) | (name,) = self.reqd_attrs(tag, context, ('name',)) | random_line_split |
launchtree_loader.py | #!/usr/bin/env python
import sys
from roslaunch.xmlloader import XmlLoader, loader
from rosgraph.names import get_ros_namespace
from rqt_launchtree.launchtree_context import LaunchtreeContext
class LaunchtreeLoader(XmlLoader):
def _include_tag(self, tag, context, ros_config, default_machine, is_core, verbose):
inc_filename = self.resolve_args(tag.attributes['file'].value, context)
ros_config.push_level(inc_filename, unique=True)
result = super(LaunchtreeLoader, self)._include_tag(tag, context, ros_config, default_machine, is_core, verbose)
ros_config.pop_level()
return result
def _node_tag(self, tag, context, ros_config, default_machine, is_test=False, verbose=True):
try:
if is_test:
self._check_attrs(tag, context, ros_config, XmlLoader.TEST_ATTRS)
(name,) = self.opt_attrs(tag, context, ('name',))
test_name, time_limit, retry = self._test_attrs(tag, context)
if not name:
name = test_name
else:
self._check_attrs(tag, context, ros_config, XmlLoader.NODE_ATTRS)
(name,) = self.reqd_attrs(tag, context, ('name',))
except Exception as e:
pass # will be handled in super
ros_config.push_level(name)
result = super(LaunchtreeLoader, self)._node_tag(tag, context, ros_config, default_machine, is_test, verbose)
ros_config.pop_level()
return result
def _rosparam_tag(self, tag, context, ros_config, verbose):
param_file = tag.attributes['file'].value \
if tag.attributes.has_key('file') else ''
if param_file != '':
|
result = super(LaunchtreeLoader, self)._rosparam_tag(tag, context, ros_config, verbose)
if param_file != '':
ros_config.pop_level()
context.add_rosparam(tag.attributes.get('command', 'load'), param_filename, level_name)
return result
def _load_launch(self, launch, ros_config, is_core=False, filename=None, argv=None, verbose=True):
if argv is None:
argv = sys.argv
self._launch_tag(launch, ros_config, filename)
self.root_context = LaunchtreeContext(get_ros_namespace(), filename, config=ros_config)
loader.load_sysargs_into_context(self.root_context, argv)
if len(launch.getElementsByTagName('master')) > 0:
print "WARNING: ignoring defunct <master /> tag"
self._recurse_load(ros_config, launch.childNodes, self.root_context, None, is_core, verbose)
| param_filename = self.resolve_args(param_file, context)
level_name = ros_config.push_level(param_filename, unique=True) | conditional_block |
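Each record above and below follows the same fill-in-the-middle layout: a source file name, the code before the hole, the code after it, the held-out middle, and a split-type label such as conditional_block or identifier_name, joined by ` | ` separators (long fields are truncated in this preview). As a minimal sketch — the prefix/middle/suffix field names are an assumption based on that layout — a record can be reassembled into the original source like this:

def reassemble(record):
    # concatenating the three code fields restores the original file text
    return record['prefix'] + record['middle'] + record['suffix']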
exportWCON.py | import WormStats
from tierpsy.helper.params import read_unit_conversions, read_ventral_side, read_fps
def getWCONMetaData(fname, READ_FEATURES=False, provenance_step='FEAT_CREATE'):
def _order_metadata(metadata_dict):
ordered_fields = ['strain', 'timestamp', 'gene', 'chromosome', 'allele',
'strain_description', 'sex', 'stage', 'ventral_side', 'media', 'arena', 'food',
'habituation', 'who', 'protocol', 'lab', 'software']
extra_fields = metadata_dict.keys() - set(ordered_fields)
ordered_fields += sorted(extra_fields)
ordered_metadata = OrderedDict()
for field in ordered_fields:
if field in metadata_dict:
ordered_metadata[field] = metadata_dict[field]
return ordered_metadata
with tables.File(fname, 'r') as fid:
if not '/experiment_info' in fid:
experiment_info = {}
else:
experiment_info = fid.get_node('/experiment_info').read()
experiment_info = json.loads(experiment_info.decode('utf-8'))
provenance_tracking = fid.get_node('/provenance_tracking/' + provenance_step).read()
provenance_tracking = json.loads(provenance_tracking.decode('utf-8'))
commit_hash = provenance_tracking['commit_hash']
if 'tierpsy' in commit_hash:
tierpsy_version = commit_hash['tierpsy']
else:
tierpsy_version = commit_hash['MWTracker']
MWTracker_ver = {"name":"tierpsy (https://github.com/ver228/tierpsy-tracker)",
"version": tierpsy_version,
"featureID":"@OMG"}
if not READ_FEATURES:
experiment_info["software"] = MWTracker_ver
else:
#add open_worm_analysis_toolbox info and save as a list of "softwares"
open_worm_ver = {"name":"open_worm_analysis_toolbox (https://github.com/openworm/open-worm-analysis-toolbox)",
"version":commit_hash['open_worm_analysis_toolbox'],
"featureID":""}
experiment_info["software"] = [MWTracker_ver, open_worm_ver]
return _order_metadata(experiment_info)
def __reformatForJson(A):
if isinstance(A, (int, float)):
return A
good = ~np.isnan(A) & (A != 0)
dd = A[good]
if dd.size > 0:
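# heuristic: derive a rounding precision (at least 2 decimals) from the
# values' orders of magnitude so the serialized JSON stays compact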
dd = np.abs(np.floor(np.log10(np.abs(dd)))-2)
precision = max(2, int(np.min(dd)))
A = np.round(A.astype(np.float64), precision)
A = np.where(np.isnan(A), None, A)
#the WCON specification requires returning a single number when the list has only one element
if A.size == 1:
return A[0]
else:
return A.tolist()
def __addOMGFeat(fid, worm_feat_time, worm_id):
worm_features = OrderedDict()
#add time series features
for col_name, col_dat in worm_feat_time.iteritems():
if not col_name in ['worm_index', 'timestamp']:
worm_features[col_name] = col_dat.values
worm_path = '/features_events/worm_%i' % worm_id
worm_node = fid.get_node(worm_path)
#add event features
for feature_name in worm_node._v_children:
feature_path = worm_path + '/' + feature_name
worm_features[feature_name] = fid.get_node(feature_path)[:]
return worm_features
def _get_ventral_side(features_file):
ventral_side = read_ventral_side(features_file)
if not ventral_side or ventral_side == 'unknown':
ventral_type = '?'
else:
#we will merge the ventral and dorsal contours so the ventral contour is clockwise
ventral_type='CW'
return ventral_type
def _getData(features_file, READ_FEATURES=False, IS_FOR_WCON=True):
if IS_FOR_WCON:
lab_prefix = '@OMG '
else:
lab_prefix = ''
with pd.HDFStore(features_file, 'r') as fid:
if not '/features_timeseries' in fid:
return {} #empty file nothing to do here
features_timeseries = fid['/features_timeseries']
feat_time_group_by_worm = features_timeseries.groupby('worm_index');
ventral_side = _get_ventral_side(features_file)
with tables.File(features_file, 'r') as fid:
#fps used to adjust timestamp to real time
fps = read_fps(features_file)
#get pointers to some useful data
skeletons = fid.get_node('/coordinates/skeletons')
dorsal_contours = fid.get_node('/coordinates/dorsal_contours')
ventral_contours = fid.get_node('/coordinates/ventral_contours')
#let's append the data of each individual worm as an element in a list
all_worms_feats = []
#group by iterator will return sorted worm indexes
for worm_id, worm_feat_time in feat_time_group_by_worm:
worm_id = int(worm_id)
#read worm skeletons data
worm_skel = skeletons[worm_feat_time.index] | #start ordered dictionary with the basic features
worm_basic = OrderedDict()
worm_basic['id'] = str(worm_id)
worm_basic['head'] = 'L'
worm_basic['ventral'] = ventral_side
worm_basic['ptail'] = worm_ven_cnt.shape[1]-1 #index starting with 0
worm_basic['t'] = worm_feat_time['timestamp'].values/fps #convert from frames to seconds
worm_basic['x'] = worm_skel[:, :, 0]
worm_basic['y'] = worm_skel[:, :, 1]
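# merge the ventral contour with the reversed dorsal contour into a single
# closed outline before splitting it into the px/py columns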
contour = np.hstack((worm_ven_cnt, worm_dor_cnt[:, ::-1, :]))
worm_basic['px'] = contour[:, :, 0]
worm_basic['py'] = contour[:, :, 1]
if READ_FEATURES:
worm_features = __addOMGFeat(fid, worm_feat_time, worm_id)
for feat in worm_features:
worm_basic[lab_prefix + feat] = worm_features[feat]
if IS_FOR_WCON:
for x in worm_basic:
if not x in ['id', 'head', 'ventral', 'ptail']:
worm_basic[x] = __reformatForJson(worm_basic[x])
#append features
all_worms_feats.append(worm_basic)
return all_worms_feats
def _getUnits(features_file, READ_FEATURES=False):
fps_out, microns_per_pixel_out, _ = read_unit_conversions(features_file)
xy_units = microns_per_pixel_out[1]
time_units = fps_out[2]
units = OrderedDict()
units["size"] = "mm" #size of the plate
units['t'] = time_units #frames or seconds
for field in ['x', 'y', 'px', 'py']:
units[field] = xy_units #(pixels or micrometers)
if READ_FEATURES:
#TODO how to change microns to pixels when required
ws = WormStats()
for field, unit in ws.features_info['units'].iteritems():
units['@OMG ' + field] = unit
return units
def exportWCONdict(features_file, READ_FEATURES=False):
metadata = getWCONMetaData(features_file, READ_FEATURES)
data = _getData(features_file, READ_FEATURES)
units = _getUnits(features_file, READ_FEATURES)
#units = {x:units[x].replace('degrees', '1') for x in units}
#units = {x:units[x].replace('radians', '1') for x in units}
wcon_dict = OrderedDict()
wcon_dict['metadata'] = metadata
wcon_dict['units'] = units
wcon_dict['data'] = data
return wcon_dict
def getWCOName(features_file):
return features_file.replace('_features.hdf5', '.wcon.zip')
def exportWCON(features_file, READ_FEATURES=False):
base_name = os.path.basename(features_file).replace('_features.hdf5', '')
print_flush("{} Exporting data to WCON...".format(base_name))
wcon_dict = exportWCONdict(features_file, READ_FEATURES)
wcon_file = getWCOName(features_file)
#with gzip.open(wcon_file, 'wt') as fid:
# json.dump(wcon_dict, fid, allow_nan=False)
with zipfile.ZipFile(wcon_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
zip_name = os.path.basename(wcon_file).replace('.zip', '')
wcon_txt = json.dumps(wcon_dict, allow_nan=False, separators=(',', ':'))
zf.writestr(zip_name, wcon_txt)
print_flush("{} Finised to export to WCON.".format(base_name))
if __name__ == '__main__':
features_file = '/Users/ajaver/OneDrive - Imperial College London/Local_Videos/single | worm_dor_cnt = dorsal_contours[worm_feat_time.index]
worm_ven_cnt = ventral_contours[worm_feat_time.index]
| random_line_split |
exportWCON.py | import WormStats
from tierpsy.helper.params import read_unit_conversions, read_ventral_side, read_fps
def getWCONMetaData(fname, READ_FEATURES=False, provenance_step='FEAT_CREATE'):
def _order_metadata(metadata_dict):
ordered_fields = ['strain', 'timestamp', 'gene', 'chromosome', 'allele',
'strain_description', 'sex', 'stage', 'ventral_side', 'media', 'arena', 'food',
'habituation', 'who', 'protocol', 'lab', 'software']
extra_fields = metadata_dict.keys() - set(ordered_fields)
ordered_fields += sorted(extra_fields)
ordered_metadata = OrderedDict()
for field in ordered_fields:
if field in metadata_dict:
ordered_metadata[field] = metadata_dict[field]
return ordered_metadata
with tables.File(fname, 'r') as fid:
if not '/experiment_info' in fid:
experiment_info = {}
else:
experiment_info = fid.get_node('/experiment_info').read()
experiment_info = json.loads(experiment_info.decode('utf-8'))
provenance_tracking = fid.get_node('/provenance_tracking/' + provenance_step).read()
provenance_tracking = json.loads(provenance_tracking.decode('utf-8'))
commit_hash = provenance_tracking['commit_hash']
if 'tierpsy' in commit_hash:
tierpsy_version = commit_hash['tierpsy']
else:
tierpsy_version = commit_hash['MWTracker']
MWTracker_ver = {"name":"tierpsy (https://github.com/ver228/tierpsy-tracker)",
"version": tierpsy_version,
"featureID":"@OMG"}
if not READ_FEATURES:
experiment_info["software"] = MWTracker_ver
else:
#add open_worm_analysis_toolbox info and save as a list of "softwares"
open_worm_ver = {"name":"open_worm_analysis_toolbox (https://github.com/openworm/open-worm-analysis-toolbox)",
"version":commit_hash['open_worm_analysis_toolbox'],
"featureID":""}
experiment_info["software"] = [MWTracker_ver, open_worm_ver]
return _order_metadata(experiment_info)
def __reformatForJson(A):
if isinstance(A, (int, float)):
return A
good = ~np.isnan(A) & (A != 0)
dd = A[good]
if dd.size > 0:
dd = np.abs(np.floor(np.log10(np.abs(dd)))-2)
precision = max(2, int(np.min(dd)))
A = np.round(A.astype(np.float64), precision)
A = np.where(np.isnan(A), None, A)
#the WCON specification requires returning a single number when the list has only one element
if A.size == 1:
return A[0]
else:
return A.tolist()
def __addOMGFeat(fid, worm_feat_time, worm_id):
worm_features = OrderedDict()
#add time series features
for col_name, col_dat in worm_feat_time.iteritems():
if not col_name in ['worm_index', 'timestamp']:
worm_features[col_name] = col_dat.values
worm_path = '/features_events/worm_%i' % worm_id
worm_node = fid.get_node(worm_path)
#add event features
for feature_name in worm_node._v_children:
|
return worm_features
def _get_ventral_side(features_file):
ventral_side = read_ventral_side(features_file)
if not ventral_side or ventral_side == 'unknown':
ventral_type = '?'
else:
#we will merge the ventral and dorsal contours so the ventral contour is clockwise
ventral_type='CW'
return ventral_type
def _getData(features_file, READ_FEATURES=False, IS_FOR_WCON=True):
if IS_FOR_WCON:
lab_prefix = '@OMG '
else:
lab_prefix = ''
with pd.HDFStore(features_file, 'r') as fid:
if not '/features_timeseries' in fid:
return {} #empty file nothing to do here
features_timeseries = fid['/features_timeseries']
feat_time_group_by_worm = features_timeseries.groupby('worm_index');
ventral_side = _get_ventral_side(features_file)
with tables.File(features_file, 'r') as fid:
#fps used to adjust timestamp to real time
fps = read_fps(features_file)
#get pointers to some useful data
skeletons = fid.get_node('/coordinates/skeletons')
dorsal_contours = fid.get_node('/coordinates/dorsal_contours')
ventral_contours = fid.get_node('/coordinates/ventral_contours')
#let's append the data of each individual worm as an element in a list
all_worms_feats = []
#group by iterator will return sorted worm indexes
for worm_id, worm_feat_time in feat_time_group_by_worm:
worm_id = int(worm_id)
#read worm skeletons data
worm_skel = skeletons[worm_feat_time.index]
worm_dor_cnt = dorsal_contours[worm_feat_time.index]
worm_ven_cnt = ventral_contours[worm_feat_time.index]
#start ordered dictionary with the basic features
worm_basic = OrderedDict()
worm_basic['id'] = str(worm_id)
worm_basic['head'] = 'L'
worm_basic['ventral'] = ventral_side
worm_basic['ptail'] = worm_ven_cnt.shape[1]-1 #index starting with 0
worm_basic['t'] = worm_feat_time['timestamp'].values/fps #convert from frames to seconds
worm_basic['x'] = worm_skel[:, :, 0]
worm_basic['y'] = worm_skel[:, :, 1]
contour = np.hstack((worm_ven_cnt, worm_dor_cnt[:, ::-1, :]))
worm_basic['px'] = contour[:, :, 0]
worm_basic['py'] = contour[:, :, 1]
if READ_FEATURES:
worm_features = __addOMGFeat(fid, worm_feat_time, worm_id)
for feat in worm_features:
worm_basic[lab_prefix + feat] = worm_features[feat]
if IS_FOR_WCON:
for x in worm_basic:
if not x in ['id', 'head', 'ventral', 'ptail']:
worm_basic[x] = __reformatForJson(worm_basic[x])
#append features
all_worms_feats.append(worm_basic)
return all_worms_feats
def _getUnits(features_file, READ_FEATURES=False):
fps_out, microns_per_pixel_out, _ = read_unit_conversions(features_file)
xy_units = microns_per_pixel_out[1]
time_units = fps_out[2]
units = OrderedDict()
units["size"] = "mm" #size of the plate
units['t'] = time_units #frames or seconds
for field in ['x', 'y', 'px', 'py']:
units[field] = xy_units #(pixels or micrometers)
if READ_FEATURES:
#TODO how to change microns to pixels when required
ws = WormStats()
for field, unit in ws.features_info['units'].iteritems():
units['@OMG ' + field] = unit
return units
def exportWCONdict(features_file, READ_FEATURES=False):
metadata = getWCONMetaData(features_file, READ_FEATURES)
data = _getData(features_file, READ_FEATURES)
units = _getUnits(features_file, READ_FEATURES)
#units = {x:units[x].replace('degrees', '1') for x in units}
#units = {x:units[x].replace('radians', '1') for x in units}
wcon_dict = OrderedDict()
wcon_dict['metadata'] = metadata
wcon_dict['units'] = units
wcon_dict['data'] = data
return wcon_dict
def getWCOName(features_file):
return features_file.replace('_features.hdf5', '.wcon.zip')
def exportWCON(features_file, READ_FEATURES=False):
base_name = os.path.basename(features_file).replace('_features.hdf5', '')
print_flush("{} Exporting data to WCON...".format(base_name))
wcon_dict = exportWCONdict(features_file, READ_FEATURES)
wcon_file = getWCOName(features_file)
#with gzip.open(wcon_file, 'wt') as fid:
# json.dump(wcon_dict, fid, allow_nan=False)
with zipfile.ZipFile(wcon_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
zip_name = os.path.basename(wcon_file).replace('.zip', '')
wcon_txt = json.dumps(wcon_dict, allow_nan=False, separators=(',', ':'))
zf.writestr(zip_name, wcon_txt)
print_flush("{} Finised to export to WCON.".format(base_name))
if __name__ == '__main__':
features_file = '/Users/ajaver/OneDrive - Imperial College London/Local_Videos/single_w | feature_path = worm_path + '/' + feature_name
worm_features[feature_name] = fid.get_node(feature_path)[:] | conditional_block |
exportWCON.py | import WormStats
from tierpsy.helper.params import read_unit_conversions, read_ventral_side, read_fps
def getWCONMetaData(fname, READ_FEATURES=False, provenance_step='FEAT_CREATE'):
def _order_metadata(metadata_dict):
ordered_fields = ['strain', 'timestamp', 'gene', 'chromosome', 'allele',
'strain_description', 'sex', 'stage', 'ventral_side', 'media', 'arena', 'food',
'habituation', 'who', 'protocol', 'lab', 'software']
extra_fields = metadata_dict.keys() - set(ordered_fields)
ordered_fields += sorted(extra_fields)
ordered_metadata = OrderedDict()
for field in ordered_fields:
if field in metadata_dict:
ordered_metadata[field] = metadata_dict[field]
return ordered_metadata
with tables.File(fname, 'r') as fid:
if not '/experiment_info' in fid:
experiment_info = {}
else:
experiment_info = fid.get_node('/experiment_info').read()
experiment_info = json.loads(experiment_info.decode('utf-8'))
provenance_tracking = fid.get_node('/provenance_tracking/' + provenance_step).read()
provenance_tracking = json.loads(provenance_tracking.decode('utf-8'))
commit_hash = provenance_tracking['commit_hash']
if 'tierpsy' in commit_hash:
tierpsy_version = commit_hash['tierpsy']
else:
tierpsy_version = commit_hash['MWTracker']
MWTracker_ver = {"name":"tierpsy (https://github.com/ver228/tierpsy-tracker)",
"version": tierpsy_version,
"featureID":"@OMG"}
if not READ_FEATURES:
experiment_info["software"] = MWTracker_ver
else:
#add open_worm_analysis_toolbox info and save as a list of "softwares"
open_worm_ver = {"name":"open_worm_analysis_toolbox (https://github.com/openworm/open-worm-analysis-toolbox)",
"version":commit_hash['open_worm_analysis_toolbox'],
"featureID":""}
experiment_info["software"] = [MWTracker_ver, open_worm_ver]
return _order_metadata(experiment_info)
def __reformatForJson(A):
if isinstance(A, (int, float)):
return A
good = ~np.isnan(A) & (A != 0)
dd = A[good]
if dd.size > 0:
dd = np.abs(np.floor(np.log10(np.abs(dd)))-2)
precision = max(2, int(np.min(dd)))
A = np.round(A.astype(np.float64), precision)
A = np.where(np.isnan(A), None, A)
#the WCON specification requires returning a single number when the list has only one element
if A.size == 1:
return A[0]
else:
return A.tolist()
def __addOMGFeat(fid, worm_feat_time, worm_id):
|
def _get_ventral_side(features_file):
ventral_side = read_ventral_side(features_file)
if not ventral_side or ventral_side == 'unknown':
ventral_type = '?'
else:
#we will merge the ventral and dorsal contours so the ventral contour is clockwise
ventral_type='CW'
return ventral_type
def _getData(features_file, READ_FEATURES=False, IS_FOR_WCON=True):
if IS_FOR_WCON:
lab_prefix = '@OMG '
else:
lab_prefix = ''
with pd.HDFStore(features_file, 'r') as fid:
if not '/features_timeseries' in fid:
return {} #empty file nothing to do here
features_timeseries = fid['/features_timeseries']
feat_time_group_by_worm = features_timeseries.groupby('worm_index');
ventral_side = _get_ventral_side(features_file)
with tables.File(features_file, 'r') as fid:
#fps used to adjust timestamp to real time
fps = read_fps(features_file)
#get pointers to some useful data
skeletons = fid.get_node('/coordinates/skeletons')
dorsal_contours = fid.get_node('/coordinates/dorsal_contours')
ventral_contours = fid.get_node('/coordinates/ventral_contours')
#let's append the data of each individual worm as an element in a list
all_worms_feats = []
#group by iterator will return sorted worm indexes
for worm_id, worm_feat_time in feat_time_group_by_worm:
worm_id = int(worm_id)
#read worm skeletons data
worm_skel = skeletons[worm_feat_time.index]
worm_dor_cnt = dorsal_contours[worm_feat_time.index]
worm_ven_cnt = ventral_contours[worm_feat_time.index]
#start ordered dictionary with the basic features
worm_basic = OrderedDict()
worm_basic['id'] = str(worm_id)
worm_basic['head'] = 'L'
worm_basic['ventral'] = ventral_side
worm_basic['ptail'] = worm_ven_cnt.shape[1]-1 #index starting with 0
worm_basic['t'] = worm_feat_time['timestamp'].values/fps #convert from frames to seconds
worm_basic['x'] = worm_skel[:, :, 0]
worm_basic['y'] = worm_skel[:, :, 1]
contour = np.hstack((worm_ven_cnt, worm_dor_cnt[:, ::-1, :]))
worm_basic['px'] = contour[:, :, 0]
worm_basic['py'] = contour[:, :, 1]
if READ_FEATURES:
worm_features = __addOMGFeat(fid, worm_feat_time, worm_id)
for feat in worm_features:
worm_basic[lab_prefix + feat] = worm_features[feat]
if IS_FOR_WCON:
for x in worm_basic:
if not x in ['id', 'head', 'ventral', 'ptail']:
worm_basic[x] = __reformatForJson(worm_basic[x])
#append features
all_worms_feats.append(worm_basic)
return all_worms_feats
def _getUnits(features_file, READ_FEATURES=False):
fps_out, microns_per_pixel_out, _ = read_unit_conversions(features_file)
xy_units = microns_per_pixel_out[1]
time_units = fps_out[2]
units = OrderedDict()
units["size"] = "mm" #size of the plate
units['t'] = time_units #frames or seconds
for field in ['x', 'y', 'px', 'py']:
units[field] = xy_units #(pixels or micrometers)
if READ_FEATURES:
#TODO how to change microns to pixels when required
ws = WormStats()
for field, unit in ws.features_info['units'].iteritems():
units['@OMG ' + field] = unit
return units
def exportWCONdict(features_file, READ_FEATURES=False):
metadata = getWCONMetaData(features_file, READ_FEATURES)
data = _getData(features_file, READ_FEATURES)
units = _getUnits(features_file, READ_FEATURES)
#units = {x:units[x].replace('degrees', '1') for x in units}
#units = {x:units[x].replace('radians', '1') for x in units}
wcon_dict = OrderedDict()
wcon_dict['metadata'] = metadata
wcon_dict['units'] = units
wcon_dict['data'] = data
return wcon_dict
def getWCOName(features_file):
return features_file.replace('_features.hdf5', '.wcon.zip')
def exportWCON(features_file, READ_FEATURES=False):
base_name = os.path.basename(features_file).replace('_features.hdf5', '')
print_flush("{} Exporting data to WCON...".format(base_name))
wcon_dict = exportWCONdict(features_file, READ_FEATURES)
wcon_file = getWCOName(features_file)
#with gzip.open(wcon_file, 'wt') as fid:
# json.dump(wcon_dict, fid, allow_nan=False)
with zipfile.ZipFile(wcon_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
zip_name = os.path.basename(wcon_file).replace('.zip', '')
wcon_txt = json.dumps(wcon_dict, allow_nan=False, separators=(',', ':'))
zf.writestr(zip_name, wcon_txt)
print_flush("{} Finised to export to WCON.".format(base_name))
if __name__ == '__main__':
features_file = '/Users/ajaver/OneDrive - Imperial College London/Local_Videos/single_w | worm_features = OrderedDict()
#add time series features
for col_name, col_dat in worm_feat_time.iteritems():
if not col_name in ['worm_index', 'timestamp']:
worm_features[col_name] = col_dat.values
worm_path = '/features_events/worm_%i' % worm_id
worm_node = fid.get_node(worm_path)
#add event features
for feature_name in worm_node._v_children:
feature_path = worm_path + '/' + feature_name
worm_features[feature_name] = fid.get_node(feature_path)[:]
return worm_features | identifier_body |
exportWCON.py | import WormStats
from tierpsy.helper.params import read_unit_conversions, read_ventral_side, read_fps
def getWCONMetaData(fname, READ_FEATURES=False, provenance_step='FEAT_CREATE'):
def _order_metadata(metadata_dict):
ordered_fields = ['strain', 'timestamp', 'gene', 'chromosome', 'allele',
'strain_description', 'sex', 'stage', 'ventral_side', 'media', 'arena', 'food',
'habituation', 'who', 'protocol', 'lab', 'software']
extra_fields = metadata_dict.keys() - set(ordered_fields)
ordered_fields += sorted(extra_fields)
ordered_metadata = OrderedDict()
for field in ordered_fields:
if field in metadata_dict:
ordered_metadata[field] = metadata_dict[field]
return ordered_metadata
with tables.File(fname, 'r') as fid:
if not '/experiment_info' in fid:
experiment_info = {}
else:
experiment_info = fid.get_node('/experiment_info').read()
experiment_info = json.loads(experiment_info.decode('utf-8'))
provenance_tracking = fid.get_node('/provenance_tracking/' + provenance_step).read()
provenance_tracking = json.loads(provenance_tracking.decode('utf-8'))
commit_hash = provenance_tracking['commit_hash']
if 'tierpsy' in commit_hash:
tierpsy_version = commit_hash['tierpsy']
else:
tierpsy_version = commit_hash['MWTracker']
MWTracker_ver = {"name":"tierpsy (https://github.com/ver228/tierpsy-tracker)",
"version": tierpsy_version,
"featureID":"@OMG"}
if not READ_FEATURES:
experiment_info["software"] = MWTracker_ver
else:
#add open_worm_analysis_toolbox info and save as a list of "softwares"
open_worm_ver = {"name":"open_worm_analysis_toolbox (https://github.com/openworm/open-worm-analysis-toolbox)",
"version":commit_hash['open_worm_analysis_toolbox'],
"featureID":""}
experiment_info["software"] = [MWTracker_ver, open_worm_ver]
return _order_metadata(experiment_info)
def __reformatForJson(A):
if isinstance(A, (int, float)):
return A
good = ~np.isnan(A) & (A != 0)
dd = A[good]
if dd.size > 0:
dd = np.abs(np.floor(np.log10(np.abs(dd)))-2)
precision = max(2, int(np.min(dd)))
A = np.round(A.astype(np.float64), precision)
A = np.where(np.isnan(A), None, A)
#the WCON specification requires returning a single number when the list has only one element
if A.size == 1:
return A[0]
else:
return A.tolist()
def __addOMGFeat(fid, worm_feat_time, worm_id):
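# collect this worm's per-frame (time-series) features and its event
# features stored under /features_events/worm_<id>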
worm_features = OrderedDict()
#add time series features
for col_name, col_dat in worm_feat_time.iteritems():
if not col_name in ['worm_index', 'timestamp']:
worm_features[col_name] = col_dat.values
worm_path = '/features_events/worm_%i' % worm_id
worm_node = fid.get_node(worm_path)
#add event features
for feature_name in worm_node._v_children:
feature_path = worm_path + '/' + feature_name
worm_features[feature_name] = fid.get_node(feature_path)[:]
return worm_features
def _get_ventral_side(features_file):
ventral_side = read_ventral_side(features_file)
if not ventral_side or ventral_side == 'unknown':
ventral_type = '?'
else:
#we will merge the ventral and dorsal contours so the ventral contour is clockwise
ventral_type='CW'
return ventral_type
def | (features_file, READ_FEATURES=False, IS_FOR_WCON=True):
if IS_FOR_WCON:
lab_prefix = '@OMG '
else:
lab_prefix = ''
with pd.HDFStore(features_file, 'r') as fid:
if not '/features_timeseries' in fid:
return {} #empty file nothing to do here
features_timeseries = fid['/features_timeseries']
feat_time_group_by_worm = features_timeseries.groupby('worm_index');
ventral_side = _get_ventral_side(features_file)
with tables.File(features_file, 'r') as fid:
#fps used to adjust timestamp to real time
fps = read_fps(features_file)
#get pointers to some useful data
skeletons = fid.get_node('/coordinates/skeletons')
dorsal_contours = fid.get_node('/coordinates/dorsal_contours')
ventral_contours = fid.get_node('/coordinates/ventral_contours')
#let's append the data of each individual worm as an element in a list
all_worms_feats = []
#group by iterator will return sorted worm indexes
for worm_id, worm_feat_time in feat_time_group_by_worm:
worm_id = int(worm_id)
#read worm skeletons data
worm_skel = skeletons[worm_feat_time.index]
worm_dor_cnt = dorsal_contours[worm_feat_time.index]
worm_ven_cnt = ventral_contours[worm_feat_time.index]
#start ordered dictionary with the basic features
worm_basic = OrderedDict()
worm_basic['id'] = str(worm_id)
worm_basic['head'] = 'L'
worm_basic['ventral'] = ventral_side
worm_basic['ptail'] = worm_ven_cnt.shape[1]-1 #index starting with 0
worm_basic['t'] = worm_feat_time['timestamp'].values/fps #convert from frames to seconds
worm_basic['x'] = worm_skel[:, :, 0]
worm_basic['y'] = worm_skel[:, :, 1]
contour = np.hstack((worm_ven_cnt, worm_dor_cnt[:, ::-1, :]))
worm_basic['px'] = contour[:, :, 0]
worm_basic['py'] = contour[:, :, 1]
if READ_FEATURES:
worm_features = __addOMGFeat(fid, worm_feat_time, worm_id)
for feat in worm_features:
worm_basic[lab_prefix + feat] = worm_features[feat]
if IS_FOR_WCON:
for x in worm_basic:
if not x in ['id', 'head', 'ventral', 'ptail']:
worm_basic[x] = __reformatForJson(worm_basic[x])
#append features
all_worms_feats.append(worm_basic)
return all_worms_feats
def _getUnits(features_file, READ_FEATURES=False):
fps_out, microns_per_pixel_out, _ = read_unit_conversions(features_file)
xy_units = microns_per_pixel_out[1]
time_units = fps_out[2]
units = OrderedDict()
units["size"] = "mm" #size of the plate
units['t'] = time_units #frames or seconds
for field in ['x', 'y', 'px', 'py']:
units[field] = xy_units #(pixels or micrometers)
if READ_FEATURES:
#TODO how to change microns to pixels when required
ws = WormStats()
for field, unit in ws.features_info['units'].iteritems():
units['@OMG ' + field] = unit
return units
def exportWCONdict(features_file, READ_FEATURES=False):
metadata = getWCONMetaData(features_file, READ_FEATURES)
data = _getData(features_file, READ_FEATURES)
units = _getUnits(features_file, READ_FEATURES)
#units = {x:units[x].replace('degrees', '1') for x in units}
#units = {x:units[x].replace('radians', '1') for x in units}
wcon_dict = OrderedDict()
wcon_dict['metadata'] = metadata
wcon_dict['units'] = units
wcon_dict['data'] = data
return wcon_dict
def getWCOName(features_file):
return features_file.replace('_features.hdf5', '.wcon.zip')
def exportWCON(features_file, READ_FEATURES=False):
base_name = os.path.basename(features_file).replace('_features.hdf5', '')
print_flush("{} Exporting data to WCON...".format(base_name))
wcon_dict = exportWCONdict(features_file, READ_FEATURES)
wcon_file = getWCOName(features_file)
#with gzip.open(wcon_file, 'wt') as fid:
# json.dump(wcon_dict, fid, allow_nan=False)
with zipfile.ZipFile(wcon_file, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
zip_name = os.path.basename(wcon_file).replace('.zip', '')
wcon_txt = json.dumps(wcon_dict, allow_nan=False, separators=(',', ':'))
zf.writestr(zip_name, wcon_txt)
print_flush("{} Finised to export to WCON.".format(base_name))
if __name__ == '__main__':
features_file = '/Users/ajaver/OneDrive - Imperial College London/Local_Videos/single | _getData | identifier_name |
seed.config.ts | JavaScript files.
* @type {string}
*/
JS_DEST = `${this.APP_DEST}/js`;
/**
* The version of the application as defined in the `package.json`.
*/
VERSION = appVersion();
/**
* The name of the bundle file to include all CSS files.
* @type {string}
*/
CSS_PROD_BUNDLE = 'all.css';
/**
* The name of the bundle file to include all JavaScript shims.
* @type {string}
*/
JS_PROD_SHIMS_BUNDLE = 'shims.js';
/**
* The name of the bundle file to include all JavaScript application files.
* @type {string}
*/
JS_PROD_APP_BUNDLE = 'app.js';
/**
* The required NPM version to run the application.
* @type {string}
*/
VERSION_NPM = '3.0.0';
/**
* The required NodeJS version to run the application.
* @type {string}
*/
VERSION_NODE = '5.0.0';
/**
* The ruleset to be used by `codelyzer` for linting the TypeScript files.
*/
CODELYZER_RULES = customRules();
/**
* The list of NPM dependencies to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
NPM_DEPENDENCIES: InjectableDependency[] = [
{ src: 'systemjs/dist/system-polyfills.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'zone.js/dist/zone.js', inject: 'libs' },
{ src: 'core-js/client/shim.min.js', inject: 'shims' },
{ src: 'systemjs/dist/system.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'rxjs/bundles/Rx.js', inject: 'libs', env: ENVIRONMENTS.DEVELOPMENT }
];
/**
* The list of local files to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
APP_ASSETS: InjectableDependency[] = [
{ src: `${this.CSS_SRC}/main.css`, inject: true, vendor: false }
];
/**
* The list of editor temporary files to ignore in watcher and asset builder.
* @type {string[]}
*/
TEMP_FILES: string[] = [
'**/*___jb_tmp___',
'**/*~',
];
/**
* Returns the array of injectable dependencies (npm dependencies and assets).
* @return {InjectableDependency[]} The array of npm dependencies and assets.
*/
get DEPENDENCIES(): InjectableDependency[] {
return normalizeDependencies(this.NPM_DEPENDENCIES.filter(filterDependency.bind(null, this.ENV)))
.concat(this.APP_ASSETS.filter(filterDependency.bind(null, this.ENV)));
}
/**
* The configuration of SystemJS for the `dev` environment.
* @type {any}
*/
protected SYSTEM_CONFIG_DEV: any = {
defaultJSExtensions: true,
packageConfigPaths: [
`${this.APP_BASE}node_modules/*/package.json`,
`${this.APP_BASE}node_modules/**/package.json`,
`${this.APP_BASE}node_modules/@angular/*/package.json`
],
paths: {
[this.BOOTSTRAP_MODULE]: `${this.APP_BASE}${this.BOOTSTRAP_MODULE}`,
'@angular/core': `${this.APP_BASE}node_modules/@angular/core/core.umd.js`,
'@angular/common': `${this.APP_BASE}node_modules/@angular/common/common.umd.js`,
'@angular/compiler': `${this.APP_BASE}node_modules/@angular/compiler/compiler.umd.js`,
'@angular/http': `${this.APP_BASE}node_modules/@angular/http/http.umd.js`,
'@angular/router': `${this.APP_BASE}node_modules/@angular/router/router.umd.js`,
'@angular/platform-browser': `${this.APP_BASE}node_modules/@angular/platform-browser/platform-browser.umd.js`,
'@angular/platform-browser-dynamic': `${this.APP_BASE}node_modules/@angular/platform-browser-dynamic/platform-browser-dynamic.umd.js`,
'rxjs/*': `${this.APP_BASE}node_modules/rxjs/*`,
'app/*': `/app/*`,
'*': `${this.APP_BASE}node_modules/*`
},
packages: {
rxjs: { defaultExtension: false }
}
};
/**
* The configuration of SystemJS of the application.
* Per default, the configuration of the `dev` environment will be used.
* @type {any}
*/
SYSTEM_CONFIG: any = this.SYSTEM_CONFIG_DEV;
/**
* The system builder configuration of the application.
* @type {any}
*/
SYSTEM_BUILDER_CONFIG: any = {
defaultJSExtensions: true,
packageConfigPaths: [
join(this.PROJECT_ROOT, 'node_modules', '*', 'package.json'),
join(this.PROJECT_ROOT, 'node_modules', '@angular', '*', 'package.json')
],
paths: {
[`${this.TMP_DIR}/*`]: `${this.TMP_DIR}/*`,
'*': 'node_modules/*'
},
packages: {
'@angular/core': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/compiler': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/common': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/http': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/platform-browser': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/platform-browser-dynamic': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/router': {
main: 'index.js',
defaultExtension: 'js'
},
'rxjs': {
defaultExtension: 'js'
}
}
};
/**
* The Autoprefixer configuration for the application.
* @type {Array}
*/
BROWSER_LIST = [
'ie >= 10',
'ie_mob >= 10',
'ff >= 30',
'chrome >= 34',
'safari >= 7',
'opera >= 23',
'ios >= 7',
'android >= 4.4',
'bb >= 10'
];
/**
* Configurations for NPM modules. Add to or override in project.config.ts.
* If you like, use the mergeObject() method to assist with this.
*/
PLUGIN_CONFIGS: any = {
/**
* The BrowserSync configuration of the application.
* The default open behavior is to open the browser. To prevent the browser from opening use the `--b` flag when
* running `npm start` (tested with serve.dev).
* Example: `npm start -- --b`
* @type {any}
*/
'browser-sync': {
middleware: [require('connect-history-api-fallback')({ index: `${this.APP_BASE}index.html` })],
port: this.PORT,
startPath: this.APP_BASE,
open: argv['b'] ? false : true,
injectChanges: false,
server: {
baseDir: `${this.DIST_DIR}/empty/`,
routes: {
[`${this.APP_BASE}${this.APP_DEST}`]: this.APP_DEST,
[`${this.APP_BASE}node_modules`]: 'node_modules',
[`${this.APP_BASE.replace(/\/$/, '')}`]: this.APP_DEST
}
}
}
};
/**
* Recursively merge source onto target.
* @param {any} target The target object (to receive values from source)
* @param {any} source The source object (to be merged onto target)
*/
mergeObject(target: any, source: any) {
const deepExtend = require('deep-extend');
deepExtend(target, source);
}
/**
* Locate a plugin configuration object by plugin key.
* @param {any} pluginKey The object key to look up in PLUGIN_CONFIGS.
*/
getPluginConfig(pluginKey: string): any {
if (this.PLUGIN_CONFIGS[ pluginKey ]) {
return this.PLUGIN_CONFIGS[pluginKey];
}
return null;
}
}
/**
* Normalizes the given `deps` to skip globs.
* @param {InjectableDependency[]} deps - The dependencies to be normalized.
*/
export function normalizeDependencies(deps: InjectableDependency[]) {
deps
.filter((d: InjectableDependency) => !/\*/.test(d.src)) // Skip globs
.forEach((d: InjectableDependency) => d.src = require.resolve(d.src));
return deps;
}
/**
* Returns if the given dependency is used in the given environment.
* @param {string} env - The environment to be filtered for.
* @param {InjectableDependency} d - The dependency to check.
* @return {boolean} `true` if the dependency is used in this environment, `false` otherwise.
*/
function filterDependency(env: string, d: InjectableDependency): boolean {
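// a dependency that declares no env is assumed to apply to every environment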
if (!d.env) {
d.env = Object.keys(ENVIRONMENTS).map(k => ENVIRONMENTS[k]);
}
if (!(d.env instanceof Array)) | {
(<any>d).env = [d.env];
} | conditional_block |
|
seed.config.ts | runtime.
* The default path is `/`, which can be overridden by the `--base` flag when running `npm start`.
* @type {string}
*/
APP_BASE = argv['base'] || '/';
/**
* The flag to include templates into JS app prod file.
* Per default the option is `true`, but it can be set to false using `--inline-template false`
* flag when running `npm run build.prod`.
* @type {boolean}
*/
INLINE_TEMPLATES = argv['inline-template'] !== 'false';
/**
* The flag for the hot-loader option of the application.
* Per default the option is not set, but can be set by the `--hot-loader` flag when running `npm start`.
* @type {boolean}
*/
ENABLE_HOT_LOADING = argv['hot-loader'];
/**
* The port where the application will run, if the `hot-loader` option mode is used.
* The default hot-loader port is `5578`.
* @type {number}
*/
HOT_LOADER_PORT = 5578;
/**
* The flag for the targeting of desktop option of the application.
* Per default the option is false.
* @type {boolean}
*/
TARGET_DESKTOP = false;
/**
* The flag for the targeting of desktop build option of the application.
* Per default the option is false.
* @type {boolean}
*/
TARGET_DESKTOP_BUILD = false;
/**
* The directory where the bootstrap file is located.
* The default directory is `app`.
* @type {string}
*/
BOOTSTRAP_DIR = 'app';
/**
* The directory where the client files are located.
* The default directory is `client`.
* @type {string}
*/
APP_CLIENT = argv['client'] || 'client';
/**
* The bootstrap file to be used to boot the application. The file to be used is dependent if the hot-loader option is
* used or not.
* Per default (non hot-loader mode) the `main.ts` file will be used, with the hot-loader option enabled, the
* `hot_loader_main.ts` file will be used.
* @type {string}
*/
BOOTSTRAP_MODULE = `${this.BOOTSTRAP_DIR}/` + (this.ENABLE_HOT_LOADING ? 'hot_loader_main' : 'main');
/**
* The default title of the application as used in the `<title>` tag of the
* `index.html`.
* @type {string}
*/
APP_TITLE = 'Welcome to angular2-seed!';
/**
* The base folder of the application's source files.
* @type {string}
*/
APP_SRC = `src/${this.APP_CLIENT}`;
/**
* The folder of the application's asset files.
* @type {string}
*/
ASSETS_SRC = `${this.APP_SRC}/assets`;
/**
* The folder of the application's css files.
* @type {string}
*/
CSS_SRC = `${this.APP_SRC}/css`;
/**
* The directory of the application's tools.
* @type {string}
*/
TOOLS_DIR = 'tools';
/**
* The directory of the tasks provided by the seed.
*/
SEED_TASKS_DIR = join(process.cwd(), this.TOOLS_DIR, 'tasks', 'seed');
/**
* The destination folder for the generated documentation.
* @type {string}
*/
DOCS_DEST = 'docs';
/**
* The base folder for built files.
* @type {string}
*/
DIST_DIR = 'dist';
/**
* The folder for built files in the `dev` environment.
* @type {string}
*/
DEV_DEST = `${this.DIST_DIR}/dev`;
/**
* The folder for the built files in the `prod` environment.
* @type {string}
*/
PROD_DEST = `${this.DIST_DIR}/prod`;
/**
* The folder for temporary files.
* @type {string}
*/
TMP_DIR = `${this.DIST_DIR}/tmp`;
/**
* The folder for the built files, corresponding to the current environment.
* @type {string}
*/
APP_DEST = this.ENV === ENVIRONMENTS.DEVELOPMENT ? this.DEV_DEST : this.PROD_DEST;
/**
* The folder for the built CSS files.
* @type {string}
*/
CSS_DEST = `${this.APP_DEST}/css`;
/**
* The folder for the built JavaScript files.
* @type {string}
*/
JS_DEST = `${this.APP_DEST}/js`;
/**
* The version of the application as defined in the `package.json`.
*/
VERSION = appVersion();
/**
* The name of the bundle file to include all CSS files.
* @type {string}
*/
CSS_PROD_BUNDLE = 'all.css';
/**
* The name of the bundle file to include all JavaScript shims.
* @type {string}
*/
JS_PROD_SHIMS_BUNDLE = 'shims.js';
/**
* The name of the bundle file to include all JavaScript application files.
* @type {string}
*/
JS_PROD_APP_BUNDLE = 'app.js';
/**
* The required NPM version to run the application.
* @type {string}
*/
VERSION_NPM = '3.0.0';
/**
* The required NodeJS version to run the application.
* @type {string}
*/
VERSION_NODE = '5.0.0';
/**
* The ruleset to be used by `codelyzer` for linting the TypeScript files.
*/
CODELYZER_RULES = customRules();
/**
* The list of NPM dependencies to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
NPM_DEPENDENCIES: InjectableDependency[] = [
{ src: 'systemjs/dist/system-polyfills.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'zone.js/dist/zone.js', inject: 'libs' },
{ src: 'core-js/client/shim.min.js', inject: 'shims' },
{ src: 'systemjs/dist/system.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'rxjs/bundles/Rx.js', inject: 'libs', env: ENVIRONMENTS.DEVELOPMENT }
];
/**
* The list of local files to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
APP_ASSETS: InjectableDependency[] = [
{ src: `${this.CSS_SRC}/main.css`, inject: true, vendor: false }
];
/**
* The list of editor temporary files to ignore in watcher and asset builder.
* @type {string[]}
*/
TEMP_FILES: string[] = [
'**/*___jb_tmp___',
'**/*~',
];
/**
* Returns the array of injectable dependencies (npm dependencies and assets).
* @return {InjectableDependency[]} The array of npm dependencies and assets.
*/
get | (): InjectableDependency[] {
return normalizeDependencies(this.NPM_DEPENDENCIES.filter(filterDependency.bind(null, this.ENV)))
.concat(this.APP_ASSETS.filter(filterDependency.bind(null, this.ENV)));
}
/**
* The configuration of SystemJS for the `dev` environment.
* @type {any}
*/
protected SYSTEM_CONFIG_DEV: any = {
defaultJSExtensions: true,
packageConfigPaths: [
`${this.APP_BASE}node_modules/*/package.json`,
`${this.APP_BASE}node_modules/**/package.json`,
`${this.APP_BASE}node_modules/@angular/*/package.json`
],
paths: {
[this.BOOTSTRAP_MODULE]: `${this.APP_BASE}${this.BOOTSTRAP_MODULE}`,
'@angular/core': `${this.APP_BASE}node_modules/@angular/core/core.umd.js`,
'@angular/common': `${this.APP_BASE}node_modules/@angular/common/common.umd.js`,
'@angular/compiler': `${this.APP_BASE}node_modules/@angular/compiler/compiler.umd.js`,
'@angular/http': `${this.APP_BASE}node_modules/@angular/http/http.umd.js`,
'@angular/router': `${this.APP_BASE}node_modules/@angular/router/router.umd.js`,
'@angular/platform-browser': `${this.APP_BASE}node_modules/@angular/platform-browser/platform-browser.umd.js`,
'@angular/platform-browser-dynamic': `${this.APP_BASE}node_modules/@angular/platform-browser-dynamic/platform-browser-dynamic.umd.js`,
'rxjs/*': `${this.APP_BASE}node_modules/rxjs/*`,
'app/*': `/app/*`,
'*': `${this.APP_BASE}node_modules/*`
},
packages: {
rxjs: { defaultExtension: false }
}
};
/**
* The configuration of SystemJS of the application.
* Per default, the configuration of the `dev` environment will be used.
* @type {any}
*/
SYSTEM_CONFIG: any = this.SYSTEM_CONFIG_DEV;
/**
* The system builder configuration of the application.
* @type {any}
*/
SYSTEM_BUILDER_CONFIG: any = {
defaultJSExtensions: true,
packageConfigPaths: [
join(this.PROJECT | DEPENDENCIES | identifier_name |
seed.config.ts | runtime.
* The default path is `/`, which can be overridden by the `--base` flag when running `npm start`.
* @type {string}
*/
APP_BASE = argv['base'] || '/';
/**
* The flag to include templates into JS app prod file.
* Per default the option is `true`, but it can be set to false using `--inline-template false`
* flag when running `npm run build.prod`.
* @type {boolean}
*/
INLINE_TEMPLATES = argv['inline-template'] !== 'false';
/**
* The flag for the hot-loader option of the application.
* Per default the option is not set, but can be set by the `--hot-loader` flag when running `npm start`.
* @type {boolean}
*/
ENABLE_HOT_LOADING = argv['hot-loader'];
/**
* The port where the application will run, if the `hot-loader` option mode is used.
* The default hot-loader port is `5578`.
* @type {number}
*/
HOT_LOADER_PORT = 5578;
/**
* The flag for the targeting of desktop option of the application.
* Per default the option is false.
* @type {boolean}
*/
TARGET_DESKTOP = false;
/**
* The flag for the targeting of desktop build option of the application.
* Per default the option is false.
* @type {boolean} | */
TARGET_DESKTOP_BUILD = false;
/**
* The directory where the bootstrap file is located.
* The default directory is `app`.
* @type {string}
*/
BOOTSTRAP_DIR = 'app';
/**
* The directory where the client files are located.
* The default directory is `client`.
* @type {string}
*/
APP_CLIENT = argv['client'] || 'client';
/**
* The bootstrap file to be used to boot the application. The file to be used is dependent if the hot-loader option is
* used or not.
* Per default (non hot-loader mode) the `main.ts` file will be used, with the hot-loader option enabled, the
* `hot_loader_main.ts` file will be used.
* @type {string}
*/
BOOTSTRAP_MODULE = `${this.BOOTSTRAP_DIR}/` + (this.ENABLE_HOT_LOADING ? 'hot_loader_main' : 'main');
/**
* The default title of the application as used in the `<title>` tag of the
* `index.html`.
* @type {string}
*/
APP_TITLE = 'Welcome to angular2-seed!';
/**
* The base folder of the application's source files.
* @type {string}
*/
APP_SRC = `src/${this.APP_CLIENT}`;
/**
* The folder of the application's asset files.
* @type {string}
*/
ASSETS_SRC = `${this.APP_SRC}/assets`;
/**
* The folder of the application's css files.
* @type {string}
*/
CSS_SRC = `${this.APP_SRC}/css`;
/**
* The directory of the application's tools.
* @type {string}
*/
TOOLS_DIR = 'tools';
/**
* The directory of the tasks provided by the seed.
*/
SEED_TASKS_DIR = join(process.cwd(), this.TOOLS_DIR, 'tasks', 'seed');
/**
* The destination folder for the generated documentation.
* @type {string}
*/
DOCS_DEST = 'docs';
/**
* The base folder for built files.
* @type {string}
*/
DIST_DIR = 'dist';
/**
* The folder for built files in the `dev` environment.
* @type {string}
*/
DEV_DEST = `${this.DIST_DIR}/dev`;
/**
* The folder for the built files in the `prod` environment.
* @type {string}
*/
PROD_DEST = `${this.DIST_DIR}/prod`;
/**
* The folder for temporary files.
* @type {string}
*/
TMP_DIR = `${this.DIST_DIR}/tmp`;
/**
* The folder for the built files, corresponding to the current environment.
* @type {string}
*/
APP_DEST = this.ENV === ENVIRONMENTS.DEVELOPMENT ? this.DEV_DEST : this.PROD_DEST;
/**
* The folder for the built CSS files.
* @type {string}
*/
CSS_DEST = `${this.APP_DEST}/css`;
/**
* The folder for the built JavaScript files.
* @type {string}
*/
JS_DEST = `${this.APP_DEST}/js`;
/**
* The version of the application as defined in the `package.json`.
*/
VERSION = appVersion();
/**
* The name of the bundle file to include all CSS files.
* @type {string}
*/
CSS_PROD_BUNDLE = 'all.css';
/**
* The name of the bundle file to include all JavaScript shims.
* @type {string}
*/
JS_PROD_SHIMS_BUNDLE = 'shims.js';
/**
* The name of the bundle file to include all JavaScript application files.
* @type {string}
*/
JS_PROD_APP_BUNDLE = 'app.js';
/**
* The required NPM version to run the application.
* @type {string}
*/
VERSION_NPM = '3.0.0';
/**
* The required NodeJS version to run the application.
* @type {string}
*/
VERSION_NODE = '5.0.0';
/**
* The ruleset to be used by `codelyzer` for linting the TypeScript files.
*/
CODELYZER_RULES = customRules();
/**
* The list of NPM dependencies to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
NPM_DEPENDENCIES: InjectableDependency[] = [
{ src: 'systemjs/dist/system-polyfills.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'zone.js/dist/zone.js', inject: 'libs' },
{ src: 'core-js/client/shim.min.js', inject: 'shims' },
{ src: 'systemjs/dist/system.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'rxjs/bundles/Rx.js', inject: 'libs', env: ENVIRONMENTS.DEVELOPMENT }
];
/**
* The list of local files to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
APP_ASSETS: InjectableDependency[] = [
{ src: `${this.CSS_SRC}/main.css`, inject: true, vendor: false }
];
/**
* The list of editor temporary files to ignore in watcher and asset builder.
* @type {string[]}
*/
TEMP_FILES: string[] = [
'**/*___jb_tmp___',
'**/*~',
];
/**
* Returns the array of injectable dependencies (npm dependencies and assets).
* @return {InjectableDependency[]} The array of npm dependencies and assets.
*/
get DEPENDENCIES(): InjectableDependency[] {
return normalizeDependencies(this.NPM_DEPENDENCIES.filter(filterDependency.bind(null, this.ENV)))
.concat(this.APP_ASSETS.filter(filterDependency.bind(null, this.ENV)));
}
/**
* The configuration of SystemJS for the `dev` environment.
* @type {any}
*/
protected SYSTEM_CONFIG_DEV: any = {
defaultJSExtensions: true,
packageConfigPaths: [
`${this.APP_BASE}node_modules/*/package.json`,
`${this.APP_BASE}node_modules/**/package.json`,
`${this.APP_BASE}node_modules/@angular/*/package.json`
],
paths: {
[this.BOOTSTRAP_MODULE]: `${this.APP_BASE}${this.BOOTSTRAP_MODULE}`,
'@angular/core': `${this.APP_BASE}node_modules/@angular/core/core.umd.js`,
'@angular/common': `${this.APP_BASE}node_modules/@angular/common/common.umd.js`,
'@angular/compiler': `${this.APP_BASE}node_modules/@angular/compiler/compiler.umd.js`,
'@angular/http': `${this.APP_BASE}node_modules/@angular/http/http.umd.js`,
'@angular/router': `${this.APP_BASE}node_modules/@angular/router/router.umd.js`,
'@angular/platform-browser': `${this.APP_BASE}node_modules/@angular/platform-browser/platform-browser.umd.js`,
'@angular/platform-browser-dynamic': `${this.APP_BASE}node_modules/@angular/platform-browser-dynamic/platform-browser-dynamic.umd.js`,
'rxjs/*': `${this.APP_BASE}node_modules/rxjs/*`,
'app/*': `/app/*`,
'*': `${this.APP_BASE}node_modules/*`
},
packages: {
rxjs: { defaultExtension: false }
}
};
/**
* The configuration of SystemJS of the application.
* Per default, the configuration of the `dev` environment will be used.
* @type {any}
*/
SYSTEM_CONFIG: any = this.SYSTEM_CONFIG_DEV;
/**
* The system builder configuration of the application.
* @type {any}
*/
SYSTEM_BUILDER_CONFIG: any = {
defaultJSExtensions: true,
packageConfigPaths: [
join(this.PROJECT | random_line_split |
|
seed.config.ts | 'shims.js';
/**
* The name of the bundle file to include all JavaScript application files.
* @type {string}
*/
JS_PROD_APP_BUNDLE = 'app.js';
/**
* The required NPM version to run the application.
* @type {string}
*/
VERSION_NPM = '3.0.0';
/**
* The required NodeJS version to run the application.
* @type {string}
*/
VERSION_NODE = '5.0.0';
/**
* The ruleset to be used by `codelyzer` for linting the TypeScript files.
*/
CODELYZER_RULES = customRules();
/**
* The list of NPM dependcies to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
NPM_DEPENDENCIES: InjectableDependency[] = [
{ src: 'systemjs/dist/system-polyfills.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'zone.js/dist/zone.js', inject: 'libs' },
{ src: 'core-js/client/shim.min.js', inject: 'shims' },
{ src: 'systemjs/dist/system.src.js', inject: 'shims', env: ENVIRONMENTS.DEVELOPMENT },
{ src: 'rxjs/bundles/Rx.js', inject: 'libs', env: ENVIRONMENTS.DEVELOPMENT }
];
/**
* The list of local files to be injected in the `index.html`.
* @type {InjectableDependency[]}
*/
APP_ASSETS: InjectableDependency[] = [
{ src: `${this.CSS_SRC}/main.css`, inject: true, vendor: false }
];
/**
* The list of editor temporary files to ignore in watcher and asset builder.
* @type {string[]}
*/
TEMP_FILES: string[] = [
'**/*___jb_tmp___',
'**/*~',
];
/**
* Returns the array of injectable dependencies (npm dependencies and assets).
* @return {InjectableDependency[]} The array of npm dependencies and assets.
*/
get DEPENDENCIES(): InjectableDependency[] {
return normalizeDependencies(this.NPM_DEPENDENCIES.filter(filterDependency.bind(null, this.ENV)))
.concat(this.APP_ASSETS.filter(filterDependency.bind(null, this.ENV)));
}
/**
* The configuration of SystemJS for the `dev` environment.
* @type {any}
*/
protected SYSTEM_CONFIG_DEV: any = {
defaultJSExtensions: true,
packageConfigPaths: [
`${this.APP_BASE}node_modules/*/package.json`,
`${this.APP_BASE}node_modules/**/package.json`,
`${this.APP_BASE}node_modules/@angular/*/package.json`
],
paths: {
[this.BOOTSTRAP_MODULE]: `${this.APP_BASE}${this.BOOTSTRAP_MODULE}`,
'@angular/core': `${this.APP_BASE}node_modules/@angular/core/core.umd.js`,
'@angular/common': `${this.APP_BASE}node_modules/@angular/common/common.umd.js`,
'@angular/compiler': `${this.APP_BASE}node_modules/@angular/compiler/compiler.umd.js`,
'@angular/http': `${this.APP_BASE}node_modules/@angular/http/http.umd.js`,
'@angular/router': `${this.APP_BASE}node_modules/@angular/router/router.umd.js`,
'@angular/platform-browser': `${this.APP_BASE}node_modules/@angular/platform-browser/platform-browser.umd.js`,
'@angular/platform-browser-dynamic': `${this.APP_BASE}node_modules/@angular/platform-browser-dynamic/platform-browser-dynamic.umd.js`,
'rxjs/*': `${this.APP_BASE}node_modules/rxjs/*`,
'app/*': `/app/*`,
'*': `${this.APP_BASE}node_modules/*`
},
packages: {
rxjs: { defaultExtension: false }
}
};
/**
* The configuration of SystemJS of the application.
* Per default, the configuration of the `dev` environment will be used.
* @type {any}
*/
SYSTEM_CONFIG: any = this.SYSTEM_CONFIG_DEV;
/**
* The system builder configuration of the application.
* @type {any}
*/
SYSTEM_BUILDER_CONFIG: any = {
defaultJSExtensions: true,
packageConfigPaths: [
join(this.PROJECT_ROOT, 'node_modules', '*', 'package.json'),
join(this.PROJECT_ROOT, 'node_modules', '@angular', '*', 'package.json')
],
paths: {
[`${this.TMP_DIR}/*`]: `${this.TMP_DIR}/*`,
'*': 'node_modules/*'
},
packages: {
'@angular/core': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/compiler': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/common': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/http': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/platform-browser': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/platform-browser-dynamic': {
main: 'index.js',
defaultExtension: 'js'
},
'@angular/router': {
main: 'index.js',
defaultExtension: 'js'
},
'rxjs': {
defaultExtension: 'js'
}
}
};
/**
* The Autoprefixer configuration for the application.
* @type {Array}
*/
BROWSER_LIST = [
'ie >= 10',
'ie_mob >= 10',
'ff >= 30',
'chrome >= 34',
'safari >= 7',
'opera >= 23',
'ios >= 7',
'android >= 4.4',
'bb >= 10'
];
/**
* Configurations for NPM modules. Add to or override in project.config.ts.
* If you like, use the mergeObject() method to assist with this.
*/
PLUGIN_CONFIGS: any = {
/**
* The BrowserSync configuration of the application.
* The default open behavior is to open the browser. To prevent the browser from opening use the `--b` flag when
* running `npm start` (tested with serve.dev).
* Example: `npm start -- --b`
* @type {any}
*/
'browser-sync': {
middleware: [require('connect-history-api-fallback')({ index: `${this.APP_BASE}index.html` })],
port: this.PORT,
startPath: this.APP_BASE,
open: argv['b'] ? false : true,
injectChanges: false,
server: {
baseDir: `${this.DIST_DIR}/empty/`,
routes: {
[`${this.APP_BASE}${this.APP_DEST}`]: this.APP_DEST,
[`${this.APP_BASE}node_modules`]: 'node_modules',
[`${this.APP_BASE.replace(/\/$/, '')}`]: this.APP_DEST
}
}
}
};
/**
* Recursively merge source onto target.
* @param {any} target The target object (to receive values from source)
* @param {any} source The source object (to be merged onto target)
*/
mergeObject(target: any, source: any) {
const deepExtend = require('deep-extend');
deepExtend(target, source);
}
/**
* Locate a plugin configuration object by plugin key.
* @param {any} pluginKey The object key to look up in PLUGIN_CONFIGS.
*/
getPluginConfig(pluginKey: string): any {
if (this.PLUGIN_CONFIGS[ pluginKey ]) {
return this.PLUGIN_CONFIGS[pluginKey];
}
return null;
}
}
/**
* Normalizes the given `deps` to skip globs.
* @param {InjectableDependency[]} deps - The dependencies to be normalized.
*/
export function normalizeDependencies(deps: InjectableDependency[]) {
deps
.filter((d: InjectableDependency) => !/\*/.test(d.src)) // Skip globs
.forEach((d: InjectableDependency) => d.src = require.resolve(d.src));
return deps;
}
/**
* Returns if the given dependency is used in the given environment.
* @param {string} env - The environment to be filtered for.
* @param {InjectableDependency} d - The dependency to check.
* @return {boolean} `true` if the dependency is used in this environment, `false` otherwise.
*/
function filterDependency(env: string, d: InjectableDependency): boolean {
if (!d.env) {
d.env = Object.keys(ENVIRONMENTS).map(k => ENVIRONMENTS[k]);
}
if (!(d.env instanceof Array)) {
(<any>d).env = [d.env];
}
return d.env.indexOf(env) >= 0;
}
/**
* Returns the application's version as defined in the `package.json`.
* @return {number | string} The application's version.
*/
function appVersion(): number | string {
var pkg = require('../../package.json');
return pkg.version;
}
/**
* Returns the linting configuration to be used for `codelyzer`.
* @return {string[]} The list of linting rules.
*/
function customRules(): string[] | {
var lintConf = require('../../tslint.json');
return lintConf.rulesDirectory;
} | identifier_body |
|
tco-cross-realm-class-construct.js | /*
* Copyright (c) André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
/*---
id: sec-function-calls-runtime-semantics-evaluation
info: Check TypeError is thrown from correct realm with tco-call to class constructor from class [[Construct]] invocation.
description: >
12.3.4.3 Runtime Semantics: EvaluateDirectCall( func, thisValue, arguments, tailPosition )
...
4. If tailPosition is true, perform PrepareForTailCall().
5. Let result be Call(func, thisValue, argList).
6. Assert: If tailPosition is true, the above call will not return here, but instead evaluation will continue as if the following return has already occurred.
7. Assert: If result is not an abrupt completion, then Type(result) is an ECMAScript language type.
8. Return result.
9.2.1 [[Call]] ( thisArgument, argumentsList)
...
2. If F.[[FunctionKind]] is "classConstructor", throw a TypeError exception.
3. Let callerContext be the running execution context.
4. Let calleeContext be PrepareForOrdinaryCall(F, undefined).
5. Assert: calleeContext is now the running execution context.
...
features: [tail-call-optimization, class] |
// - The class constructor call is in a valid tail-call position, which means PrepareForTailCall is performed.
// - The function call returns from `otherRealm` and proceeds the tail-call in this realm.
// - Calling the class constructor throws a TypeError from the current realm, that means this realm and not `otherRealm`.
var code = "(class { constructor() { return (class {})(); } });";
var otherRealm = $262.createRealm();
var tco = otherRealm.evalScript(code);
assert.throws(TypeError, function() {
new tco();
}); | ---*/ | random_line_split |
expr-block-generic-box2.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
type compare<'a, T> = |T, T|: 'a -> bool;
fn test_generic<T:Clone>(expected: T, eq: compare<T>) {
let actual: T = { expected.clone() };
assert!((eq(expected, actual)));
}
fn test_vec() |
pub fn main() { test_vec(); }
| {
fn compare_vec(v1: @int, v2: @int) -> bool { return v1 == v2; }
test_generic::<@int>(@1, compare_vec);
} | identifier_body |
expr-block-generic-box2.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
type compare<'a, T> = |T, T|: 'a -> bool;
fn | <T:Clone>(expected: T, eq: compare<T>) {
let actual: T = { expected.clone() };
assert!((eq(expected, actual)));
}
fn test_vec() {
fn compare_vec(v1: @int, v2: @int) -> bool { return v1 == v2; }
test_generic::<@int>(@1, compare_vec);
}
pub fn main() { test_vec(); }
| test_generic | identifier_name |
expr-block-generic-box2.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
type compare<'a, T> = |T, T|: 'a -> bool;
fn test_generic<T:Clone>(expected: T, eq: compare<T>) {
let actual: T = { expected.clone() };
assert!((eq(expected, actual)));
}
|
pub fn main() { test_vec(); } | fn test_vec() {
fn compare_vec(v1: @int, v2: @int) -> bool { return v1 == v2; }
test_generic::<@int>(@1, compare_vec);
} | random_line_split |
p2p-versionbits-warning.py | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2016 The Bitcoin Unlimited developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
Test version bits' warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
'''
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
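# Worked note (added for clarity): with these constants, the unknown-signalling block
# version used below is VB_TOP_BITS | (1 << VB_UNKNOWN_BIT)
# == 0x20000000 | 0x08000000 == 0x28000000.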
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
def on_inv(self, conn, message):
pass
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class VersionBitsWarningTest(BitcoinTestFramework):
def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w') as f:
pass
self.node_options = ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]
self.nodes.append(start_node(0, self.options.tmpdir, self.node_options))
import re
self.vb_pattern = re.compile("^Warning.*versionbit")
# Send numblocks blocks via peer with nVersionToUse set.
def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"]+1 | block = create_block(tip, create_coinbase(height+1), block_time)
block.nVersion = nVersionToUse
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def test_versionbits_in_alert_file(self):
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
assert(self.vb_pattern.match(alert_text))
def run_test(self):
# Setup the p2p connection and start up the network thread.
test_node = TestNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
test_node.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
# 1. Have the node mine one period worth of blocks
self.nodes[0].generate(VB_PERIOD)
# 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
# blocks signaling some unknown bit.
nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
# Fill rest of period with regular version blocks
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
# Check that we're not getting any versionbit-related errors in
# getinfo()
assert(not self.vb_pattern.match(self.nodes[0].getinfo()["errors"]))
# 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
# some unknown bit
self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
# Might not get a versionbits-related alert yet, as we should
# have gotten a different alert due to more than 51/100 blocks
# being of unexpected version.
# Check that getinfo() shows some kind of error.
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared, and restart the node. This should move the versionbit state
# to ACTIVE.
self.nodes[0].generate(VB_PERIOD)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
# Empty out the alert file
with open(self.alert_filename, 'w') as f:
pass
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
# Connecting one block should be enough to generate an error.
self.nodes[0].generate(1)
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
self.test_versionbits_in_alert_file()
# Test framework expects the node to still be running...
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
if __name__ == '__main__':
VersionBitsWarningTest().main() | tip = int(tip, 16)
for i in range(numblocks): | random_line_split |
p2p-versionbits-warning.py | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2016 The Bitcoin Unlimited developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
Test version bits' warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
'''
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
def on_inv(self, conn, message):
pass
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
|
self.ping_counter += 1
return received_pong
class VersionBitsWarningTest(BitcoinTestFramework):
def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w') as f:
pass
self.node_options = ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]
self.nodes.append(start_node(0, self.options.tmpdir, self.node_options))
import re
self.vb_pattern = re.compile("^Warning.*versionbit")
# Send numblocks blocks via peer with nVersionToUse set.
def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"]+1
tip = int(tip, 16)
for i in range(numblocks):
block = create_block(tip, create_coinbase(height+1), block_time)
block.nVersion = nVersionToUse
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def test_versionbits_in_alert_file(self):
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
assert(self.vb_pattern.match(alert_text))
def run_test(self):
# Setup the p2p connection and start up the network thread.
test_node = TestNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
test_node.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
# 1. Have the node mine one period worth of blocks
self.nodes[0].generate(VB_PERIOD)
# 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
# blocks signaling some unknown bit.
nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
# Fill rest of period with regular version blocks
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
# Check that we're not getting any versionbit-related errors in
# getinfo()
assert(not self.vb_pattern.match(self.nodes[0].getinfo()["errors"]))
# 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
# some unknown bit
self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
# Might not get a versionbits-related alert yet, as we should
# have gotten a different alert due to more than 51/100 blocks
# being of unexpected version.
# Check that getinfo() shows some kind of error.
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared, and restart the node. This should move the versionbit state
# to ACTIVE.
self.nodes[0].generate(VB_PERIOD)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
# Empty out the alert file
with open(self.alert_filename, 'w') as f:
pass
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
# Connecting one block should be enough to generate an error.
self.nodes[0].generate(1)
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
self.test_versionbits_in_alert_file()
# Test framework expects the node to still be running...
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
if __name__ == '__main__':
VersionBitsWarningTest().main()
| time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True | conditional_block |
p2p-versionbits-warning.py | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2016 The Bitcoin Unlimited developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
Test version bits' warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
'''
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
def on_inv(self, conn, message):
pass
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class | (BitcoinTestFramework):
def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w') as f:
pass
self.node_options = ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]
self.nodes.append(start_node(0, self.options.tmpdir, self.node_options))
import re
self.vb_pattern = re.compile("^Warning.*versionbit")
# Send numblocks blocks via peer with nVersionToUse set.
def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"]+1
tip = int(tip, 16)
for i in range(numblocks):
block = create_block(tip, create_coinbase(height+1), block_time)
block.nVersion = nVersionToUse
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def test_versionbits_in_alert_file(self):
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
assert(self.vb_pattern.match(alert_text))
def run_test(self):
# Setup the p2p connection and start up the network thread.
test_node = TestNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
test_node.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
# 1. Have the node mine one period worth of blocks
self.nodes[0].generate(VB_PERIOD)
# 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
# blocks signaling some unknown bit.
nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
# Fill rest of period with regular version blocks
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
# Check that we're not getting any versionbit-related errors in
# getinfo()
assert(not self.vb_pattern.match(self.nodes[0].getinfo()["errors"]))
# 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
# some unknown bit
self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
# Might not get a versionbits-related alert yet, as we should
# have gotten a different alert due to more than 51/100 blocks
# being of unexpected version.
# Check that getinfo() shows some kind of error.
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared, and restart the node. This should move the versionbit state
# to ACTIVE.
self.nodes[0].generate(VB_PERIOD)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
# Empty out the alert file
with open(self.alert_filename, 'w') as f:
pass
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
# Connecting one block should be enough to generate an error.
self.nodes[0].generate(1)
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
self.test_versionbits_in_alert_file()
# Test framework expects the node to still be running...
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
if __name__ == '__main__':
VersionBitsWarningTest().main()
| VersionBitsWarningTest | identifier_name |
p2p-versionbits-warning.py | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2016 The Bitcoin Unlimited developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
Test version bits' warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
'''
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
def on_inv(self, conn, message):
pass
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class VersionBitsWarningTest(BitcoinTestFramework):
| tip = int(tip, 16)
for i in range(numblocks):
block = create_block(tip, create_coinbase(height+1), block_time)
block.nVersion = nVersionToUse
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def test_versionbits_in_alert_file(self):
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
assert(self.vb_pattern.match(alert_text))
def run_test(self):
# Setup the p2p connection and start up the network thread.
test_node = TestNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
test_node.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
# 1. Have the node mine one period worth of blocks
self.nodes[0].generate(VB_PERIOD)
# 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
# blocks signaling some unknown bit.
nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
# Fill rest of period with regular version blocks
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
# Check that we're not getting any versionbit-related errors in
# getinfo()
assert(not self.vb_pattern.match(self.nodes[0].getinfo()["errors"]))
# 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
# some unknown bit
self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
# Might not get a versionbits-related alert yet, as we should
# have gotten a different alert due to more than 51/100 blocks
# being of unexpected version.
# Check that getinfo() shows some kind of error.
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared, and restart the node. This should move the versionbit state
# to ACTIVE.
self.nodes[0].generate(VB_PERIOD)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
# Empty out the alert file
with open(self.alert_filename, 'w') as f:
pass
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
# Connecting one block should be enough to generate an error.
self.nodes[0].generate(1)
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
self.test_versionbits_in_alert_file()
# Test framework expects the node to still be running...
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
if __name__ == '__main__':
VersionBitsWarningTest().main()
| def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w') as f:
pass
self.node_options = ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]
self.nodes.append(start_node(0, self.options.tmpdir, self.node_options))
import re
self.vb_pattern = re.compile("^Warning.*versionbit")
# Send numblocks blocks via peer with nVersionToUse set.
def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"]+1 | identifier_body |
AlertControl.tsx | import * as React from 'react';
import { AccessPropertyName, Alert, AlertType } from '../Models';
import { ApplicationState, AppThunkAction } from '../store';
import { CloseAlertAction } from '../store/Alert';
export interface AlertProps {
items: Alert[];
closeAlert(id: number): AppThunkAction<CloseAlertAction>;
}
export default class | extends React.Component<AlertProps, {}> {
public render() {
return (
<div className="alerts">
{this.props.items.map(this.renderAlert)}
</div>
);
}
private renderAlert = (item: Alert) => {
const closeAction = (e: any) => { e.preventDefault(); this.props.closeAlert(item.id); };
return (
<div key={item.id} className={'alert ' + item.alertType + ' alert-dismissable ' + `fade fade-${item.state}`} role="alert">
<a href="#" className="close" aria-label="close" onClick={closeAction}>×</a>
{item.message}
</div>
);
}
}
| App | identifier_name |
AlertControl.tsx |
import { ApplicationState, AppThunkAction } from '../store';
import { CloseAlertAction } from '../store/Alert';
export interface AlertProps {
items: Alert[];
closeAlert(id: number): AppThunkAction<CloseAlertAction>;
}
export default class App extends React.Component<AlertProps, {}> {
public render() {
return (
<div className="alerts">
{this.props.items.map(this.renderAlert)}
</div>
);
}
private renderAlert = (item: Alert) => {
const closeAction = (e: any) => { e.preventDefault(); this.props.closeAlert(item.id); };
return (
<div key={item.id} className={'alert ' + item.alertType + ' alert-dismissable ' + `fade fade-${item.state}`} role="alert">
<a href="#" className="close" aria-label="close" onClick={closeAction}>×</a>
{item.message}
</div>
);
}
} | import * as React from 'react';
import { AccessPropertyName, Alert, AlertType } from '../Models'; | random_line_split |
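// Usage sketch (illustrative; the store wiring shown here is assumed, not from the source):
// <App items={state.alerts} closeAlert={id => dispatch(closeAlert(id))} />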
|
mdColors.js | /*
* @license MIT
* @file
* @copyright KeyW Corporation 2016
*/
(function () {
"use strict";
var _theme;
angular
.module('mdColors',['mdColors'])
.config(['$mdThemingProvider', function($mdThemingProvider){
_theme = $mdThemingProvider.theme();
}])
.directive('mdStyleColor', ['$mdColorPalette',
function ($mdColorPalette) {
return {
restrict: 'A', | * @param {} element
* @param {} attrs
*/
link: function (scope, element, attrs) {
for (var p in scope.mdStyleColor) {
if (scope.mdStyleColor.hasOwnProperty(p)) {
var themeColors = _theme.colors;
var split = (scope.mdStyleColor[p] || '').split('.');
if (split.length < 2) split.unshift('primary');
var hueR = split[1] || 'hue-1'; // 'hue-1'
var colorR = split[0] || 'primary'; // 'warn'
// Absolute color: 'orange'
var colorA = themeColors[colorR] ?
themeColors[colorR].name : colorR;
// Absolute Hue: '500'
var hueA =
themeColors[colorR] ?
themeColors[colorR].hues[hueR] || hueR :
hueR;
var colorValue = $mdColorPalette[colorA][hueA] ?
$mdColorPalette[colorA][hueA].value :
$mdColorPalette[colorA]['500'].value;
element.css(p, 'rgb('+colorValue.join(',')+')');
}
}
}
}
}]);
}()); | scope: { mdStyleColor: '=' },
/**
* Description
* @method link
* @param {} scope | random_line_split |
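// Usage sketch (assumed markup, not part of the original file): each key of md-style-color
// is a CSS property and each value is an '<intention>.<hue>' pair resolved via the theme:
// <div md-style-color="{ color: 'primary.hue-2', 'border-color': 'warn.500' }"></div>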
mdColors.js | /*
* @license MIT
* @file
* @copyright KeyW Corporation 2016
*/
(function () {
"use strict";
var _theme;
angular
.module('mdColors',['mdColors'])
.config(['$mdThemingProvider', function($mdThemingProvider){
_theme = $mdThemingProvider.theme();
}])
.directive('mdStyleColor', ['$mdColorPalette',
function ($mdColorPalette) {
return {
restrict: 'A',
scope: { mdStyleColor: '=' },
/**
* Description
* @method link
* @param {} scope
* @param {} element
* @param {} attrs
*/
link: function (scope, element, attrs) {
for (var p in scope.mdStyleColor) {
if (scope.mdStyleColor.hasOwnProperty(p)) | var colorValue = $mdColorPalette[colorA][hueA] ?
$mdColorPalette[colorA][hueA].value :
$mdColorPalette[colorA]['500'].value;
element.css(p, 'rgb('+colorValue.join(',')+')');
}
}
}
}
}]);
}());
| {
var themeColors = _theme.colors;
var split = (scope.mdStyleColor[p] || '').split('.');
if (split.length < 2) split.unshift('primary');
var hueR = split[1] || 'hue-1'; // 'hue-1'
var colorR = split[0] || 'primary'; // 'warn'
// Absolute color: 'orange'
var colorA = themeColors[colorR] ?
themeColors[colorR].name : colorR;
// Absolute Hue: '500'
var hueA =
themeColors[colorR] ?
themeColors[colorR].hues[hueR] || hueR :
hueR;
| conditional_block |
redact.test.ts | import * as samples from "../../support";
/**
* Test for $redact operator
* https://docs.mongodb.com/manual/reference/operator/aggregation/redact/
*/
samples.runTestPipeline("operators/pipeline/redact", [
{
message: "Evaluate Access at Every Document Level",
input: [
{
_id: 1,
title: "123 Department Report",
tags: ["G", "STLW"],
year: 2014,
subsections: [
{
subtitle: "Section 1: Overview",
tags: ["SI", "G"],
content: "Section 1: This is the content of section 1.",
},
{
subtitle: "Section 2: Analysis",
tags: ["STLW"],
content: "Section 2: This is the content of section 2.",
},
{
subtitle: "Section 3: Budgeting",
tags: ["TK"],
content: {
text: "Section 3: This is the content of section3.",
tags: ["HCS"],
},
},
],
},
],
pipeline: [
{ $match: { year: 2014 } },
{
$redact: {
$cond: {
if: {
$gt: [
{ $size: { $setIntersection: ["$tags", ["STLW", "G"]] } },
0,
],
},
then: "$$DESCEND",
else: "$$PRUNE",
},
},
},
],
expected: [
{
_id: 1,
title: "123 Department Report",
tags: ["G", "STLW"],
year: 2014,
subsections: [
{
subtitle: "Section 1: Overview",
tags: ["SI", "G"],
content: "Section 1: This is the content of section 1.",
},
{
subtitle: "Section 2: Analysis",
tags: ["STLW"],
content: "Section 2: This is the content of section 2.",
},
],
},
],
},
{
message: "Exclude All Fields at a Given Level",
input: [
{
_id: 1,
level: 1,
acct_id: "xyz123",
cc: {
level: 5,
type: "yy",
num: 0,
exp_date: new Date("2015-11-01T00:00:00.000Z"),
billing_addr: {
level: 5,
addr1: "123 ABC Street",
city: "Some City",
},
shipping_addr: [
{
level: 3,
addr1: "987 XYZ Ave",
city: "Some City",
},
{
level: 3,
addr1: "PO Box 0123",
city: "Some City",
},
],
},
status: "A",
},
],
pipeline: [
{ $match: { status: "A" } },
{
$redact: {
$cond: {
if: { $eq: ["$level", 5] },
then: "$$PRUNE",
else: "$$DESCEND",
},
},
},
], | {
_id: 1,
level: 1,
acct_id: "xyz123",
status: "A",
},
],
},
]); |
expected: [ | random_line_split |
Toggle.js | import React from 'react';
import PropTypes from 'prop-types';
import generateId from 'extensions/generateId';
import Skeleton from 'skeletons/Skeleton';
import * as styles from './styles';
/**
* A simple `Toggle` component that can be turned on and off. Use `checked` to set
* whether the `Toggle` is selected.
*/
class Toggle extends React.PureComponent {
static propTypes = {
/**
* Adds a class name to the input element.
*/
className: PropTypes.string,
/**
* Adds an id to the input element.
*/
id: PropTypes.string,
/**
* The literal value this toggle represents. For example, if this toggle
* represents whether the app is in "Dark Mode", you might provide "darkMode"
* to this prop to represent that value key.
*/
value: PropTypes.string,
/**
* Whether the toggle is 'on' or 'off'.
*/
checked: PropTypes.bool,
/**
* Whether the toggle is required for form submission.
*/
required: PropTypes.bool,
/**
* Whether the user is prevented from interacting with the toggle.
*/
disabled: PropTypes.bool,
/**
* Adds a name to the underlying input.
*/
name: PropTypes.string,
/**
* A description to display next to the toggle.
*/
description: PropTypes.node,
/**
* Callback for the onChange event of the input.
*/
onChange: PropTypes.func,
/**
* A component to render the container around the toggle and label
*/
Container: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
/**
* A component to render the input element, usually hidden
*/
Input: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
/**
* A component to render the label, which usually also renders the toggle itself
*/
Label: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
};
static defaultProps = {
className: "scl-toggle",
id: null,
value: undefined,
checked: undefined,
name: null,
required: false,
disabled: false,
description: null,
onChange: () => null,
Container: styles.Container,
Input: styles.Input,
Label: styles.Label,
};
static styles = styles;
defaultId = generateId('toggle');
render() | id={finalId}
className={className}
name={name}
type="checkbox"
disabled={disabled}
checked={checked}
value={value}
required={required}
onChange={onChange}
{...rest}
/>
<Label htmlFor={finalId}>{description}</Label>
</Container>
);
}
}
Toggle.Skeleton = props => (
<Toggle
Input={() => <Skeleton width="58px" height="30px" />}
Label={() => (
<Skeleton style={{ marginLeft: '15px' }} width="150px" height="30px" />
)}
{...props}
/>
);
export default Toggle;
| {
const {
className,
disabled,
required,
name,
description,
onChange,
Container,
Input,
Label,
id,
value,
checked,
...rest
} = this.props;
const finalId = id || this.defaultId;
return (
<Container>
<Input | identifier_body |
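// Usage sketch (illustrative; the handler and state names below are hypothetical):
// <Toggle name="darkMode" value="darkMode" checked={enabled}
//         description="Dark mode" onChange={e => setEnabled(e.target.checked)} />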
Toggle.js | import React from 'react';
import PropTypes from 'prop-types';
import generateId from 'extensions/generateId';
import Skeleton from 'skeletons/Skeleton';
import * as styles from './styles';
/**
* A simple `Toggle` component that can be turned on and off. Use `checked` to set
* whether the `Toggle` is selected.
*/
class | extends React.PureComponent {
static propTypes = {
/**
* Adds a class name to the input element.
*/
className: PropTypes.string,
/**
* Adds an id to the input element.
*/
id: PropTypes.string,
/**
* The literal value this toggle represents. For example, if this toggle
* represents whether the app is in "Dark Mode", you might provide "darkMode"
* to this prop to represent that value key.
*/
value: PropTypes.string,
/**
* Whether the toggle is 'on' or 'off'.
*/
checked: PropTypes.bool,
/**
* Whether the toggle is required for form submission.
*/
required: PropTypes.bool,
/**
* Whether the user is prevented from interacting with the toggle.
*/
disabled: PropTypes.bool,
/**
* Adds a name to the underlying input.
*/
name: PropTypes.string,
/**
* A description to display next to the toggle.
*/
description: PropTypes.node,
/**
* Callback for the onChange event of the input.
*/
onChange: PropTypes.func,
/**
* A component to render the container around the toggle and label
*/
Container: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
/**
* A component to render the input element, usually hidden
*/
Input: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
/**
* A component to render the label, which usually also renders the toggle itself
*/
Label: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
};
static defaultProps = {
className: "scl-toggle",
id: null,
value: undefined,
checked: undefined,
name: null,
required: false,
disabled: false,
description: null,
onChange: () => null,
Container: styles.Container,
Input: styles.Input,
Label: styles.Label,
};
static styles = styles;
defaultId = generateId('toggle');
render() {
const {
className,
disabled,
required,
name,
description,
onChange,
Container,
Input,
Label,
id,
value,
checked,
...rest
} = this.props;
const finalId = id || this.defaultId;
return (
<Container>
<Input
id={finalId}
className={className}
name={name}
type="checkbox"
disabled={disabled}
checked={checked}
value={value}
required={required}
onChange={onChange}
{...rest}
/>
<Label htmlFor={finalId}>{description}</Label>
</Container>
);
}
}
Toggle.Skeleton = props => (
<Toggle
Input={() => <Skeleton width="58px" height="30px" />}
Label={() => (
<Skeleton style={{ marginLeft: '15px' }} width="150px" height="30px" />
)}
{...props}
/>
);
export default Toggle;
| Toggle | identifier_name |
Toggle.js | import React from 'react';
import PropTypes from 'prop-types';
import generateId from 'extensions/generateId';
import Skeleton from 'skeletons/Skeleton';
import * as styles from './styles';
/**
* A simple `Toggle` component that can be turned on and off. Use `checked` to set
* whether the `Toggle` is selected.
*/
class Toggle extends React.PureComponent { | * Adds a class name to the input element.
*/
className: PropTypes.string,
/**
* Adds an id to the input element.
*/
id: PropTypes.string,
/**
* The literal value this toggle represents. For example, if this toggle
* represents whether the app is in "Dark Mode", you might provide "darkMode"
* to this prop to represent that value key.
*/
value: PropTypes.string,
/**
* Whether the toggle is 'on' or 'off'.
*/
checked: PropTypes.bool,
/**
* Whether the toggle is required for form submission.
*/
required: PropTypes.bool,
/**
* Whether the user is prevented from interacting with the toggle.
*/
disabled: PropTypes.bool,
/**
* Adds a name to the underlying input.
*/
name: PropTypes.string,
/**
* A description to display next to the toggle.
*/
description: PropTypes.node,
/**
* Callback for the onChange event of the input.
*/
onChange: PropTypes.func,
/**
* A component to render the container around the toggle and label
*/
Container: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
/**
* A component to render the input element, usually hidden
*/
Input: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
/**
* A component to render the label, which usually also renders the toggle itself
*/
Label: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
};
static defaultProps = {
className: "scl-toggle",
id: null,
value: undefined,
checked: undefined,
name: null,
required: false,
disabled: false,
description: null,
onChange: () => null,
Container: styles.Container,
Input: styles.Input,
Label: styles.Label,
};
static styles = styles;
defaultId = generateId('toggle');
render() {
const {
className,
disabled,
required,
name,
description,
onChange,
Container,
Input,
Label,
id,
value,
checked,
...rest
} = this.props;
const finalId = id || this.defaultId;
return (
<Container>
<Input
id={finalId}
className={className}
name={name}
type="checkbox"
disabled={disabled}
checked={checked}
value={value}
required={required}
onChange={onChange}
{...rest}
/>
<Label htmlFor={finalId}>{description}</Label>
</Container>
);
}
}
Toggle.Skeleton = props => (
<Toggle
Input={() => <Skeleton width="58px" height="30px" />}
Label={() => (
<Skeleton style={{ marginLeft: '15px' }} width="150px" height="30px" />
)}
{...props}
/>
);
export default Toggle; | static propTypes = {
/** | random_line_split |
add-credit-card.directive.ts | /*
* Copyright (c) [2015] - [2017] Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
interface ICreditCardElement extends ng.IAugmentedJQuery {
card: Function;
}
/**
* Defines a directive for creating a credit card component.
* @author Oleksii Kurinnyi
*/
export class AddCreditCard {
$timeout: ng.ITimeoutService;
restrict: string = 'E';
replace: boolean = false;
templateUrl: string = 'app/billing/card-info/add-credit-card/add-credit-card.html';
bindToController: boolean = true;
controller: string = 'AddCreditCardController';
controllerAs: string = 'addCreditCardController';
scope: {
[propName: string]: string
};
/**
* Default constructor that is using resource
* @ngInject for Dependency injection
*/
constructor ($timeout: ng.ITimeoutService) {
this.$timeout = $timeout;
this.scope = {
creditCard: '='
};
}
link($scope: ng.IScope, $element: ICreditCardElement): void {
($element.find('.addCreditCardForm') as ICreditCardElement).card({
// a selector or jQuery object for the container
// where you want the card to appear
container: '.card-wrapper', // *required* | nameInput: 'input[name="deskcardholder"]', // optional - defaults to input[name="name"]
// width: 200, // optional — default 350px
formatting: true // optional - default true
});
let deregistrationFn = $scope.$watch(() => { return $element.find('input[name="deskcardNumber"]').is(':visible'); }, (visible) => {
if (visible) {
deregistrationFn();
this.$timeout(() => {
$element.find('input[name="deskcardNumber"]').focus();
}, 100);
}
});
}
} | numberInput: 'input[name="deskcardNumber"]', // optional — default input[name="number"]
expiryInput: 'input[name="deskexpires"]', // optional — default input[name="expiry"]
cvcInput: 'input[name="deskcvv"]', // optional — default input[name="cvc"] | random_line_split |
add-credit-card.directive.ts | /*
* Copyright (c) [2015] - [2017] Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
interface ICreditCardElement extends ng.IAugmentedJQuery {
card: Function;
}
/**
* Defines a directive for creating a credit card component.
* @author Oleksii Kurinnyi
*/
export class | {
$timeout: ng.ITimeoutService;
restrict: string = 'E';
replace: boolean = false;
templateUrl: string = 'app/billing/card-info/add-credit-card/add-credit-card.html';
bindToController: boolean = true;
controller: string = 'AddCreditCardController';
controllerAs: string = 'addCreditCardController';
scope: {
[propName: string]: string
};
/**
* Default constructor that is using resource
* @ngInject for Dependency injection
*/
constructor ($timeout: ng.ITimeoutService) {
this.$timeout = $timeout;
this.scope = {
creditCard: '='
};
}
link($scope: ng.IScope, $element: ICreditCardElement): void {
($element.find('.addCreditCardForm') as ICreditCardElement).card({
// a selector or jQuery object for the container
// where you want the card to appear
container: '.card-wrapper', // *required*
numberInput: 'input[name="deskcardNumber"]', // optional — default input[name="number"]
expiryInput: 'input[name="deskexpires"]', // optional — default input[name="expiry"]
cvcInput: 'input[name="deskcvv"]', // optional — default input[name="cvc"]
nameInput: 'input[name="deskcardholder"]', // optional - defaults to input[name="name"]
// width: 200, // optional — default 350px
formatting: true // optional - default true
});
let deregistrationFn = $scope.$watch(() => { return $element.find('input[name="deskcardNumber"]').is(':visible'); }, (visible) => {
if (visible) {
deregistrationFn();
this.$timeout(() => {
$element.find('input[name="deskcardNumber"]').focus();
}, 100);
}
});
}
}
| AddCreditCard | identifier_name |
add-credit-card.directive.ts | /*
* Copyright (c) [2015] - [2017] Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
interface ICreditCardElement extends ng.IAugmentedJQuery {
card: Function;
}
/**
* Defines a directive for creating a credit card component.
* @author Oleksii Kurinnyi
*/
export class AddCreditCard {
$timeout: ng.ITimeoutService;
restrict: string = 'E';
replace: boolean = false;
templateUrl: string = 'app/billing/card-info/add-credit-card/add-credit-card.html';
bindToController: boolean = true;
controller: string = 'AddCreditCardController';
controllerAs: string = 'addCreditCardController';
scope: {
[propName: string]: string
};
/**
* Default constructor that is using resource
* @ngInject for Dependency injection
*/
constructor ($timeout: ng.ITimeoutService) {
this.$timeout = $timeout;
this.scope = {
creditCard: '='
};
}
link($scope: ng.IScope, $element: ICreditCardElement): void {
($element.find('.addCreditCardForm') as ICreditCardElement).card({
// a selector or jQuery object for the container
// where you want the card to appear
container: '.card-wrapper', // *required*
numberInput: 'input[name="deskcardNumber"]', // optional — default input[name="number"]
expiryInput: 'input[name="deskexpires"]', // optional — default input[name="expiry"]
cvcInput: 'input[name="deskcvv"]', // optional — default input[name="cvc"]
nameInput: 'input[name="deskcardholder"]', // optional - defaults to input[name="name"]
// width: 200, // optional — default 350px
formatting: true // optional - default true
});
let deregistrationFn = $scope.$watch(() => { return $element.find('input[name="deskcardNumber"]').is(':visible'); }, (visible) => {
if (visible) {
|
}
}
| deregistrationFn();
this.$timeout(() => {
$element.find('input[name="deskcardNumber"]').focus();
}, 100);
}
}); | conditional_block |
palette-sort.py | _PARTITIONED = 3
SELECTIONS = (SELECT_ALL, SELECT_SLICE, SELECT_AUTOSLICE, SELECT_PARTITIONED)
def noop(v, i):
return v
def to_hsv(v, i):
return v.to_hsv()
def to_hsl(v, i):
return v.to_hsl()
def to_yiq(v, i):
return rgb_to_yiq(*v[:-1])
def to_index(v, i):
return (i,)
def to_random(v, i):
return (randint(0, 0x7fffffff),)
channel_getters = [ (noop, 0), (noop, 1), (noop, 2),
(to_yiq, 0),
(to_hsv, 0), (to_hsv, 1), (to_hsv, 2),
(to_hsl, 1), (to_hsl, 2),
(to_index, 0),
(to_random, 0)]
try:
from colormath.color_objects import RGBColor, LabColor, LCHabColor
AVAILABLE_CHANNELS = AVAILABLE_CHANNELS + (_("Lightness (LAB)"),
_("A-color"), _("B-color"),
_("Chroma (LCHab)"),
_("Hue (LCHab)"))
to_lab = lambda v,i: RGBColor(*v[:-1]).convert_to('LAB').get_value_tuple()
to_lchab = (lambda v,i:
RGBColor(*v[:-1]).convert_to('LCHab').get_value_tuple())
channel_getters.extend([(to_lab, 0), (to_lab, 1), (to_lab, 2),
(to_lchab, 1), (to_lchab, 2)])
except ImportError:
pass
def parse_slice(s, numcolors):
"""Parse a slice spec and return (start, nrows, length)
All items are optional. Omitting them makes the largest possible selection that
exactly fits the other items.
start:nrows,length
'' selects all items, as does ':'
':4,' makes a 4-row selection out of all colors (length auto-determined)
':4' also.
':1,4' selects the first 4 colors
':,4' selects rows of 4 colors (nrows auto-determined)
':4,4' selects 4 rows of 4 colors
'4:' selects a single row of all colors after 4, inclusive.
'4:,4' selects rows of 4 colors, starting at 4 (nrows auto-determined)
'4:4,4' selects 4 rows of 4 colors (16 colors total), beginning at index 4.
'4' is illegal (ambiguous)
In general, slices are comparable to a numpy sub-array.
'start at element START, with shape (NROWS, LENGTH)'
"""
s = s.strip()
def notunderstood():
raise ValueError('Slice %r not understood. Should be in format'
' START?:NROWS?,ROWLENGTH? eg. "0:4,16".' % s)
def _int(v):
try:
return int(v)
except ValueError:
notunderstood()
if s in ('', ':', ':,'):
return 0, 1, numcolors # entire palette, one row
if s.count(':') != 1:
notunderstood()
rowpos = s.find(':')
start = 0
if rowpos > 0:
start = _int(s[:rowpos])
numcolors -= start
nrows = 1
if ',' in s:
commapos = s.find(',')
nrows = s[rowpos+1:commapos]
length = s[commapos+1:]
if not nrows:
if not length:
notunderstood()
else:
length = _int(length)
if length == 0:
notunderstood()
nrows = numcolors // length
if numcolors % length:
nrows = -nrows
elif not length:
nrows = _int(nrows)
if nrows == 0:
notunderstood()
length = numcolors // nrows
if numcolors % nrows:
length = -length
else:
nrows = _int(nrows)
if nrows == 0:
notunderstood()
length = _int(length)
if length == 0:
notunderstood()
else:
nrows = _int(s[rowpos+1:])
if nrows == 0:
notunderstood()
length = numcolors // nrows
if numcolors % nrows:
length = -length
return start, nrows, length
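# Worked example of the return contract (added for clarity):
# parse_slice('4:4,4', 64) -> (4, 4, 4), i.e. start at entry 4 and take
# 4 rows of 4 colors each (16 entries in total).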
def quantization_grain(channel, g):
"Given a channel and a quantization, return the size of a quantization grain"
g = max(1.0, g)
if g <= 1.0:
g = 0.00001
else:
g = max(0.00001, GRAIN_SCALE[channel] / g)
return g
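# Sketch (the GRAIN_SCALE values are assumed here, not shown in this file): if
# GRAIN_SCALE[channel] were 360.0 (a hue channel) and g == 36, the grain would be
# 360.0 / 36 == 10.0, so that channel is bucketed into 10-unit steps before sorting.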
def palette_sort(palette, selection, slice_expr, channel1, ascending1,
channel2, ascending2, quantize, pchannel, pquantize):
grain1 = quantization_grain(channel1, quantize)
grain2 = quantization_grain(channel2, quantize)
pgrain = quantization_grain(pchannel, pquantize)
#If palette is read only, work on a copy:
editable = pdb.gimp_palette_is_editable(palette)
if not editable:
palette = pdb.gimp_palette_duplicate (palette)
num_colors = pdb.gimp_palette_get_info (palette)
start, nrows, length = None, None, None
if selection == SELECT_AUTOSLICE:
def find_index(color, startindex=0):
for i in range(startindex, num_colors):
c = pdb.gimp_palette_entry_get_color (palette, i)
if c == color:
return i
return None
def hexcolor(c):
return "#%02x%02x%02x" % tuple(c[:-1])
fg = pdb.gimp_context_get_foreground()
bg = pdb.gimp_context_get_background()
start = find_index(fg)
end = find_index(bg)
if start is None:
raise ValueError("Couldn't find foreground color %r in palette" % list(fg))
if end is None:
raise ValueError("Couldn't find background color %r in palette" % list(bg))
if find_index(fg, start + 1):
raise ValueError('Autoslice cannot be used when more than one'
' instance of an endpoint'
' (%s) is present' % hexcolor(fg))
if find_index(bg, end + 1):
raise ValueError('Autoslice cannot be used when more than one'
' instance of an endpoint'
' (%s) is present' % hexcolor(bg))
if start > end:
end, start = start, end
length = (end - start) + 1
try:
_, nrows, _ = parse_slice(slice_expr, length)
nrows = abs(nrows)
if length % nrows:
raise ValueError('Total length %d not evenly divisible'
' by number of rows %d' % (length, nrows))
length /= nrows
except ValueError:
# bad expression is okay here, just assume one row
nrows = 1
# remaining behaviour is implemented by SELECT_SLICE 'inheritance'.
selection= SELECT_SLICE
elif selection in (SELECT_SLICE, SELECT_PARTITIONED):
start, nrows, length = parse_slice(slice_expr, num_colors)
channels_getter_1, channel_index = channel_getters[channel1]
channels_getter_2, channel2_index = channel_getters[channel2]
def get_colors(start, end):
|
if selection == SELECT_ALL:
entry_list = get_colors(0, num_colors)
entry_list.sort(key=lambda v:v[0])
for i in range(num_colors):
pdb.gimp_palette_entry_set_name (palette, i, entry_list[i][1][0])
pdb.gimp_palette_entry_set_color (palette, i, entry_list[i][1][1])
elif selection == SELECT_PARTITIONED:
if num_colors < (start + length * nrows) - 1:
raise ValueError('Not enough entries in palette to '
'sort complete rows! Got %d, expected >=%d' %
(num_colors, start + length * nrows))
pchannels_getter, pchannel_index = channel_getters[pchannel]
for row in range(nrows):
partition_spans = [1]
rowstart = | result = []
for i in range(start, end):
entry = (pdb.gimp_palette_entry_get_name (palette, i),
pdb.gimp_palette_entry_get_color (palette, i))
index1 = channels_getter_1(entry[1], i)[channel_index]
index2 = channels_getter_2(entry[1], i)[channel2_index]
index = ((index1 - (index1 % grain1)) * (1 if ascending1 else -1),
(index2 - (index2 % grain2)) * (1 if ascending2 else -1)
)
result.append((index, entry))
return result | identifier_body |
palette-sort.py | _PARTITIONED = 3
SELECTIONS = (SELECT_ALL, SELECT_SLICE, SELECT_AUTOSLICE, SELECT_PARTITIONED)
def noop(v, i):
return v
def to_hsv(v, i):
return v.to_hsv()
def to_hsl(v, i):
return v.to_hsl()
def to_yiq(v, i):
return rgb_to_yiq(*v[:-1])
def to_index(v, i):
return (i,)
def to_random(v, i):
return (randint(0, 0x7fffffff),)
channel_getters = [ (noop, 0), (noop, 1), (noop, 2),
(to_yiq, 0),
(to_hsv, 0), (to_hsv, 1), (to_hsv, 2),
(to_hsl, 1), (to_hsl, 2),
(to_index, 0),
(to_random, 0)]
try:
from colormath.color_objects import RGBColor, LabColor, LCHabColor
AVAILABLE_CHANNELS = AVAILABLE_CHANNELS + (_("Lightness (LAB)"),
_("A-color"), _("B-color"),
_("Chroma (LCHab)"),
_("Hue (LCHab)"))
to_lab = lambda v,i: RGBColor(*v[:-1]).convert_to('LAB').get_value_tuple()
to_lchab = (lambda v,i:
RGBColor(*v[:-1]).convert_to('LCHab').get_value_tuple())
channel_getters.extend([(to_lab, 0), (to_lab, 1), (to_lab, 2),
(to_lchab, 1), (to_lchab, 2)])
except ImportError:
pass
def parse_slice(s, numcolors):
"""Parse a slice spec and return (start, nrows, length)
All items are optional. Omitting them makes the largest possible selection that
exactly fits the other items.
start:nrows,length
'' selects all items, as does ':'
':4,' makes a 4-row selection out of all colors (length auto-determined)
':4' also.
':1,4' selects the first 4 colors
':,4' selects rows of 4 colors (nrows auto-determined)
':4,4' selects 4 rows of 4 colors
'4:' selects a single row of all colors after 4, inclusive.
'4:,4' selects rows of 4 colors, starting at 4 (nrows auto-determined)
'4:4,4' selects 4 rows of 4 colors (16 colors total), beginning at index 4.
'4' is illegal (ambiguous)
In general, slices are comparable to a numpy sub-array.
'start at element START, with shape (NROWS, LENGTH)'
"""
s = s.strip()
def notunderstood():
raise ValueError('Slice %r not understood. Should be in format'
' START?:NROWS?,ROWLENGTH? eg. "0:4,16".' % s)
def _int(v):
try:
return int(v)
except ValueError:
notunderstood()
if s in ('', ':', ':,'):
return 0, 1, numcolors # entire palette, one row
if s.count(':') != 1:
notunderstood()
rowpos = s.find(':')
start = 0
if rowpos > 0:
start = _int(s[:rowpos])
numcolors -= start
nrows = 1
if ',' in s:
commapos = s.find(',')
nrows = s[rowpos+1:commapos]
length = s[commapos+1:]
if not nrows:
if not length:
notunderstood()
else:
length = _int(length)
if length == 0:
notunderstood()
nrows = numcolors // length
if numcolors % length:
nrows = -nrows
elif not length:
nrows = _int(nrows)
if nrows == 0:
notunderstood()
length = numcolors // nrows
if numcolors % nrows:
length = -length
else:
nrows = _int(nrows)
if nrows == 0:
notunderstood()
length = _int(length)
if length == 0:
notunderstood()
else:
nrows = _int(s[rowpos+1:])
if nrows == 0:
notunderstood()
length = numcolors // nrows
if numcolors % nrows:
length = -length
return start, nrows, length
def quantization_grain(channel, g):
"Given a channel and a quantization, return the size of a quantization grain"
g = max(1.0, g)
if g <= 1.0:
g = 0.00001
else:
g = max(0.00001, GRAIN_SCALE[channel] / g)
return g
| def palette_sort(palette, selection, slice_expr, channel1, ascending1,
channel2, ascending2, quantize, pchannel, pquantize):
grain1 = quantization_grain(channel1, quantize)
grain2 = quantization_grain(channel2, quantize)
pgrain = quantization_grain(pchannel, pquantize)
#If palette is read only, work on a copy:
editable = pdb.gimp_palette_is_editable(palette)
if not editable:
palette = pdb.gimp_palette_duplicate (palette)
num_colors = pdb.gimp_palette_get_info (palette)
start, nrows, length = None, None, None
if selection == SELECT_AUTOSLICE:
def find_index(color, startindex=0):
for i in range(startindex, num_colors):
c = pdb.gimp_palette_entry_get_color (palette, i)
if c == color:
return i
return None
def hexcolor(c):
return "#%02x%02x%02x" % tuple(c[:-1])
fg = pdb.gimp_context_get_foreground()
bg = pdb.gimp_context_get_background()
start = find_index(fg)
end = find_index(bg)
if start is None:
raise ValueError("Couldn't find foreground color %r in palette" % list(fg))
if end is None:
raise ValueError("Couldn't find background color %r in palette" % list(bg))
if find_index(fg, start + 1):
raise ValueError('Autoslice cannot be used when more than one'
' instance of an endpoint'
' (%s) is present' % hexcolor(fg))
if find_index(bg, end + 1):
raise ValueError('Autoslice cannot be used when more than one'
' instance of an endpoint'
' (%s) is present' % hexcolor(bg))
if start > end:
end, start = start, end
length = (end - start) + 1
try:
_, nrows, _ = parse_slice(slice_expr, length)
nrows = abs(nrows)
if length % nrows:
raise ValueError('Total length %d not evenly divisible'
' by number of rows %d' % (length, nrows))
length /= nrows
except ValueError:
# bad expression is okay here, just assume one row
nrows = 1
# remaining behaviour is implemented by SELECT_SLICE 'inheritance'.
            selection = SELECT_SLICE
elif selection in (SELECT_SLICE, SELECT_PARTITIONED):
start, nrows, length = parse_slice(slice_expr, num_colors)
channels_getter_1, channel_index = channel_getters[channel1]
channels_getter_2, channel2_index = channel_getters[channel2]
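    # get_colors builds (sort_key, entry) pairs: each key component is the
    # channel value floored to its quantization grain, and it is negated when
    # a descending sort is requested so that an ascending sort of the keys
    # produces the desired order.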
def get_colors(start, end):
result = []
for i in range(start, end):
entry = (pdb.gimp_palette_entry_get_name (palette, i),
pdb.gimp_palette_entry_get_color (palette, i))
index1 = channels_getter_1(entry[1], i)[channel_index]
index2 = channels_getter_2(entry[1], i)[channel2_index]
index = ((index1 - (index1 % grain1)) * (1 if ascending1 else -1),
(index2 - (index2 % grain2)) * (1 if ascending2 else -1)
)
result.append((index, entry))
return result
if selection == SELECT_ALL:
entry_list = get_colors(0, num_colors)
entry_list.sort(key=lambda v:v[0])
for i in range(num_colors):
pdb.gimp_palette_entry_set_name (palette, i, entry_list[i][1][0])
pdb.gimp_palette_entry_set_color (palette, i, entry_list[i][1][1])
elif selection == SELECT_PARTITIONED:
if num_colors < (start + length * nrows) - 1:
raise ValueError('Not enough entries in palette to '
'sort complete rows! Got %d, expected >=%d' %
(num_colors, start + length * nrows))
pchannels_getter, pchannel_index = channel_getters[pchannel]
for row in range(nrows):
partition_spans = [1]
rowstart = start + (row * length)
old_color = pdb.gimp_palette_entry_get_color (palette,
rowstart)
old_partition = pchannels_getter(old_color, rowstart)[pchannel_index]
old_partition = old_partition - (old_partition % pgrain)
for i in range(rowstart + 1, rowstart + length):
this_color = pdb.gimp_palette_entry_get_color (palette, i)
this_partition = pchannels_getter(this_color, i)[pchannel_index]
this_partition = this_partition - (this_partition % pgrain)
if this_partition == old_partition:
partition_spans[-1] += 1
else:
partition_spans.append(1)
old_partition = this_partition
base = rowstart
for size in partition_spans:
                palette_sort(palette, SELECT_SLICE, '%d:1,%d' % (base, size),
                             channel1, ascending1, channel2, ascending2,
                             quantize, 0, 1.0)
base += size
else:
stride = length
if num_colors < (start + stride * nrows) - 1:
raise ValueError('Not enough entries in palette to sort '
'complete rows! Got %d, expected >=%d' %
(num_colors, start + stride * nrows))
for row_start in range(start, start + stride * nrows, stride):
sublist = get_colors(row_start, row_start + stride)
sublist.sort(key=lambda v:v[0], reverse=not ascending)
for i, entry in zip(range(row_start, row_start + stride), sublist):
                pdb.gimp_palette_entry_set_name (palette, i, entry[1][0])
                pdb.gimp_palette_entry_set_color (palette, i, entry[1][1])
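
# Note: the SELECT_PARTITIONED branch recurses with SELECT_SLICE over each
# run of palette entries that share the same quantized partition-channel
# value, so every partition is sorted independently.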

// extractMenu.ts
// Extract menu still frames.
/// <reference path="../../references.ts" />
'use strict';
import path = require('path');
import serverUtils = require('../../server/utils/index');
import utils = require('../../utils');
import editMetadataFile = require('../../server/utils/editMetadataFile');
export = extractMenu;
/**
* Extract menu still frames.
*
* @param {string} dvdPath
* @param {function} callback
*/
function extractMenu(dvdPath: string, callback) {
process.stdout.write('\nExtracting menu still frames:\n');
var webPath = serverUtils.getWebPath(dvdPath);
var ifoPath = getWebName('metadata');
var filesList = require(ifoPath);
var menu = [];
var pointer = 0;
next(filesList[pointer].ifo);
// There are better ways to do async...
function next(ifoFile: string) {
ifoFile = path.join(webPath, '../', ifoFile);
var json = require(ifoFile);
menu[pointer] = {};
menu[pointer].menu = {};
extractMenuData();
function extractMenuData() {
if (!json.pgci_ut || !json.pgci_ut.lu || !Array.isArray(json.pgci_ut.lu)) {
callNext();
return;
}
for (var i = 0; i < json.pgci_ut.nr_of_lus; i++) {
var lu = json.pgci_ut.lu[i];
var lang = utils.bit2str(lu.lang_code);
menu[pointer].menu[lang] = [];
for (var j = 0; j < lu.pgcit.nr_of_pgci_srp; j++) {
var pgci_srp = lu.pgcit.pgci_srp[j];
var pgcIndex = j + 1;
var vobID = null;
var cellID = null;
if (pgci_srp.pgc.cell_position && pgci_srp.pgc.cell_position.length) {
vobID = pgci_srp.pgc.cell_position[0].vob_id_nr;
cellID = pgci_srp.pgc.cell_position[0].cell_nr;
}
menu[pointer].menu[lang].push({
pgc: pgcIndex,
entry: pgci_srp.entry_id,
vobID: vobID,
cellID: cellID
});
}
}
callNext();
function callNext() {
pointer++;
if (pointer < filesList.length) {
setTimeout(function() {
next(filesList[pointer].ifo);
}, 0);
} else {
          // At the end of all iterations, save a metadata file
          // containing the menu data gathered from all IFO files.
editMetadataFile(getWebName('metadata'), menu, function() {
callback();
});
}
}
}
}
/**
* Return the file path for the web given a file.
* Used for naming both the IFO files and the metadata file.
*
* @param name A file name.
* @return {string}
*/
  function getWebName(name: string): string {
return path.join(webPath, getJsonFileName(name));
}
}
/**
* Transform the file name of a JSON file.
*
* @param {string} name A file name.
* @return {string}
*/
function getJsonFileName(name: string): string {
return name.replace(/\.IFO$/i, '') + '.json';
}
}
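
// Hypothetical usage from a build script (the DVD path below is an
// assumption, not taken from the project):
//
//   import extractMenu = require('./extractMenu');
//   extractMenu('/media/dvd/MY_DVD', function () {
//     console.log('Menu still frames extracted.');
//   });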