file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---
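The table above is the schema of a fill-in-the-middle (FIM) corpus: each row stores a source file split into a `prefix`, the held-out `middle`, and a `suffix`, plus a `fim_type` label describing how the hole was cut (the four classes seen in the rows below are `random_line_split`, `identifier_name`, `identifier_body`, and `conditional_block`). A minimal sketch of turning one row into a training example follows; the sentinel tokens are placeholders, since real values are model- and tokenizer-specific.

```python
# Minimal sketch of turning one row of this dataset into a FIM training
# example. The sentinel tokens below are assumptions -- real values are
# model-specific (e.g. CodeLlama and StarCoder each define their own).
FIM_PREFIX = "<fim_prefix>"
FIM_SUFFIX = "<fim_suffix>"
FIM_MIDDLE = "<fim_middle>"

def to_psm_example(row: dict) -> dict:
    """Build a prefix-suffix-middle (PSM) sample: the model is shown prefix
    and suffix, and is trained to produce the middle."""
    prompt = f"{FIM_PREFIX}{row['prefix']}{FIM_SUFFIX}{row['suffix']}{FIM_MIDDLE}"
    return {"prompt": prompt, "completion": row["middle"], "label": row["fim_type"]}
```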
InfrastructureAugmenter.js | const fs = require('fs-promise');
const turf = require('turf');
const _ = require('underscore');
const complexify = require('geojson-tools').complexify;
class InfrastructureAugmenter {
constructor(callback) {
this.aggregatedData = null;
this.buildingsGeo = null;
this.landingsGeo = null;
this.landingsGeoById = null;
this.cablesGeo = null;
this.cablesGeoById = null;
this.oceanGeo = null;
this.loaded = false;
const p1 = fs.readFile('telegeography-data/aggregated-data.json', 'utf8');
p1.then(data => this.aggregatedData = JSON.parse(data));
const p2 = fs.readFile('telegeography-data/internetexchanges/buildings.geojson', 'utf8');
p2.then(data => this.buildingsGeo = JSON.parse(data));
const p3 = fs.readFile('maps/landingpoints.json', 'utf8');
p3.then(data => this.landingsGeo = JSON.parse(data));
const p4 = fs.readFile('maps/ocean.json', 'utf8');
p4.then(data => this.oceanGeo = JSON.parse(data));
const p5 = fs.readFile('maps/cable-data.json', 'utf8');
p5.then(data => this.cablesGeo = JSON.parse(data));
Promise.all([p1, p2, p3, p4, p5])
.then(() => {
this.loaded = true;
this.landingsGeoById = this._generateGeoById(this.landingsGeo, 'id');
this.cablesGeoById = this._generateGeoById(this.cablesGeo, 'cable_id');
callback(null);
}).catch(err => {
callback(err)
})
}
_generateGeoById(geoObj, propName) {
let geoById = {};
geoObj.features.forEach(feature => {
let prop = feature.properties[propName];
geoById[prop] = feature; // DANGER: assumes the property is unique; a duplicate id silently overwrites the earlier feature
});
return geoById;
}
addInfrastructureData(hop, nextHop) {
var self = this; // gross
hop.infrastructure = {
exchanges: [],
landings: [],
cable: null
};
if (hop.geo && this.loaded && nextHop && nextHop.geo) {
let hopGeoJSON = {
type: "Feature",
properties: {},
geometry: {
type: "Point",
coordinates: [hop.geo.lon, hop.geo.lat]
}
};
// If there is an Autonomous System change
if (hop.geo.as !== nextHop.geo.as &&
hop.geo.as !== '*' && nextHop.geo.as !== '*') {
// console.log('AUTONOMOUS SYSTEM CHANGE')
// console.log(hop)
let radius = 25; // in kilometers
let nearby = [];
this.buildingsGeo.features.forEach(feature => {
let dist = turf.distance(hopGeoJSON, feature, 'kilometers');
/*if (dist <= radius)*/ nearby.push({
dist, feature, fromAsn: hop.geo.as, toAsn: nextHop.geo.as
});
});
if (nearby.length > 0) {
if (nearby.length > 1) nearby = _.sortBy(nearby, obj => obj.dist);
hop.infrastructure.exchanges.push(nearby[0]);
console.log("NEAREST EXCHANGE POINT IS " + nearby[0].dist + " MILES AWAY");
}
let asn = hop.geo.as.split(' ')[0].substring(2);
// console.log(`AS change detected for ${hop.ip}. ${hop.geo.as} -> ${nextHop.geo.as}`)
}
let nearby = [];
let points = [[hop.geo.lon, hop.geo.lat],[nextHop.geo.lon, nextHop.geo.lat]];
//console.log(`HOP: [${hop.geo.lat}, ${hop.geo.lon}] [${nextHop.geo.lat}, ${nextHop.geo.lon}]`)
if (this._crossesOcean(points)) {
let nextHopGeoJSON = {
type: "Feature",
properties: {},
geometry: {
type: "Point",
coordinates: [nextHop.geo.lon, nextHop.geo.lat]
}
};
let landingNearHop = [];
let landingNearNextHop = [];
this.landingsGeo.features.forEach((feature, i) => {
//console.log(feature);
//return;
landingNearHop.push({ dist: turf.distance(hopGeoJSON, feature, 'kilometers'),
feature: feature,
coords: feature.geometry.coordinates,
id: feature.properties.id,
cableId: feature.properties.cable_id});
landingNearNextHop.push({ dist: turf.distance(nextHopGeoJSON, feature, 'kilometers'),
feature: feature,
coords: feature.geometry.coordinates,
id: feature.properties.id,
cableId: feature.properties.cable_id});
});
landingNearHop = _.sortBy(landingNearHop, function(landing) { return landing.dist });
landingNearNextHop = _.sortBy(landingNearNextHop, function(landing) { return landing.dist });
let c = getCables()[0];
if (c) { // getCables() may legitimately return an empty array; guard before dereferencing
hop.infrastructure.landings.push(c.start);
hop.infrastructure.landings.push(c.end);
hop.infrastructure.cable = c.cable;
}
// console.log(`${c.cable.properties.name} START: ${c.distStart} END: ${c.distEnd} SUM: ${c.distSum}`);
// cables.forEach(c => {
// if (c) {
// console.log(`${c.cable.properties.name} START: ${c.distStart} END: ${c.distEnd} SUM: ${c.distSum}`);
// hop.infrastructure.landings.push(c.start);
// hop.infrastructure.landings.push(c.end);
// hop.infrastructure.cable = c.cable;
// } else {
// console.log('CABLE NOT FOUND');
// }
// });
function getCables() {
let cables = [];
// For each landing points near the hop
for (let i = 0; i < landingNearHop.length; i++) | }
}
}
return _.uniq(_.sortBy(cables, cable => cable.distSum), cable => cable.cable.properties.id);
}
function getCableIds(cables) {
let ids = [];
cables.forEach(({cable_id}) => ids.push(parseInt(cable_id)));
return ids;
}
}
}
}
_crossesOcean(points) {
let inside = false;
let numPointsMustBeInOcean = 2;
let numPointsInOcean = 0;
points = complexify(points, 150);
points.shift(); // first point is duplicated by complexify
points.shift(); // remove first point
points.pop(); // remove last point
if (points.length < numPointsMustBeInOcean) return false;
for (let i = 0; i < points.length; i++) {
//console.log(points[i]);
if (turf.inside(turf.point(points[i]), this.oceanGeo.features[0])) {
numPointsInOcean++;
if (numPointsInOcean == numPointsMustBeInOcean) {
inside = true;
break;
}
}
}
return inside;
}
}
module.exports = InfrastructureAugmenter;
| {
// get that landing point's id
let cableId = landingNearHop[i].feature.properties.cable_id;
// For each landing point that cable has
for (let k = 0; k < self.aggregatedData.cable[cableId].landing_points.length; k++) {
let landing = self.aggregatedData.cable[cableId].landing_points[k];
// For all landing points near the next hop
for (let l = 0; l < landingNearNextHop.length; l++) {
if (landingNearNextHop[l].feature.properties.id == landing.id &&
landingNearNextHop[l].feature.properties.id != landingNearHop[i].feature.properties.id) {
cables.push({
start: landingNearHop[i].feature,
end: landingNearNextHop[l].feature,
cable: self.cablesGeoById[cableId],
distSum: landingNearHop[i].dist + landingNearNextHop[l].dist,
distStart: landingNearHop[i].dist,
distEnd: landingNearNextHop[l].dist
});
} | conditional_block |
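The `_crossesOcean` method above implements a simple heuristic: densify the hop-to-hop segment with `complexify` (one point roughly every 150 km), drop the duplicated and terminal points, and declare an ocean crossing once at least two interior points fall inside the ocean polygon. A rough port of the same idea to Python with shapely (an assumed dependency; the original uses turf.js and geojson-tools) looks like this:

```python
# Rough Python port of the _crossesOcean heuristic above, using shapely
# (assumed dependency; the original relies on turf.js).
from shapely.geometry import LineString

def crosses_ocean(p1, p2, ocean_polygon, step_deg=1.35, required_hits=2):
    """Densify the segment, skip the endpoints, and report a crossing once
    required_hits interior points lie inside the ocean polygon.
    step_deg ~ 150 km expressed in degrees (~111 km per degree)."""
    line = LineString([p1, p2])
    n = max(int(line.length / step_deg), 1)
    interior = [line.interpolate(i / n, normalized=True) for i in range(1, n)]
    hits = 0
    for pt in interior:
        if ocean_polygon.contains(pt):
            hits += 1
            if hits == required_hits:
                return True
    return False
```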
InfrastructureAugmenter.js | const fs = require('fs-promise');
const turf = require('turf');
const _ = require('underscore');
const complexify = require('geojson-tools').complexify;
class InfrastructureAugmenter {
constructor(callback) {
this.aggregatedData = null;
this.buildingsGeo = null;
this.landingsGeo = null;
this.landingsGeoById = null;
this.cablesGeo = null;
this.cablesGeoById = null;
this.oceanGeo = null;
this.loaded = false;
const p1 = fs.readFile('telegeography-data/aggregated-data.json', 'utf8');
p1.then(data => this.aggregatedData = JSON.parse(data));
const p2 = fs.readFile('telegeography-data/internetexchanges/buildings.geojson', 'utf8');
p2.then(data => this.buildingsGeo = JSON.parse(data));
const p3 = fs.readFile('maps/landingpoints.json', 'utf8');
p3.then(data => this.landingsGeo = JSON.parse(data));
const p4 = fs.readFile('maps/ocean.json', 'utf8');
p4.then(data => this.oceanGeo = JSON.parse(data));
const p5 = fs.readFile('maps/cable-data.json', 'utf8');
p5.then(data => this.cablesGeo = JSON.parse(data));
Promise.all([p1, p2, p3, p4, p5])
.then(() => {
this.loaded = true;
this.landingsGeoById = this._generateGeoById(this.landingsGeo, 'id');
this.cablesGeoById = this._generateGeoById(this.cablesGeo, 'cable_id');
callback(null);
}).catch(err => {
callback(err)
})
}
_generateGeoById(geoObj, propName) {
let geoById = {};
geoObj.features.forEach(feature => {
let prop = feature.properties[propName];
geoById[prop] = feature; // DANGER: assumes the property is unique; a duplicate id silently overwrites the earlier feature
});
return geoById;
}
addInfrastructureData(hop, nextHop) {
var self = this; // gross
hop.infrastructure = {
exchanges: [],
landings: [],
cable: null
};
if (hop.geo && this.loaded && nextHop && nextHop.geo) {
let hopGeoJSON = {
type: "Feature",
properties: {},
geometry: {
type: "Point",
coordinates: [hop.geo.lon, hop.geo.lat]
}
};
// If there is an Autonomous System change
if (hop.geo.as !== nextHop.geo.as &&
hop.geo.as !== '*' && nextHop.geo.as !== '*') {
// console.log('AUTONOMOUS SYSTEM CHANGE')
// console.log(hop)
let radius = 25; // in kilometers
let nearby = [];
this.buildingsGeo.features.forEach(feature => {
let dist = turf.distance(hopGeoJSON, feature, 'kilometers');
/*if (dist <= radius)*/ nearby.push({
dist, feature, fromAsn: hop.geo.as, toAsn: nextHop.geo.as
});
});
if (nearby.length > 0) {
if (nearby.length > 1) nearby = _.sortBy(nearby, obj => obj.dist);
hop.infrastructure.exchanges.push(nearby[0]);
console.log("NEAREST EXCHANGE POINT IS " + nearby[0].dist + " MILES AWAY");
}
let asn = hop.geo.as.split(' ')[0].substring(2);
// console.log(`AS change detected for ${hop.ip}. ${hop.geo.as} -> ${nextHop.geo.as}`)
}
let nearby = [];
let points = [[hop.geo.lon, hop.geo.lat],[nextHop.geo.lon, nextHop.geo.lat]];
//console.log(`HOP: [${hop.geo.lat}, ${hop.geo.lon}] [${nextHop.geo.lat}, ${nextHop.geo.lon}]`)
if (this._crossesOcean(points)) {
let nextHopGeoJSON = {
type: "Feature",
properties: {},
geometry: {
type: "Point",
coordinates: [nextHop.geo.lon, nextHop.geo.lat]
}
};
let landingNearHop = [];
let landingNearNextHop = [];
this.landingsGeo.features.forEach((feature, i) => {
//console.log(feature);
//return;
landingNearHop.push({ dist: turf.distance(hopGeoJSON, feature, 'kilometers'),
feature: feature,
coords: feature.geometry.coordinates,
id: feature.properties.id,
cableId: feature.properties.cable_id});
landingNearNextHop.push({ dist: turf.distance(nextHopGeoJSON, feature, 'kilometers'),
feature: feature,
coords: feature.geometry.coordinates,
id: feature.properties.id,
cableId: feature.properties.cable_id});
});
landingNearHop = _.sortBy(landingNearHop, function(landing) { return landing.dist });
landingNearNextHop = _.sortBy(landingNearNextHop, function(landing) { return landing.dist });
let c = getCables()[0];
if (c) { // getCables() may legitimately return an empty array; guard before dereferencing
hop.infrastructure.landings.push(c.start);
hop.infrastructure.landings.push(c.end);
hop.infrastructure.cable = c.cable;
}
// console.log(`${c.cable.properties.name} START: ${c.distStart} END: ${c.distEnd} SUM: ${c.distSum}`);
// cables.forEach(c => {
// if (c) {
// console.log(`${c.cable.properties.name} START: ${c.distStart} END: ${c.distEnd} SUM: ${c.distSum}`);
// hop.infrastructure.landings.push(c.start);
// hop.infrastructure.landings.push(c.end);
// hop.infrastructure.cable = c.cable;
// } else {
// console.log('CABLE NOT FOUND');
// }
// });
function | () {
let cables = [];
// For each landing points near the hop
for (let i = 0; i < landingNearHop.length; i++) {
// get that landing point's id
let cableId = landingNearHop[i].feature.properties.cable_id;
// For each landing point that cable has
for (let k = 0; k < self.aggregatedData.cable[cableId].landing_points.length; k++) {
let landing = self.aggregatedData.cable[cableId].landing_points[k];
// For all landing points near the next hop
for (let l = 0; l < landingNearNextHop.length; l++) {
if (landingNearNextHop[l].feature.properties.id == landing.id &&
landingNearNextHop[l].feature.properties.id != landingNearHop[i].feature.properties.id) {
cables.push({
start: landingNearHop[i].feature,
end: landingNearNextHop[l].feature,
cable: self.cablesGeoById[cableId],
distSum: landingNearHop[i].dist + landingNearNextHop[l].dist,
distStart: landingNearHop[i].dist,
distEnd: landingNearNextHop[l].dist
});
}
}
}
}
return _.uniq(_.sortBy(cables, cable => cable.distSum), cable => cable.cable.properties.id);
}
function getCableIds(cables) {
let ids = [];
cables.forEach(({cable_id}) => ids.push(parseInt(cable_id)));
return ids;
}
}
}
}
_crossesOcean(points) {
let inside = false;
let numPointsMustBeInOcean = 2;
let numPointsInOcean = 0;
points = complexify(points, 150);
points.shift(); // first point is duplicated by complexify
points.shift(); // remove first point
points.pop(); // remove last point
if (points.length < numPointsMustBeInOcean) return false;
for (let i = 0; i < points.length; i++) {
//console.log(points[i]);
if (turf.inside(turf.point(points[i]), this.oceanGeo.features[0])) {
numPointsInOcean++;
if (numPointsInOcean == numPointsMustBeInOcean) {
inside = true;
break;
}
}
}
return inside;
}
}
module.exports = InfrastructureAugmenter;
| getCables | identifier_name |
rks_water_hybgga.py | #!/usr/bin/env python
#JSON {"lot": "RKS/6-31G(d)",
#JSON "scf": "EDIIS2SCFSolver",
#JSON "er": "cholesky",
#JSON "difficulty": 5,
#JSON "description": "Basic RKS DFT example with hyrbid GGA exhange-correlation functional (B3LYP)"}
import numpy as np
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
# Load the coordinates from file.
# Use the XYZ file from HORTON's test data directory.
fn_xyz = context.get_fn('test/water.xyz')
mol = IOData.from_file(fn_xyz)
# Create a Gaussian basis set
obasis = get_gobasis(mol.coordinates, mol.numbers, '6-31g(d)')
# Compute Gaussian integrals
olp = obasis.compute_overlap()
kin = obasis.compute_kinetic()
na = obasis.compute_nuclear_attraction(mol.coordinates, mol.pseudo_numbers)
er_vecs = obasis.compute_electron_repulsion_cholesky()
# Define a numerical integration grid, needed for the XC functionals
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers)
# Create alpha orbitals
orb_alpha = Orbitals(obasis.nbasis)
# Initial guess
guess_core_hamiltonian(olp, kin + na, orb_alpha)
# Construct the restricted HF effective Hamiltonian
external = {'nn': compute_nucnuc(mol.coordinates, mol.pseudo_numbers)}
libxc_term = RLibXCHybridGGA('xc_b3lyp')
terms = [
RTwoIndexTerm(kin, 'kin'),
RDirectTerm(er_vecs, 'hartree'),
RGridGroup(obasis, grid, [libxc_term]),
RExchangeTerm(er_vecs, 'x_hf', libxc_term.get_exx_fraction()),
RTwoIndexTerm(na, 'ne'),
]
ham = REffHam(terms, external)
# Decide how to occupy the orbitals (5 alpha electrons)
occ_model = AufbauOccModel(5)
# Converge WFN with CDIIS+EDIIS SCF
# - Construct the initial density matrix (needed for CDIIS+EDIIS).
occ_model.assign(orb_alpha)
dm_alpha = orb_alpha.to_dm()
# - SCF solver
scf_solver = EDIIS2SCFSolver(1e-6)
scf_solver(ham, olp, occ_model, dm_alpha)
# Derive orbitals (coeffs, energies and occupations) from the Fock and density
# matrices. The energy is also computed to store it in the output file below.
fock_alpha = np.zeros(olp.shape)
ham.reset(dm_alpha)
ham.compute_energy()
ham.compute_fock(fock_alpha)
orb_alpha.from_fock_and_dm(fock_alpha, dm_alpha, olp)
# Assign results to the molecule object and write it to a file, e.g. for
# later analysis. Note that the CDIIS+EDIIS algorithm can only really construct
# an optimized density matrix, not the orbitals themselves.
mol.title = 'RKS computation on water'
mol.energy = ham.cache['energy']
mol.obasis = obasis
mol.orb_alpha = orb_alpha
mol.dm_alpha = dm_alpha
# useful for post-processing (results stored in double precision):
mol.to_file('water.h5')
# CODE BELOW IS FOR horton-regression-test.py ONLY. IT IS NOT PART OF THE EXAMPLE.
rt_results = {
'energy': ham.cache['energy'],
'orb_alpha': orb_alpha.energies,
'nn': ham.cache["energy_nn"],
'kin': ham.cache["energy_kin"],
'ne': ham.cache["energy_ne"],
'grid': ham.cache["energy_grid_group"],
'hartree': ham.cache["energy_hartree"],
'x_hf': ham.cache["energy_x_hf"],
}
# BEGIN AUTOGENERATED CODE. DO NOT CHANGE MANUALLY.
rt_previous = {
'energy': -76.406156776346975,
'orb_alpha': np.array([
-19.12494652215198, -0.99562109649344044, -0.52934359625260619,
-0.35973919172781244, -0.28895110439599314, 0.068187099284877942,
0.1532902668612677, 0.80078130036326101, 0.84958389626115138, 0.89305132504935913,
0.92182191946355896, 1.074508959522454, 1.3767806620540104, 1.7405943781554678,
1.7462666980125516, 1.7861275433424106, 2.3057917944397714, 2.5943014303914662
]),
'grid': -7.568923843396495,
'hartree': 46.893530019953076,
'kin': 76.03393036526309,
'ne': -199.129803256826,
'nn': 9.1571750364299866,
'x_hf': -1.792065097770653, | } | random_line_split |
|
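A useful property of the `rt_results` cache above: the total hybrid-DFT energy is simply the sum of its stored contributions (nuclear repulsion, kinetic, nuclear attraction, XC grid group, Hartree, and the scaled exact exchange). The regression values in `rt_previous` confirm this:

```python
# Sanity check: the total B3LYP energy equals the sum of its cached parts.
# Values copied from rt_previous above.
parts = {
    'nn': 9.1571750364299866,
    'kin': 76.03393036526309,
    'ne': -199.129803256826,
    'grid': -7.568923843396495,
    'hartree': 46.893530019953076,
    'x_hf': -1.792065097770653,
}
total = sum(parts.values())
assert abs(total - (-76.406156776346975)) < 1e-10
print(total)  # -76.40615677634698
```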
init-res-into-things.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Resources can't be copied, but storing into data structures counts
// as a move unless the stored thing is used afterwards.
struct r {
i: @mut int,
}
struct Box { x: r }
#[unsafe_destructor]
impl Drop for r {
fn drop(&self) {
unsafe {
*(self.i) = *(self.i) + 1;
}
}
}
fn r(i: @mut int) -> r {
r {
i: i
}
}
fn test_box() |
fn test_rec() {
let i = @mut 0;
{
let a = Box {x: r(i)};
}
assert_eq!(*i, 1);
}
fn test_tag() {
enum t {
t0(r),
}
let i = @mut 0;
{
let a = t0(r(i));
}
assert_eq!(*i, 1);
}
fn test_tup() {
let i = @mut 0;
{
let a = (r(i), 0);
}
assert_eq!(*i, 1);
}
fn test_unique() {
let i = @mut 0;
{
let a = ~r(i);
}
assert_eq!(*i, 1);
}
fn test_box_rec() {
let i = @mut 0;
{
let a = @Box {
x: r(i)
};
}
assert_eq!(*i, 1);
}
pub fn main() {
test_box();
test_rec();
test_tag();
test_tup();
test_unique();
test_box_rec();
}
| {
let i = @mut 0;
{
let a = @r(i);
}
assert_eq!(*i, 1);
} | identifier_body |
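The Rust tests above all follow one pattern: move a drop-counting resource into some container inside an inner scope, then assert that the destructor ran exactly once when the scope ended. A loose Python analogy of the same pattern is sketched below; it leans on CPython's deterministic reference counting, which is an interpreter detail rather than a language guarantee.

```python
# Loose Python analogy to the Rust drop-counter tests above: each R bumps a
# shared counter when finalized. Deterministic only under CPython refcounting.
class R:
    def __init__(self, counter):
        self.counter = counter
    def __del__(self):
        self.counter[0] += 1

def test_tup():
    i = [0]
    a = (R(i),)        # "move" the resource into a container
    del a              # dropping the container finalizes the resource
    assert i[0] == 1
```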
init-res-into-things.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Resources can't be copied, but storing into data structures counts
// as a move unless the stored thing is used afterwards.
| struct Box { x: r }
#[unsafe_destructor]
impl Drop for r {
fn drop(&self) {
unsafe {
*(self.i) = *(self.i) + 1;
}
}
}
fn r(i: @mut int) -> r {
r {
i: i
}
}
fn test_box() {
let i = @mut 0;
{
let a = @r(i);
}
assert_eq!(*i, 1);
}
fn test_rec() {
let i = @mut 0;
{
let a = Box {x: r(i)};
}
assert_eq!(*i, 1);
}
fn test_tag() {
enum t {
t0(r),
}
let i = @mut 0;
{
let a = t0(r(i));
}
assert_eq!(*i, 1);
}
fn test_tup() {
let i = @mut 0;
{
let a = (r(i), 0);
}
assert_eq!(*i, 1);
}
fn test_unique() {
let i = @mut 0;
{
let a = ~r(i);
}
assert_eq!(*i, 1);
}
fn test_box_rec() {
let i = @mut 0;
{
let a = @Box {
x: r(i)
};
}
assert_eq!(*i, 1);
}
pub fn main() {
test_box();
test_rec();
test_tag();
test_tup();
test_unique();
test_box_rec();
} | struct r {
i: @mut int,
}
| random_line_split |
init-res-into-things.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Resources can't be copied, but storing into data structures counts
// as a move unless the stored thing is used afterwards.
struct r {
i: @mut int,
}
struct Box { x: r }
#[unsafe_destructor]
impl Drop for r {
fn drop(&self) {
unsafe {
*(self.i) = *(self.i) + 1;
}
}
}
fn r(i: @mut int) -> r {
r {
i: i
}
}
fn test_box() {
let i = @mut 0;
{
let a = @r(i);
}
assert_eq!(*i, 1);
}
fn test_rec() {
let i = @mut 0;
{
let a = Box {x: r(i)};
}
assert_eq!(*i, 1);
}
fn test_tag() {
enum t {
t0(r),
}
let i = @mut 0;
{
let a = t0(r(i));
}
assert_eq!(*i, 1);
}
fn | () {
let i = @mut 0;
{
let a = (r(i), 0);
}
assert_eq!(*i, 1);
}
fn test_unique() {
let i = @mut 0;
{
let a = ~r(i);
}
assert_eq!(*i, 1);
}
fn test_box_rec() {
let i = @mut 0;
{
let a = @Box {
x: r(i)
};
}
assert_eq!(*i, 1);
}
pub fn main() {
test_box();
test_rec();
test_tag();
test_tup();
test_unique();
test_box_rec();
}
| test_tup | identifier_name |
visus.js | /**
* Used by InPlaceEdit and Uneditable fields
* @module inputex-visus
*/
var lang = Y.Lang,
inputEx = Y.inputEx;
/**
* Contains the various visualization methods
* @class inputEx.visus
* @static
*/
inputEx.visus = {
/**
* Use a rendering function
* options = {visuType: 'func', func: function(data) { ...code here...} }
* @method func
*/
"func": function(options, data) {
return options.func(data);
},
/**
* Use Y.dump
* options = {visuType: 'dump'}
* @method dump
*/
dump: function(options, data) {
return Y.dump(data);
}
};
/**
* Render 'data' using a visualization function described by 'visuOptions'
* @method renderVisu
* @static
* @param {Object} visuOptions The visu parameters object with: visuType: 'myType', ...args...
* @param {Object} data The input data to send to the template
* @param {HTMLElement|String} [parentEl] Optional. If given, the result is set as the content of parentEl
* @return {HTMLElement|String} Either the inserted HTMLElement or the String set to parentEl.innerHTML
*/
inputEx.renderVisu = function(visuOptions,data, parentEl) {
var opts = visuOptions || {};
var visuType = opts.visuType || 'dump';
if( !inputEx.visus.hasOwnProperty(visuType) ) {
throw new Error("inputEx: no visu for visuType: "+visuType);
}
var f = inputEx.visus[visuType];
if( !lang.isFunction(f) ) {
throw new Error("inputEx: no visu for visuType: "+visuType);
}
var v = null;
try {
v = f(opts,data);
}
catch(ex) {
throw new Error("inputEx: error while running visu "+visuType+" : "+ex.message);
}
// Get the node
var node = null;
if(parentEl) {
if(lang.isString(parentEl)) {
node = Y.one(parentEl);
}
else {
node = parentEl;
}
}
// Insert it
if(node) {
if(Y.Lang.isObject(v) && v.tagName ) |
else {
node.innerHTML = v;
}
}
return v;
};
| {
node.innerHTML = "";
node.appendChild(v);
} | conditional_block |
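`inputEx.renderVisu` above is a dispatch-by-name API: it looks up the renderer in the `inputEx.visus` registry by `visuType`, validates that the entry exists and is callable, and only then invokes it. The same pattern, reduced to a language-neutral Python sketch (not part of inputEx):

```python
# The dispatch-by-name pattern used by inputEx.renderVisu above, sketched in
# Python: a registry of renderers keyed by visuType, with the same two
# validation errors raised before the call.
visus = {
    "func": lambda options, data: options["func"](data),
    "dump": lambda options, data: repr(data),
}

def render_visu(visu_options, data):
    opts = visu_options or {}
    visu_type = opts.get("visuType", "dump")
    if visu_type not in visus:
        raise ValueError(f"no visu for visuType: {visu_type}")
    renderer = visus[visu_type]
    if not callable(renderer):
        raise TypeError(f"visu for visuType {visu_type} is not a function")
    return renderer(opts, data)
```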
visus.js | /**
* Used by InPlaceEdit and Uneditable fields
* @module inputex-visus
*/
var lang = Y.Lang,
inputEx = Y.inputEx;
/**
* Contains the various visualization methods
* @class inputEx.visus
* @static
*/ | /**
* Use a rendering function
* options = {visuType: 'func', func: function(data) { ...code here...} }
* @method func
*/
"func": function(options, data) {
return options.func(data);
},
/**
* Use Y.dump
* options = {visuType: 'dump'}
* @method dump
*/
dump: function(options, data) {
return Y.dump(data);
}
};
/**
* Render 'data' using a visualization function described by 'visuOptions'
* @method renderVisu
* @static
* @param {Object} visuOptions The visu parameters object with: visuType: 'myType', ...args...
* @param {Object} data The input data to send to the template
* @param {HTMLElement|String} [parentEl] Optional. If given, the result is set as the content of parentEl
* @return {HTMLElement|String} Either the inserted HTMLElement or the String set to parentEl.innerHTML
*/
inputEx.renderVisu = function(visuOptions,data, parentEl) {
var opts = visuOptions || {};
var visuType = opts.visuType || 'dump';
if( !inputEx.visus.hasOwnProperty(visuType) ) {
throw new Error("inputEx: no visu for visuType: "+visuType);
}
var f = inputEx.visus[visuType];
if( !lang.isFunction(f) ) {
throw new Error("inputEx: no visu for visuType: "+visuType);
}
var v = null;
try {
v = f(opts,data);
}
catch(ex) {
throw new Error("inputEx: error while running visu "+visuType+" : "+ex.message);
}
// Get the node
var node = null;
if(parentEl) {
if(lang.isString(parentEl)) {
node = Y.one(parentEl);
}
else {
node = parentEl;
}
}
// Insert it
if(node) {
if(Y.Lang.isObject(v) && v.tagName ) {
node.innerHTML = "";
node.appendChild(v);
}
else {
node.innerHTML = v;
}
}
return v;
}; | inputEx.visus = {
| random_line_split |
conf.py | # Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'launch'
copyright = '2018, Open Source Robotics Foundation, Inc.' # noqa
author = 'Open Source Robotics Foundation, Inc.'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '0.4.0'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# autodoc settings
autodoc_default_options = {
'special-members': '__init__',
'class-doc-from': 'class',
}
autodoc_class_signature = 'separated'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
|
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'launchdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'launch.tex', 'launch Documentation',
'Open Source Robotics Foundation, Inc.', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'launch', 'launch Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'launch', 'launch Documentation',
author, 'launch', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True | # Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static'] | random_line_split |
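One forward-looking note on the configuration above: recent Sphinx releases have deprecated bare-URL keys in `intersphinx_mapping`. If this project were ever moved to a newer Sphinx, the named form would be expected instead (a sketch, not a change this 2018-era config requires):

```python
# Named-key form of intersphinx_mapping expected by newer Sphinx releases.
intersphinx_mapping = {
    'python': ('https://docs.python.org/', None),
}
```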
MDMC.py | .output import CovalentAutocorrelation, MeanSquareDisplacement
from ..cython_exts.LMC.PBCHelper import AtomBox
from ..LMC.jumprate_generators import JumpRate
logger = logging.getLogger(__name__)
logging.getLogger("matplotlib").setLevel(logging.WARN)
def get_git_version():
from mdlmc.version_hash import commit_hash, commit_date, commit_message
print("# Hello. I am from commit {}".format(commit_hash))
print("# Commit Date: {}".format(commit_date))
print("# Commit Message: {}".format(commit_message))
class | :
"""Implementation of the time-dependent Kinetic Monte Carlo Scheme"""
__show_in_config__ = True
__no_config_parameter__ = ["topology", "atom_box", "jumprate_function"]
def __init__(self, topology: "NeighborTopology", *,
atom_box: "AtomBox",
jumprate_function: "JumpRate",
lattice_size: int,
proton_number: int,
donor_atoms: str,
time_step: float,
extra_atoms: str = None):
"""
Parameters
----------
topology
atom_box
lattice_size
proton_number
jumprate_function
donor_atoms:
name of donor / acceptor atoms
time_step:
trajectory time step in femtoseconds
extra_atoms:
extra atoms used for the determination of the jump rate
"""
self.topology = topology
self._lattice = self._initialize_lattice(lattice_size, proton_number)
# Check whether the topology object has the method "take_lattice_reference"
if hasattr(self.topology, "take_lattice_reference"):
logger.debug("topology has method take_lattice_reference")
self.topology.take_lattice_reference(self._lattice)
self._atom_box = atom_box
self._jumprate_function = jumprate_function
self._donor_atoms = donor_atoms
self._time_step = time_step
self._extra_atoms = extra_atoms
def _initialize_lattice(self, lattice_size, proton_number):
lattice = np.zeros(lattice_size, dtype=np.int32)
lattice[:proton_number] = range(1, proton_number + 1)
np.random.shuffle(lattice)
return lattice
def __iter__(self) -> Iterator[np.ndarray]:
yield from self.continuous_output()
def continuous_output(self):
current_frame_number = 0
topo = self.topology
lattice = self.lattice
topology_iterator, last_topo = remember_last_element(iter(self.topology))
jumprate_iterator, last_jumprates = remember_last_element(
jumprate_generator(self._jumprate_function, self.lattice, topology_iterator))
sum_of_jumprates = (np.sum(jumpr) for _, _, jumpr in jumprate_iterator)
kmc_routine = self.fastforward_to_next_jump(sum_of_jumprates,
self._time_step)
for f, df, kmc_time in kmc_routine:
current_time = kmc_time
logger.debug("Next jump at time %.2f", current_time)
logger.debug("df = %s; dt = %s", df, kmc_time)
logger.debug("Go to frame %s", f)
for frame in self.topology.get_cached_frames():
yield current_frame_number, current_time, frame
current_frame_number += 1
proton_idx = self.move_proton(*last_jumprates(), lattice)
topo.update_time_of_last_jump(proton_idx, kmc_time)
def move_proton(self, start, dest, jump_rates, lattice):
"""Given the hopping rates between the acceptor atoms, choose a connection randomly and
move the proton."""
start_occupied_destination_free = filter_allowed_transitions(start, dest, lattice)
start = start[start_occupied_destination_free]
dest = dest[start_occupied_destination_free]
jump_rates = jump_rates[start_occupied_destination_free]
cumsum = np.cumsum(jump_rates)
random_draw = np.random.uniform(0, cumsum[-1])
transition_idx = np.searchsorted(cumsum, random_draw)
start_idx = start[transition_idx]
destination_idx = dest[transition_idx]
proton_idx = self._lattice[start_idx]
logger.debug("Particle %s moves from %s to %s", proton_idx, start_idx, destination_idx)
logger.debug("lattice[%s] = %s", destination_idx, self._lattice[destination_idx])
self._lattice[destination_idx] = proton_idx
self._lattice[start_idx] = 0
return proton_idx
@staticmethod
def fastforward_to_next_jump(jumprates, dt):
"""Implements Kinetic Monte Carlo with time-dependent rates.
Parameters
----------
jumprates : generator / iterator
Unit: femtosecond^{-1}
Proton jump rate from an oxygen site to any neighbor
dt : float
Trajectory time step
Yields
------
sweep : int
Frame index at which the next jump occurs
delta_frame : int
Number of frames advanced since the previous jump
kmc_time : float
Time of the next jump
"""
sweep, kmc_time = 0, 0
current_rate = next(jumprates)
while True:
time_selector = -np.log(1 - np.random.random())
# Handle case where time selector is so small that the next frame is not reached
t_trial = time_selector / current_rate
if (kmc_time + t_trial) // dt == kmc_time // dt:
kmc_time += t_trial
delta_frame = 0
else:
delta_t, delta_frame = dt - kmc_time % dt, 1
current_probsum = current_rate * delta_t
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
while next_probsum < time_selector:
delta_frame += 1
current_probsum = next_probsum
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
rest = time_selector - current_probsum
delta_t += (delta_frame - 1) * dt + rest / next_rate
kmc_time += delta_t
sweep += delta_frame
yield sweep, delta_frame, kmc_time
def xyz_output(self, particle_type: str = "H"):
for f, t, frame in self:
particle_positions = frame[self.donor_atoms][self.occupied_sites]
particle_positions.atom_names = particle_type
yield frame.append(particle_positions)
def observables_output(self, reset_frequency: int, print_frequency: int):
"""
Parameters
----------
reset_frequency: int
Number of frames after which the observables are reset
print_frequency: int
Number of frames between two consecutive outputs
Yields
------
Tuples of (frame number, KMC time, mean square displacement, covalent autocorrelation)
"""
kmc_iterator = iter(self)
donor_sites = self.donor_atoms
current_frame_number, current_time, frame = next(kmc_iterator)
autocorr = CovalentAutocorrelation(self.lattice)
msd = MeanSquareDisplacement(frame[donor_sites].atom_positions, self.lattice, self._atom_box)
for current_frame_number, current_time, frame in kmc_iterator:
if current_frame_number % reset_frequency == 0:
autocorr.reset(self.lattice)
msd.reset_displacement()
msd.update_displacement(frame[donor_sites].atom_positions, self.lattice)
if current_frame_number % print_frequency == 0:
auto = autocorr.calculate(self.lattice)
msd_result = msd.msd()
yield current_frame_number, current_time, msd_result, auto
@property
def lattice(self):
return self._lattice
@property
def donor_atoms(self):
# TODO: not needed (?)
return self._donor_atoms
@property
def extra_atoms(self):
return self._extra_atoms
@property
def occupied_sites(self):
return np.where(self._lattice > 0)[0]
def jumprate_generator(jumprate_function, lattice, topology_iterator):
for start, destination, *colvars in topology_iterator:
omega = jumprate_function(*colvars)
# select only jumprates from donors which are occupied
start_occupied_destination_free = filter_allowed_transitions(start, destination, lattice)
omega_allowed = omega[start_occupied_destination_free]
start_allowed = start[start_occupied_destination_free]
destination_allowed = destination[start_occupied_destination_free]
yield start_allowed, destination_allowed, omega_allowed
def filter_allowed_transitions(start, destination, lattice):
lattice_is_occupied = lattice > 0
occupied_sites, = np.where(lattice_is_occupied)
unoccupied_sites, = np.where(~lattice_is_occupied)
occupied_mask = np.in1d(start, occupied_sites)
unoccupied_mask = np.in1d(destination, unoccupied_sites)
start_occupied_destination_free = occupied_mask & unoccupied_mask
return start_occupied_destination_free
class Output(metaclass=ABCMeta):
__show_in_config__ = True
__no_config_parameter__ = ["kmc"]
class XYZOutput(Output):
def __init__(self,
kmc: KMCLattice,
particle_type: str) -> None:
self.kmc = k | KMCLattice | identifier_name |
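The draw `time_selector = -np.log(1 - np.random.random())` in `fastforward_to_next_jump` above is inverse-transform sampling: for a constant total rate lam, the waiting time `time_selector / lam` is exponentially distributed with mean `1 / lam`, which is the standard kinetic Monte Carlo result. A quick numerical check:

```python
# Numerical check of the waiting-time sampling used in
# fastforward_to_next_jump above: for a constant total rate lam, the draw
# t = -log(1 - u) / lam is exponential with mean 1/lam.
import numpy as np

rng = np.random.default_rng(0)
lam = 0.25                                       # total jump rate, 1/fs
u = rng.random(1_000_000)
t = -np.log(1 - u) / lam
print(t.mean())                                  # ~4.0 == 1/lam
print(np.isclose(t.mean(), 1 / lam, rtol=1e-2))  # True
```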
MDMC.py | .output import CovalentAutocorrelation, MeanSquareDisplacement
from ..cython_exts.LMC.PBCHelper import AtomBox
from ..LMC.jumprate_generators import JumpRate
logger = logging.getLogger(__name__)
logging.getLogger("matplotlib").setLevel(logging.WARN)
def get_git_version():
from mdlmc.version_hash import commit_hash, commit_date, commit_message
print("# Hello. I am from commit {}".format(commit_hash))
print("# Commit Date: {}".format(commit_date))
print("# Commit Message: {}".format(commit_message))
class KMCLattice:
"""Implementation of the time-dependent Kinetic Monte Carlo Scheme"""
__show_in_config__ = True
__no_config_parameter__ = ["topology", "atom_box", "jumprate_function"]
def __init__(self, topology: "NeighborTopology", *,
atom_box: "AtomBox",
jumprate_function: "JumpRate",
lattice_size: int,
proton_number: int,
donor_atoms: str,
time_step: float,
extra_atoms: str = None):
"""
Parameters
----------
topology
atom_box
lattice_size
proton_number
jumprate_function
donor_atoms:
name of donor / acceptor atoms
time_step:
trajectory time step in femtoseconds
extra_atoms:
extra atoms used for the determination of the jump rate
"""
self.topology = topology
self._lattice = self._initialize_lattice(lattice_size, proton_number)
# Check whether the topology object has the method "take_lattice_reference"
if hasattr(self.topology, "take_lattice_reference"):
logger.debug("topology has method take_lattice_reference")
self.topology.take_lattice_reference(self._lattice)
self._atom_box = atom_box
self._jumprate_function = jumprate_function
self._donor_atoms = donor_atoms
self._time_step = time_step
self._extra_atoms = extra_atoms
def _initialize_lattice(self, lattice_size, proton_number):
lattice = np.zeros(lattice_size, dtype=np.int32)
lattice[:proton_number] = range(1, proton_number + 1)
np.random.shuffle(lattice)
return lattice
def __iter__(self) -> Iterator[np.ndarray]:
yield from self.continuous_output()
def continuous_output(self):
current_frame_number = 0
topo = self.topology
lattice = self.lattice
topology_iterator, last_topo = remember_last_element(iter(self.topology))
jumprate_iterator, last_jumprates = remember_last_element(
jumprate_generator(self._jumprate_function, self.lattice, topology_iterator))
sum_of_jumprates = (np.sum(jumpr) for _, _, jumpr in jumprate_iterator)
kmc_routine = self.fastforward_to_next_jump(sum_of_jumprates,
self._time_step)
for f, df, kmc_time in kmc_routine:
current_time = kmc_time
logger.debug("Next jump at time %.2f", current_time)
logger.debug("df = %s; dt = %s", df, kmc_time)
logger.debug("Go to frame %s", f)
for frame in self.topology.get_cached_frames():
yield current_frame_number, current_time, frame
current_frame_number += 1
proton_idx = self.move_proton(*last_jumprates(), lattice)
topo.update_time_of_last_jump(proton_idx, kmc_time)
def move_proton(self, start, dest, jump_rates, lattice):
"""Given the hopping rates between the acceptor atoms, choose a connection randomly and
move the proton."""
start_occupied_destination_free = filter_allowed_transitions(start, dest, lattice)
start = start[start_occupied_destination_free]
dest = dest[start_occupied_destination_free]
jump_rates = jump_rates[start_occupied_destination_free]
cumsum = np.cumsum(jump_rates)
random_draw = np.random.uniform(0, cumsum[-1])
transition_idx = np.searchsorted(cumsum, random_draw)
start_idx = start[transition_idx]
destination_idx = dest[transition_idx]
proton_idx = self._lattice[start_idx]
logger.debug("Particle %s moves from %s to %s", proton_idx, start_idx, destination_idx)
logger.debug("lattice[%s] = %s", destination_idx, self._lattice[destination_idx])
self._lattice[destination_idx] = proton_idx
self._lattice[start_idx] = 0
return proton_idx
@staticmethod
def fastforward_to_next_jump(jumprates, dt):
"""Implements Kinetic Monte Carlo with time-dependent rates.
Parameters
----------
jumprates : generator / iterator
Unit: femtosecond^{-1}
Proton jump rate from an oxygen site to any neighbor
dt : float
Trajectory time step
Yields
------
sweep : int
Frame index at which the next jump occurs
delta_frame : int
Number of frames advanced since the previous jump
kmc_time : float
Time of the next jump
"""
sweep, kmc_time = 0, 0
current_rate = next(jumprates)
while True:
time_selector = -np.log(1 - np.random.random())
# Handle case where time selector is so small that the next frame is not reached
t_trial = time_selector / current_rate
if (kmc_time + t_trial) // dt == kmc_time // dt:
kmc_time += t_trial
delta_frame = 0
else:
delta_t, delta_frame = dt - kmc_time % dt, 1
current_probsum = current_rate * delta_t
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
while next_probsum < time_selector:
delta_frame += 1
current_probsum = next_probsum
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
rest = time_selector - current_probsum
delta_t += (delta_frame - 1) * dt + rest / next_rate
kmc_time += delta_t
sweep += delta_frame
yield sweep, delta_frame, kmc_time
def xyz_output(self, particle_type: str = "H"):
for f, t, frame in self:
particle_positions = frame[self.donor_atoms][self.occupied_sites]
particle_positions.atom_names = particle_type
yield frame.append(particle_positions)
def observables_output(self, reset_frequency: int, print_frequency: int):
"""
Parameters
----------
reset_frequency: int
Number of frames after which the observables are reset
print_frequency: int
Number of frames between two consecutive outputs
Yields
------
Tuples of (frame number, KMC time, mean square displacement, covalent autocorrelation)
"""
kmc_iterator = iter(self)
donor_sites = self.donor_atoms
current_frame_number, current_time, frame = next(kmc_iterator)
autocorr = CovalentAutocorrelation(self.lattice)
msd = MeanSquareDisplacement(frame[donor_sites].atom_positions, self.lattice, self._atom_box)
for current_frame_number, current_time, frame in kmc_iterator:
if current_frame_number % reset_frequency == 0:
autocorr.reset(self.lattice)
msd.reset_displacement()
msd.update_displacement(frame[donor_sites].atom_positions, self.lattice)
if current_frame_number % print_frequency == 0:
auto = autocorr.calculate(self.lattice)
msd_result = msd.msd()
yield current_frame_number, current_time, msd_result, auto
@property
def lattice(self):
return self._lattice
@property
def donor_atoms(self):
# TODO: not needed (?)
return self._donor_atoms
@property
def extra_atoms(self):
return self._extra_atoms
@property
def occupied_sites(self):
return np.where(self._lattice > 0)[0]
def jumprate_generator(jumprate_function, lattice, topology_iterator):
for start, destination, *colvars in topology_iterator:
|
def filter_allowed_transitions(start, destination, lattice):
lattice_is_occupied = lattice > 0
occupied_sites, = np.where(lattice_is_occupied)
unoccupied_sites, = np.where(~lattice_is_occupied)
occupied_mask = np.in1d(start, occupied_sites)
unoccupied_mask = np.in1d(destination, unoccupied_sites)
start_occupied_destination_free = occupied_mask & unoccupied_mask
return start_occupied_destination_free
class Output(metaclass=ABCMeta):
__show_in_config__ = True
__no_config_parameter__ = ["kmc"]
class XYZOutput(Output):
def __init__(self,
kmc: KMCLattice,
particle_type: str) -> None:
self.kmc = kmc | omega = jumprate_function(*colvars)
# select only jumprates from donors which are occupied
start_occupied_destination_free = filter_allowed_transitions(start, destination, lattice)
omega_allowed = omega[start_occupied_destination_free]
start_allowed = start[start_occupied_destination_free]
destination_allowed = destination[start_occupied_destination_free]
yield start_allowed, destination_allowed, omega_allowed | conditional_block |
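`move_proton` above selects a transition with probability proportional to its jump rate: draw uniformly on [0, sum(rates)) and locate the draw in the cumulative sum. The selection step in isolation:

```python
# The rate-proportional selection from move_proton above, in isolation:
# cumulative sum + searchsorted implements roulette-wheel selection.
import numpy as np

jump_rates = np.array([0.1, 0.5, 0.2])    # made-up rates for illustration
cumsum = np.cumsum(jump_rates)            # [0.1, 0.6, 0.8]
draw = np.random.uniform(0, cumsum[-1])   # uniform on [0, 0.8)
idx = np.searchsorted(cumsum, draw)       # P(idx == k) == jump_rates[k] / 0.8
```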
MDMC.py | .output import CovalentAutocorrelation, MeanSquareDisplacement
from ..cython_exts.LMC.PBCHelper import AtomBox
from ..LMC.jumprate_generators import JumpRate
logger = logging.getLogger(__name__)
logging.getLogger("matplotlib").setLevel(logging.WARN)
def get_git_version():
from mdlmc.version_hash import commit_hash, commit_date, commit_message
print("# Hello. I am from commit {}".format(commit_hash))
print("# Commit Date: {}".format(commit_date))
print("# Commit Message: {}".format(commit_message))
class KMCLattice:
"""Implementation of the time-dependent Kinetic Monte Carlo Scheme"""
__show_in_config__ = True
__no_config_parameter__ = ["topology", "atom_box", "jumprate_function"]
def __init__(self, topology: "NeighborTopology", *,
atom_box: "AtomBox",
jumprate_function: "JumpRate",
lattice_size: int,
proton_number: int,
donor_atoms: str,
time_step: float,
extra_atoms: str = None):
"""
Parameters
----------
topology
atom_box
lattice_size
proton_number
jumprate_function
donor_atoms:
name of donor / acceptor atoms
time_step:
trajectory time step in femtoseconds
extra_atoms:
extra atoms used for the determination of the jump rate
"""
self.topology = topology
self._lattice = self._initialize_lattice(lattice_size, proton_number)
# Check whether the topology object has the method "take_lattice_reference"
if hasattr(self.topology, "take_lattice_reference"):
logger.debug("topology has method take_lattice_reference")
self.topology.take_lattice_reference(self._lattice)
self._atom_box = atom_box
self._jumprate_function = jumprate_function
self._donor_atoms = donor_atoms
self._time_step = time_step
self._extra_atoms = extra_atoms
def _initialize_lattice(self, lattice_size, proton_number):
lattice = np.zeros(lattice_size, dtype=np.int32)
lattice[:proton_number] = range(1, proton_number + 1)
np.random.shuffle(lattice)
return lattice
def __iter__(self) -> Iterator[np.ndarray]:
yield from self.continuous_output()
def continuous_output(self):
current_frame_number = 0
topo = self.topology
lattice = self.lattice
topology_iterator, last_topo = remember_last_element(iter(self.topology))
jumprate_iterator, last_jumprates = remember_last_element(
jumprate_generator(self._jumprate_function, self.lattice, topology_iterator))
sum_of_jumprates = (np.sum(jumpr) for _, _, jumpr in jumprate_iterator)
kmc_routine = self.fastforward_to_next_jump(sum_of_jumprates,
self._time_step)
for f, df, kmc_time in kmc_routine:
current_time = kmc_time
logger.debug("Next jump at time %.2f", current_time)
logger.debug("df = %s; dt = %s", df, kmc_time)
logger.debug("Go to frame %s", f)
for frame in self.topology.get_cached_frames():
yield current_frame_number, current_time, frame
current_frame_number += 1
proton_idx = self.move_proton(*last_jumprates(), lattice)
topo.update_time_of_last_jump(proton_idx, kmc_time)
def move_proton(self, start, dest, jump_rates, lattice):
"""Given the hopping rates between the acceptor atoms, choose a connection randomly and
move the proton."""
start_occupied_destination_free = filter_allowed_transitions(start, dest, lattice)
start = start[start_occupied_destination_free]
dest = dest[start_occupied_destination_free]
jump_rates = jump_rates[start_occupied_destination_free]
cumsum = np.cumsum(jump_rates)
random_draw = np.random.uniform(0, cumsum[-1])
transition_idx = np.searchsorted(cumsum, random_draw)
start_idx = start[transition_idx]
destination_idx = dest[transition_idx]
proton_idx = self._lattice[start_idx]
logger.debug("Particle %s moves from %s to %s", proton_idx, start_idx, destination_idx)
logger.debug("lattice[%s] = %s", destination_idx, self._lattice[destination_idx])
self._lattice[destination_idx] = proton_idx
self._lattice[start_idx] = 0
return proton_idx
@staticmethod
def fastforward_to_next_jump(jumprates, dt):
"""Implements Kinetic Monte Carlo with time-dependent rates.
Parameters
----------
jumprates : generator / iterator
Unit: femtosecond^{-1}
Proton jump rate from an oxygen site to any neighbor
dt : float
Trajectory time step
Yields
------
sweep : int
Frame index at which the next jump occurs
delta_frame : int
Number of frames advanced since the previous jump
kmc_time : float
Time of the next jump
"""
sweep, kmc_time = 0, 0
current_rate = next(jumprates)
while True:
time_selector = -np.log(1 - np.random.random())
# Handle case where time selector is so small that the next frame is not reached
t_trial = time_selector / current_rate
if (kmc_time + t_trial) // dt == kmc_time // dt:
kmc_time += t_trial
delta_frame = 0
else:
delta_t, delta_frame = dt - kmc_time % dt, 1
current_probsum = current_rate * delta_t
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
while next_probsum < time_selector:
delta_frame += 1
current_probsum = next_probsum
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
rest = time_selector - current_probsum
delta_t += (delta_frame - 1) * dt + rest / next_rate
kmc_time += delta_t
sweep += delta_frame
yield sweep, delta_frame, kmc_time
def xyz_output(self, particle_type: str = "H"):
for f, t, frame in self:
particle_positions = frame[self.donor_atoms][self.occupied_sites]
particle_positions.atom_names = particle_type
yield frame.append(particle_positions)
def observables_output(self, reset_frequency: int, print_frequency: int):
"""
Parameters
----------
reset_frequency: int
Number of frames after which the observables are reset
print_frequency: int
Number of frames between two consecutive outputs
Yields
------
Tuples of (frame number, KMC time, mean square displacement, covalent autocorrelation)
"""
kmc_iterator = iter(self)
donor_sites = self.donor_atoms
current_frame_number, current_time, frame = next(kmc_iterator)
autocorr = CovalentAutocorrelation(self.lattice)
msd = MeanSquareDisplacement(frame[donor_sites].atom_positions, self.lattice, self._atom_box)
for current_frame_number, current_time, frame in kmc_iterator:
if current_frame_number % reset_frequency == 0:
autocorr.reset(self.lattice)
msd.reset_displacement()
msd.update_displacement(frame[donor_sites].atom_positions, self.lattice)
if current_frame_number % print_frequency == 0:
auto = autocorr.calculate(self.lattice)
msd_result = msd.msd()
yield current_frame_number, current_time, msd_result, auto
@property
def lattice(self):
|
@property
def donor_atoms(self):
# TODO: not needed (?)
return self._donor_atoms
@property
def extra_atoms(self):
return self._extra_atoms
@property
def occupied_sites(self):
return np.where(self._lattice > 0)[0]
def jumprate_generator(jumprate_function, lattice, topology_iterator):
for start, destination, *colvars in topology_iterator:
omega = jumprate_function(*colvars)
# select only jumprates from donors which are occupied
start_occupied_destination_free = filter_allowed_transitions(start, destination, lattice)
omega_allowed = omega[start_occupied_destination_free]
start_allowed = start[start_occupied_destination_free]
destination_allowed = destination[start_occupied_destination_free]
yield start_allowed, destination_allowed, omega_allowed
def filter_allowed_transitions(start, destination, lattice):
lattice_is_occupied = lattice > 0
occupied_sites, = np.where(lattice_is_occupied)
unoccupied_sites, = np.where(~lattice_is_occupied)
occupied_mask = np.in1d(start, occupied_sites)
unoccupied_mask = np.in1d(destination, unoccupied_sites)
start_occupied_destination_free = occupied_mask & unoccupied_mask
return start_occupied_destination_free
class Output(metaclass=ABCMeta):
__show_in_config__ = True
__no_config_parameter__ = ["kmc"]
class XYZOutput(Output):
def __init__(self,
kmc: KMCLattice,
particle_type: str) -> None:
self.kmc = kmc | return self._lattice | identifier_body |
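`continuous_output` above depends on a helper `remember_last_element` whose definition lies outside this excerpt. Judging purely from its call sites (it returns a wrapped iterator plus a zero-argument getter for the most recently yielded element, later unpacked via `*last_jumprates()`), a plausible reconstruction is sketched below; this is a guess, not the project's actual code.

```python
# remember_last_element is imported outside this excerpt; from its call sites
# (it returns a wrapped iterator plus a zero-argument getter for the most
# recently yielded element) a plausible implementation looks like this.
# This is a reconstruction, not the project's actual code.
def remember_last_element(iterable):
    memo = {}
    def wrapped():
        for item in iterable:
            memo["last"] = item
            yield item
    def last():
        return memo.get("last")
    return wrapped(), last
```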
MDMC.py | .output import CovalentAutocorrelation, MeanSquareDisplacement
from ..cython_exts.LMC.PBCHelper import AtomBox
from ..LMC.jumprate_generators import JumpRate
logger = logging.getLogger(__name__)
logging.getLogger("matplotlib").setLevel(logging.WARN)
def get_git_version():
from mdlmc.version_hash import commit_hash, commit_date, commit_message
print("# Hello. I am from commit {}".format(commit_hash))
print("# Commit Date: {}".format(commit_date))
print("# Commit Message: {}".format(commit_message))
class KMCLattice:
"""Implementation of the time-dependent Kinetic Monte Carlo Scheme"""
__show_in_config__ = True
__no_config_parameter__ = ["topology", "atom_box", "jumprate_function"]
def __init__(self, topology: "NeighborTopology", *,
atom_box: "AtomBox",
jumprate_function: "JumpRate",
lattice_size: int,
proton_number: int,
donor_atoms: str,
time_step: float,
extra_atoms: str = None):
"""
Parameters
----------
topology
atom_box
lattice_size
proton_number
jumprate_function
donor_atoms:
name of donor / acceptor atoms
time_step:
trajectory time step in femtoseconds
extra_atoms:
extra atoms used for the determination of the jump rate
"""
self.topology = topology
self._lattice = self._initialize_lattice(lattice_size, proton_number)
# Check whether the topology object has the method "take_lattice_reference"
if hasattr(self.topology, "take_lattice_reference"):
logger.debug("topology has method take_lattice_reference")
self.topology.take_lattice_reference(self._lattice)
self._atom_box = atom_box
self._jumprate_function = jumprate_function
self._donor_atoms = donor_atoms
self._time_step = time_step
self._extra_atoms = extra_atoms
def _initialize_lattice(self, lattice_size, proton_number):
lattice = np.zeros(lattice_size, dtype=np.int32)
lattice[:proton_number] = range(1, proton_number + 1)
np.random.shuffle(lattice)
return lattice
def __iter__(self) -> Iterator[np.ndarray]:
yield from self.continuous_output()
def continuous_output(self):
current_frame_number = 0
topo = self.topology
lattice = self.lattice
topology_iterator, last_topo = remember_last_element(iter(self.topology))
jumprate_iterator, last_jumprates = remember_last_element(
jumprate_generator(self._jumprate_function, self.lattice, topology_iterator))
sum_of_jumprates = (np.sum(jumpr) for _, _, jumpr in jumprate_iterator)
kmc_routine = self.fastforward_to_next_jump(sum_of_jumprates,
self._time_step)
for f, df, kmc_time in kmc_routine:
current_time = kmc_time
logger.debug("Next jump at time %.2f", current_time)
logger.debug("df = %s; dt = %s", df, kmc_time)
logger.debug("Go to frame %s", f)
for frame in self.topology.get_cached_frames():
yield current_frame_number, current_time, frame
current_frame_number += 1
proton_idx = self.move_proton(*last_jumprates(), lattice)
topo.update_time_of_last_jump(proton_idx, kmc_time)
def move_proton(self, start, dest, jump_rates, lattice):
"""Given the hopping rates between the acceptor atoms, choose a connection randomly and
move the proton."""
start_occupied_destination_free = filter_allowed_transitions(start, dest, lattice)
start = start[start_occupied_destination_free]
dest = dest[start_occupied_destination_free]
jump_rates = jump_rates[start_occupied_destination_free]
cumsum = np.cumsum(jump_rates)
random_draw = np.random.uniform(0, cumsum[-1])
transition_idx = np.searchsorted(cumsum, random_draw)
start_idx = start[transition_idx]
destination_idx = dest[transition_idx]
proton_idx = self._lattice[start_idx]
logger.debug("Particle %s moves from %s to %s", proton_idx, start_idx, destination_idx)
logger.debug("lattice[%s] = %s", destination_idx, self._lattice[destination_idx])
self._lattice[destination_idx] = proton_idx
self._lattice[start_idx] = 0
return proton_idx
@staticmethod
def fastforward_to_next_jump(jumprates, dt):
"""Implements Kinetic Monte Carlo with time-dependent rates.
Parameters
----------
jumprates : generator / iterator
Unit: femtosecond^{-1}
Proton jump rate from an oxygen site to any neighbor
dt : float
Trajectory time step
Yields
------
sweep : int
Frame index at which the next jump occurs
delta_frame : int
Number of frames advanced since the previous jump
kmc_time : float
Time of the next jump
"""
sweep, kmc_time = 0, 0
current_rate = next(jumprates)
while True:
time_selector = -np.log(1 - np.random.random())
# Handle case where time selector is so small that the next frame is not reached
t_trial = time_selector / current_rate
if (kmc_time + t_trial) // dt == kmc_time // dt:
kmc_time += t_trial
delta_frame = 0
else:
delta_t, delta_frame = dt - kmc_time % dt, 1
current_probsum = current_rate * delta_t
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
while next_probsum < time_selector:
delta_frame += 1
current_probsum = next_probsum
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
rest = time_selector - current_probsum
delta_t += (delta_frame - 1) * dt + rest / next_rate
kmc_time += delta_t
sweep += delta_frame
yield sweep, delta_frame, kmc_time
def xyz_output(self, particle_type: str = "H"):
for f, t, frame in self:
particle_positions = frame[self.donor_atoms][self.occupied_sites]
particle_positions.atom_names = particle_type
yield frame.append(particle_positions)
def observables_output(self, reset_frequency: int, print_frequency: int):
"""
Parameters
----------
reset_frequency: int
print_frequency: int
Returns
-------
"""
kmc_iterator = iter(self)
donor_sites = self.donor_atoms
current_frame_number, current_time, frame = next(kmc_iterator)
autocorr = CovalentAutocorrelation(self.lattice)
msd = MeanSquareDisplacement(frame[donor_sites].atom_positions, self.lattice, self._atom_box)
for current_frame_number, current_time, frame in kmc_iterator: | autocorr.reset(self.lattice)
msd.reset_displacement()
msd.update_displacement(frame[donor_sites].atom_positions, self.lattice)
if current_frame_number % print_frequency == 0:
auto = autocorr.calculate(self.lattice)
msd_result = msd.msd()
yield current_frame_number, current_time, msd_result, auto
@property
def lattice(self):
return self._lattice
@property
def donor_atoms(self):
# TODO: not needed (?)
return self._donor_atoms
@property
def extra_atoms(self):
return self._extra_atoms
@property
def occupied_sites(self):
return np.where(self._lattice > 0)[0]
def jumprate_generator(jumprate_function, lattice, topology_iterator):
for start, destination, *colvars in topology_iterator:
omega = jumprate_function(*colvars)
# select only jumprates from donors which are occupied
start_occupied_destination_free = filter_allowed_transitions(start, destination, lattice)
omega_allowed = omega[start_occupied_destination_free]
start_allowed = start[start_occupied_destination_free]
destination_allowed = destination[start_occupied_destination_free]
yield start_allowed, destination_allowed, omega_allowed
def filter_allowed_transitions(start, destination, lattice):
lattice_is_occupied = lattice > 0
occupied_sites, = np.where(lattice_is_occupied)
unoccupied_sites, = np.where(~lattice_is_occupied)
occupied_mask = np.in1d(start, occupied_sites)
unoccupied_mask = np.in1d(destination, unoccupied_sites)
start_occupied_destination_free = occupied_mask & unoccupied_mask
return start_occupied_destination_free
class Output(metaclass=ABCMeta):
__show_in_config__ = True
__no_config_parameter__ = ["kmc"]
class XYZOutput(Output):
def __init__(self,
kmc: KMCLattice,
particle_type: str) -> None:
self.kmc = kmc | if current_frame_number % reset_frequency == 0: | random_line_split |
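As an aside, the fast-forward logic in fastforward_to_next_jump above reduces to a short loop. The following is a minimal, illustrative Python sketch, assuming piecewise-constant rates (one total jump rate per trajectory frame); the names are mine, not part of the module:

import numpy as np

def next_jump_time(frame_rates, dt, rng=np.random.default_rng()):
    # Exponentially distributed "budget" of integrated rate before the jump.
    threshold = -np.log(1 - rng.random())
    elapsed, accumulated = 0.0, 0.0
    for rate in frame_rates:  # one total jump rate per trajectory frame
        if accumulated + rate * dt >= threshold:
            # The event falls inside this frame; interpolate the exact time.
            return elapsed + (threshold - accumulated) / rate
        accumulated += rate * dt
        elapsed += dt
    return None  # no jump within the available trajectory

# With a constant rate the result is exponential with mean 1/rate:
print(next_jump_time([0.5] * 100, dt=1.0))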
test_del_group.py | # -*- coding: utf-8 -*-
from random import randrange
from model.group import Group
import random
import pytest
def test_delete_some_group(app, db, check_ui):
if len(db.get_group_list()) == 0:
app.group.create(Group(name = "test"))
with pytest.allure.step("Given a group list"):
old_groups = db.get_group_list()
with pytest.allure.step("When get random group"):
group = random.choice(old_groups)
with pytest.allure.step("When I delete %s" %group):
app.group.delete_group_by_id(group.id)
with pytest.allure.step("Then the new group list is equal to the old list with the deleted group"):
new_groups = db.get_group_list()
assert len(old_groups) - 1 == len(new_groups)
old_groups.remove(group)
assert old_groups == new_groups
if check_ui:
| assert sorted(new_groups, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max) | conditional_block |
|
test_del_group.py | # -*- coding: utf-8 -*-
from random import randrange
from model.group import Group
import random
import pytest
def | (app, db, check_ui):
if len(db.get_group_list()) == 0:
app.group.create(Group(name = "test"))
with pytest.allure.step("Given a group list"):
old_groups = db.get_group_list()
with pytest.allure.step("When get random group"):
group = random.choice(old_groups)
with pytest.allure.step("When I delete %s" %group):
app.group.delete_group_by_id(group.id)
with pytest.allure.step("Then the new group list is equal to the old list with the deleted group"):
new_groups = db.get_group_list()
assert len(old_groups) - 1 == len(new_groups)
old_groups.remove(group)
assert old_groups == new_groups
if check_ui:
assert sorted(new_groups, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max) | test_delete_some_group | identifier_name |
test_del_group.py | # -*- coding: utf-8 -*-
from random import randrange
from model.group import Group
import random
import pytest
def test_delete_some_group(app, db, check_ui):
| if len(db.get_group_list()) == 0:
app.group.create(Group(name = "test"))
with pytest.allure.step("Given a group list"):
old_groups = db.get_group_list()
with pytest.allure.step("When get random group"):
group = random.choice(old_groups)
with pytest.allure.step("When I delete %s" %group):
app.group.delete_group_by_id(group.id)
with pytest.allure.step("Then the new group list is equal to the old list with the deleted group"):
new_groups = db.get_group_list()
assert len(old_groups) - 1 == len(new_groups)
old_groups.remove(group)
assert old_groups == new_groups
if check_ui:
assert sorted(new_groups, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max) | identifier_body |
|
test_del_group.py | # -*- coding: utf-8 -*-
from random import randrange
from model.group import Group
import random
import pytest
def test_delete_some_group(app, db, check_ui):
if len(db.get_group_list()) == 0:
app.group.create(Group(name = "test"))
with pytest.allure.step("Given a group list"):
old_groups = db.get_group_list()
with pytest.allure.step("When get random group"):
group = random.choice(old_groups)
with pytest.allure.step("When I delete %s" %group):
app.group.delete_group_by_id(group.id)
with pytest.allure.step("Then the new group list is equal to the old list with the deleted group"):
new_groups = db.get_group_list()
assert len(old_groups) - 1 == len(new_groups) | if check_ui:
assert sorted(new_groups, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max) | old_groups.remove(group)
assert old_groups == new_groups | random_line_split |
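The four rows above are variants of one delete-random-group test; its core invariant can be restated compactly. This is a hedged sketch with hypothetical names, not part of the fixture API:

def check_delete_invariant(old_groups, new_groups, deleted, ui_groups=None):
    # The database list shrinks by exactly one and loses only `deleted`.
    assert len(new_groups) == len(old_groups) - 1
    assert sorted(g for g in old_groups if g != deleted) == sorted(new_groups)
    # Optional UI cross-check, mirroring the check_ui branch above.
    if ui_groups is not None:
        assert sorted(new_groups) == sorted(ui_groups)

check_delete_invariant(["a", "b", "c"], ["a", "c"], "b", ui_groups=["c", "a"])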
keyframes.rs | pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl KeyframePercentage {
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage > 1. || percentage < 0. {
return Err(());
}
KeyframePercentage::new(percentage)
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct | {
pub selector: KeyframeSelector,
/// `!important` is not allowed in keyframe declarations,
/// so the second value of these tuples is always `Importance::Normal`.
/// But including them enables `compute_style_for_animation_step` to create a `ApplicableDeclarationBlock`
/// by cloning an `Arc<_>` (incrementing a reference count) rather than re-creating a `Vec<_>`.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(write!(dest, "{}%", iter.next().unwrap().0));
for percentage in iter {
try!(write!(dest, ", "));
try!(write!(dest, "{}%", percentage.0));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
pub fn parse(css: &str, origin: Origin,
base_url: ServoUrl,
extra_data: ParserContextExtraData) -> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = Box::new(MemoryHoleReporter);
let context = ParserContext::new_with_extra_data(origin, &base_url,
error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthesized keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
// TODO: Find a better name for this?
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// See `Keyframe::declarations`’s docs about the presence of `Importance`.
Declarations {
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Whether an animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[allow(unsafe_code)]
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read().declarations.iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animatable properties.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation. Note that it's not
/// defined what happens when a property is not on a keyframe, so we only peek
/// the props of the first one.
///
/// In practice, browsers seem to try to do their best job at it, so we might
/// want to go through all the actual keyframes and deduplicate properties.
#[allow(unsafe_code)]
fn get_animated_properties(keyframe: &Keyframe) -> Vec<TransitionProperty> {
let mut ret = vec![];
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for &(ref declaration, _) in keyframe.block.read().declarations.iter() {
if let Some(property) = TransitionProperty::from_declaration(declaration) {
ret.push(property);
}
}
ret
}
impl KeyframesAnimation {
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Option<Self> {
if keyframes.is_empty() {
return None;
}
let animated_properties = get_animated_properties(&keyframes[0].read());
if animated_properties.is_empty() {
return None;
}
let mut steps = vec![];
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if steps[0].start_percentage.0 != 0. {
steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
if steps.last().unwrap().start_percentage.0 != 1. {
steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
Some(KeyframesAnimation {
steps: steps,
properties_changed: animated_properties,
})
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
| Keyframe | identifier_name |
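For reference, the from/to/percentage normalization performed by KeyframePercentage::parse and KeyframeSelector::parse above can be transcribed roughly as follows. This is an illustrative Python sketch, not the Servo implementation:

def parse_keyframe_selector(selector):
    """'from' -> 0.0, 'to' -> 1.0, 'NN%' -> NN/100, rejecting values
    outside [0, 1]; commas separate multiple selectors."""
    out = []
    for tok in (t.strip() for t in selector.split(",")):
        if tok == "from":
            value = 0.0
        elif tok == "to":
            value = 1.0
        elif tok.endswith("%"):
            value = float(tok[:-1]) / 100.0
        else:
            raise ValueError(tok)
        if not 0.0 <= value <= 1.0:
            raise ValueError(tok)
        out.append(value)
    return out

assert parse_keyframe_selector("from, 50%, to") == [0.0, 0.5, 1.0]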
keyframes.rs | <_>` (incrementing a reference count) rather than re-creating a `Vec<_>`.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(write!(dest, "{}%", iter.next().unwrap().0));
for percentage in iter {
try!(write!(dest, ", "));
try!(write!(dest, "{}%", percentage.0));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
pub fn parse(css: &str, origin: Origin,
base_url: ServoUrl,
extra_data: ParserContextExtraData) -> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = Box::new(MemoryHoleReporter);
let context = ParserContext::new_with_extra_data(origin, &base_url,
error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthesized keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
// TODO: Find a better name for this?
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// See `Keyframe::declarations`’s docs about the presence of `Importance`.
Declarations {
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Whether an animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[allow(unsafe_code)]
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read().declarations.iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animatable properties.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation. Note that it's not
/// defined what happens when a property is not on a keyframe, so we only peek
/// the props of the first one.
///
/// In practice, browsers seem to try to do their best job at it, so we might
/// want to go through all the actual keyframes and deduplicate properties.
#[allow(unsafe_code)]
fn get_animated_properties(keyframe: &Keyframe) -> Vec<TransitionProperty> {
let mut ret = vec![];
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for &(ref declaration, _) in keyframe.block.read().declarations.iter() {
if let Some(property) = TransitionProperty::from_declaration(declaration) {
ret.push(property);
}
}
ret
}
impl KeyframesAnimation {
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Option<Self> {
if keyframes.is_empty() {
return None;
}
let animated_properties = get_animated_properties(&keyframes[0].read());
if animated_properties.is_empty() {
return None;
}
let mut steps = vec![];
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if steps[0].start_percentage.0 != 0. {
steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
if steps.last().unwrap().start_percentage.0 != 1. {
steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
Some(KeyframesAnimation {
steps: steps,
properties_changed: animated_properties,
})
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<RwLock<Keyframe>>;
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> {
let start = input.position();
match KeyframeSelector::parse(input) {
Ok(sel) => Ok(sel),
Err(()) => {
let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start));
log_css_error(input, start, &message, self.context);
Err(())
}
}
}
fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser)
-> Result<Self::QualifiedRule, ()> {
let mut declarations = Vec::new();
let parser = KeyframeDeclarationParser {
context: self.context,
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
match declaration {
Ok(d) => declarations.extend(d.into_iter().map(|d| (d, Importance::Normal))),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported keyframe property declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, self.context);
}
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
Ok(Arc::new(RwLock::new(Keyframe {
selector: prelude,
block: Arc::new(RwLock::new(PropertyDeclarationBlock {
declarations: declarations,
important_count: 0,
})),
})))
}
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = Vec<PropertyDeclaration>;
}
impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> {
type Declaration = Vec<PropertyDeclaration>;
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<Vec<PropertyDeclaration>, ()> {
| let mut results = Vec::new();
match PropertyDeclaration::parse(name, self.context, input, &mut results, true) {
PropertyDeclarationParseResult::ValidOrIgnoredDeclaration => {}
_ => return Err(())
}
Ok(results)
}
} | identifier_body |
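The step assembly in KeyframesAnimation::from_keyframes then amounts to sorting by start percentage and padding missing boundary frames with autogenerated ComputedValues steps; a rough sketch of just the percentages (assumes a non-empty input, as the Rust code guarantees):

def build_step_percentages(keyframe_percentages):
    steps = sorted(keyframe_percentages)
    if steps[0] != 0.0:
        steps.insert(0, 0.0)  # synthetic step from the current computed values
    if steps[-1] != 1.0:
        steps.append(1.0)     # and likewise at 100%
    return steps

assert build_step_percentages([0.7, 0.3]) == [0.0, 0.3, 0.7, 1.0]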
|
keyframes.rs | pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl KeyframePercentage {
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage > 1. || percentage < 0. {
return Err(());
}
KeyframePercentage::new(percentage)
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct Keyframe {
pub selector: KeyframeSelector,
/// `!important` is not allowed in keyframe declarations,
/// so the second value of these tuples is always `Importance::Normal`.
/// But including them enables `compute_style_for_animation_step` to create a `ApplicableDeclarationBlock`
/// by cloning an `Arc<_>` (incrementing a reference count) rather than re-creating a `Vec<_>`.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(write!(dest, "{}%", iter.next().unwrap().0));
for percentage in iter {
try!(write!(dest, ", "));
try!(write!(dest, "{}%", percentage.0));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
pub fn parse(css: &str, origin: Origin,
base_url: ServoUrl,
extra_data: ParserContextExtraData) -> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = Box::new(MemoryHoleReporter);
let context = ParserContext::new_with_extra_data(origin, &base_url,
error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthesized keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
// TODO: Find a better name for this?
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// See `Keyframe::declarations`’s docs about the presence of `Importance`.
Declarations {
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Whether an animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[allow(unsafe_code)]
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
| _ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animatable properties.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation. Note that it's not
/// defined what happens when a property is not on a keyframe, so we only peek
/// the props of the first one.
///
/// In practice, browsers seem to try to do their best job at it, so we might
/// want to go through all the actual keyframes and deduplicate properties.
#[allow(unsafe_code)]
fn get_animated_properties(keyframe: &Keyframe) -> Vec<TransitionProperty> {
let mut ret = vec![];
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for &(ref declaration, _) in keyframe.block.read().declarations.iter() {
if let Some(property) = TransitionProperty::from_declaration(declaration) {
ret.push(property);
}
}
ret
}
impl KeyframesAnimation {
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Option<Self> {
if keyframes.is_empty() {
return None;
}
let animated_properties = get_animated_properties(&keyframes[0].read());
if animated_properties.is_empty() {
return None;
}
let mut steps = vec![];
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if steps[0].start_percentage.0 != 0. {
steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
if steps.last().unwrap().start_percentage.0 != 1. {
steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
Some(KeyframesAnimation {
steps: steps,
properties_changed: animated_properties,
})
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
| block.read().declarations.iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
| conditional_block |
keyframes.rs | pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl KeyframePercentage {
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage > 1. || percentage < 0. {
return Err(());
}
KeyframePercentage::new(percentage)
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct Keyframe {
pub selector: KeyframeSelector,
/// `!important` is not allowed in keyframe declarations,
/// so the second value of these tuples is always `Importance::Normal`.
/// But including them enables `compute_style_for_animation_step` to create a `ApplicableDeclarationBlock`
/// by cloning an `Arc<_>` (incrementing a reference count) rather than re-creating a `Vec<_>`.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(write!(dest, "{}%", iter.next().unwrap().0));
for percentage in iter {
try!(write!(dest, ", "));
try!(write!(dest, "{}%", percentage.0));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
pub fn parse(css: &str, origin: Origin,
base_url: ServoUrl,
extra_data: ParserContextExtraData) -> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = Box::new(MemoryHoleReporter);
let context = ParserContext::new_with_extra_data(origin, &base_url,
error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthesized keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
// TODO: Find a better name for this?
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// See `Keyframe::declarations`’s docs about the presence of `Importance`.
Declarations {
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Whether an animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[allow(unsafe_code)]
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read().declarations.iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animatable properties.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation. Note that it's not
/// defined what happens when a property is not on a keyframe, so we only peek
/// the props of the first one.
///
/// In practice, browsers seem to try to do their best job at it, so we might
/// want to go through all the actual keyframes and deduplicate properties.
#[allow(unsafe_code)]
fn get_animated_properties(keyframe: &Keyframe) -> Vec<TransitionProperty> {
let mut ret = vec![];
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for &(ref declaration, _) in keyframe.block.read().declarations.iter() {
if let Some(property) = TransitionProperty::from_declaration(declaration) {
ret.push(property);
}
}
ret
}
impl KeyframesAnimation {
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Option<Self> {
if keyframes.is_empty() {
return None;
}
let animated_properties = get_animated_properties(&keyframes[0].read());
if animated_properties.is_empty() {
return None;
}
let mut steps = vec![];
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if steps[0].start_percentage.0 != 0. {
steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
| Some(KeyframesAnimation {
steps: steps,
properties_changed: animated_properties,
})
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type | if steps.last().unwrap().start_percentage.0 != 1. {
steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
| random_line_split |
change_detector_ref.d.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @stable
*/
export declare abstract class ChangeDetectorRef {
/**
* Marks all {@link ChangeDetectionStrategy#OnPush} ancestors as to be checked.
*
* <!-- TODO: Add a link to a chapter on OnPush components -->
*
* ### Example ([live demo](http://plnkr.co/edit/GC512b?p=preview))
*
* ```typescript
* @Component({
* selector: 'cmp',
* changeDetection: ChangeDetectionStrategy.OnPush,
* template: `Number of ticks: {{numberOfTicks}}`
* })
* class Cmp {
* numberOfTicks = 0;
* | * setInterval(() => {
* this.numberOfTicks ++
* // the following is required, otherwise the view will not be updated
* this.ref.markForCheck();
* }, 1000);
* }
* }
*
* @Component({
* selector: 'app',
* changeDetection: ChangeDetectionStrategy.OnPush,
* template: `
* <cmp><cmp>
* `,
* })
* class App {
* }
* ```
*/
abstract markForCheck(): void;
/**
* Detaches the change detector from the change detector tree.
*
* The detached change detector will not be checked until it is reattached.
*
* This can also be used in combination with {@link ChangeDetectorRef#detectChanges} to implement
* local change
* detection checks.
*
* <!-- TODO: Add a link to a chapter on detach/reattach/local digest -->
* <!-- TODO: Add a live demo once ref.detectChanges is merged into master -->
*
* ### Example
*
* The following example defines a component with a large list of data.
* Imagine the data changes constantly, many times per second. For performance reasons,
* we want to check and update the list every five seconds. We can do that by detaching
* the component's change detector and doing a local check every five seconds.
*
* ```typescript
* class DataProvider {
* // in a real application the returned data will be different every time
* get data() {
* return [1,2,3,4,5];
* }
* }
*
* @Component({
* selector: 'giant-list',
* template: `
* <li *ngFor="let d of dataProvider.data">Data {{d}}</lig>
* `,
* })
* class GiantList {
* constructor(private ref: ChangeDetectorRef, private dataProvider:DataProvider) {
* ref.detach();
* setInterval(() => {
* this.ref.detectChanges();
* }, 5000);
* }
* }
*
* @Component({
* selector: 'app',
* providers: [DataProvider],
* template: `
* <giant-list><giant-list>
* `,
* })
* class App {
* }
* ```
*/
abstract detach(): void;
/**
* Checks the change detector and its children.
*
* This can also be used in combination with {@link ChangeDetectorRef#detach} to implement local
* change detection
* checks.
*
* <!-- TODO: Add a link to a chapter on detach/reattach/local digest -->
* <!-- TODO: Add a live demo once ref.detectChanges is merged into master -->
*
* ### Example
*
* The following example defines a component with a large list of data.
* Imagine the data changes constantly, many times per second. For performance reasons,
* we want to check and update the list every five seconds.
*
* We can do that by detaching the component's change detector and doing a local change detection
* check
* every five seconds.
*
* See {@link ChangeDetectorRef#detach} for more information.
*/
abstract detectChanges(): void;
/**
* Checks the change detector and its children, and throws if any changes are detected.
*
* This is used in development mode to verify that running change detection doesn't introduce
* other changes.
*/
abstract checkNoChanges(): void;
/**
* Reattach the change detector to the change detector tree.
*
* This also marks OnPush ancestors as to be checked. This reattached change detector will be
* checked during the next change detection run.
*
* <!-- TODO: Add a link to a chapter on detach/reattach/local digest -->
*
* ### Example ([live demo](http://plnkr.co/edit/aUhZha?p=preview))
*
* The following example creates a component displaying `live` data. The component will detach
* its change detector from the main change detector tree when the component's live property
* is set to false.
*
* ```typescript
* class DataProvider {
* data = 1;
*
* constructor() {
* setInterval(() => {
* this.data = this.data * 2;
* }, 500);
* }
* }
*
* @Component({
* selector: 'live-data',
* inputs: ['live'],
* template: 'Data: {{dataProvider.data}}'
* })
* class LiveData {
* constructor(private ref: ChangeDetectorRef, private dataProvider:DataProvider) {}
*
* set live(value) {
* if (value)
* this.ref.reattach();
* else
* this.ref.detach();
* }
* }
*
* @Component({
* selector: 'app',
* providers: [DataProvider],
* template: `
* Live Update: <input type="checkbox" [(ngModel)]="live">
* <live-data [live]="live"><live-data>
* `,
* })
* class App {
* live = true;
* }
* ```
*/
abstract reattach(): void;
} | * constructor(ref: ChangeDetectorRef) { | random_line_split |
p074.rs | //! [Problem 74](https://projecteuler.net/problem=74) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use std::collections::HashMap;
#[derive(Clone)]
enum Length {
Loop(usize),
Chain(usize),
Unknown,
}
fn fact_sum(mut n: u32, fs: &[u32; 10]) -> u32 {
if n == 0 {
return 1;
}
let mut sum = 0;
while n > 0 {
sum += fs[(n % 10) as usize];
n /= 10;
}
sum
}
fn get_chain_len(n: u32, map: &mut [Length], fs: &[u32; 10]) -> usize {
let mut chain_map = HashMap::new();
let mut idx = n;
let mut chain_len = 0;
let mut loop_len = 0;
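// Walk n -> fact_sum(n), recording the first-visit index of each value so
// that a revisit splits the walk into a non-repeating chain and a loop.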
loop {
match map[idx as usize] {
Length::Loop(c) => {
loop_len += c;
break;
}
Length::Chain(c) => {
chain_len += c;
break;
}
Length::Unknown => match chain_map.get(&idx) {
Some(&chain_idx) => {
loop_len = chain_len - chain_idx;
chain_len = chain_idx;
break;
}
None => {
let _ = chain_map.insert(idx, chain_len);
idx = fact_sum(idx, fs);
chain_len += 1;
}
},
}
}
for (&key, &idx) in &chain_map {
if idx >= chain_len {
map[key as usize] = Length::Loop(loop_len);
} else {
map[key as usize] = Length::Chain(loop_len + chain_len - idx);
}
}
chain_len + loop_len
}
fn solve() -> String {
let limit = 1000000;
let factorial = {
let mut val = [1; 10];
for i in 1..10 {
val[i] = val[i - 1] * (i as u32);
}
val
};
let mut map = vec![Length::Unknown; (factorial[9] * 6 + 1) as usize];
let mut cnt = 0;
for n in 1..(limit + 1) {
let len = get_chain_len(n, &mut map, &factorial);
if len == 60 {
cnt += 1;
}
}
cnt.to_string()
}
common::problem!("402", solve);
#[cfg(test)]
mod tests {
use std::iter;
#[test]
fn | () {
let factorial = {
let mut val = [1; 10];
for i in 1..10 {
val[i] = val[i - 1] * (i as u32);
}
val
};
let mut map = iter::repeat(super::Length::Unknown)
.take((factorial[9] * 6 + 1) as usize)
.collect::<Vec<_>>();
assert_eq!(3, super::get_chain_len(169, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(871, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(872, &mut map, &factorial));
assert_eq!(5, super::get_chain_len(69, &mut map, &factorial));
assert_eq!(4, super::get_chain_len(78, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(540, &mut map, &factorial));
}
}
| len | identifier_name |
p074.rs | //! [Problem 74](https://projecteuler.net/problem=74) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use std::collections::HashMap;
#[derive(Clone)]
enum Length {
Loop(usize),
Chain(usize),
Unknown, | fn fact_sum(mut n: u32, fs: &[u32; 10]) -> u32 {
if n == 0 {
return 1;
}
let mut sum = 0;
while n > 0 {
sum += fs[(n % 10) as usize];
n /= 10;
}
sum
}
fn get_chain_len(n: u32, map: &mut [Length], fs: &[u32; 10]) -> usize {
let mut chain_map = HashMap::new();
let mut idx = n;
let mut chain_len = 0;
let mut loop_len = 0;
loop {
match map[idx as usize] {
Length::Loop(c) => {
loop_len += c;
break;
}
Length::Chain(c) => {
chain_len += c;
break;
}
Length::Unknown => match chain_map.get(&idx) {
Some(&chain_idx) => {
loop_len = chain_len - chain_idx;
chain_len = chain_idx;
break;
}
None => {
let _ = chain_map.insert(idx, chain_len);
idx = fact_sum(idx, fs);
chain_len += 1;
}
},
}
}
for (&key, &idx) in &chain_map {
if idx >= chain_len {
map[key as usize] = Length::Loop(loop_len);
} else {
map[key as usize] = Length::Chain(loop_len + chain_len - idx);
}
}
chain_len + loop_len
}
fn solve() -> String {
let limit = 1000000;
let factorial = {
let mut val = [1; 10];
for i in 1..10 {
val[i] = val[i - 1] * (i as u32);
}
val
};
let mut map = vec![Length::Unknown; (factorial[9] * 6 + 1) as usize];
let mut cnt = 0;
for n in 1..(limit + 1) {
let len = get_chain_len(n, &mut map, &factorial);
if len == 60 {
cnt += 1;
}
}
cnt.to_string()
}
common::problem!("402", solve);
#[cfg(test)]
mod tests {
use std::iter;
#[test]
fn len() {
let factorial = {
let mut val = [1; 10];
for i in 1..10 {
val[i] = val[i - 1] * (i as u32);
}
val
};
let mut map = iter::repeat(super::Length::Unknown)
.take((factorial[9] * 6 + 1) as usize)
.collect::<Vec<_>>();
assert_eq!(3, super::get_chain_len(169, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(871, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(872, &mut map, &factorial));
assert_eq!(5, super::get_chain_len(69, &mut map, &factorial));
assert_eq!(4, super::get_chain_len(78, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(540, &mut map, &factorial));
}
} | }
| random_line_split |
p074.rs | //! [Problem 74](https://projecteuler.net/problem=74) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use std::collections::HashMap;
#[derive(Clone)]
enum Length {
Loop(usize),
Chain(usize),
Unknown,
}
fn fact_sum(mut n: u32, fs: &[u32; 10]) -> u32 {
if n == 0 {
return 1;
}
let mut sum = 0;
while n > 0 {
sum += fs[(n % 10) as usize];
n /= 10;
}
sum
}
fn get_chain_len(n: u32, map: &mut [Length], fs: &[u32; 10]) -> usize {
let mut chain_map = HashMap::new();
let mut idx = n;
let mut chain_len = 0;
let mut loop_len = 0;
loop {
match map[idx as usize] {
Length::Loop(c) => {
loop_len += c;
break;
}
Length::Chain(c) => {
chain_len += c;
break;
}
Length::Unknown => match chain_map.get(&idx) {
Some(&chain_idx) => {
loop_len = chain_len - chain_idx;
chain_len = chain_idx;
break;
}
None => {
let _ = chain_map.insert(idx, chain_len);
idx = fact_sum(idx, fs);
chain_len += 1;
}
},
}
}
for (&key, &idx) in &chain_map {
if idx >= chain_len {
map[key as usize] = Length::Loop(loop_len);
} else |
}
chain_len + loop_len
}
fn solve() -> String {
let limit = 1000000;
let factorial = {
let mut val = [1; 10];
for i in 1..10 {
val[i] = val[i - 1] * (i as u32);
}
val
};
let mut map = vec![Length::Unknown; (factorial[9] * 6 + 1) as usize];
let mut cnt = 0;
for n in 1..(limit + 1) {
let len = get_chain_len(n, &mut map, &factorial);
if len == 60 {
cnt += 1;
}
}
cnt.to_string()
}
common::problem!("402", solve);
#[cfg(test)]
mod tests {
use std::iter;
#[test]
fn len() {
let factorial = {
let mut val = [1; 10];
for i in 1..10 {
val[i] = val[i - 1] * (i as u32);
}
val
};
let mut map = iter::repeat(super::Length::Unknown)
.take((factorial[9] * 6 + 1) as usize)
.collect::<Vec<_>>();
assert_eq!(3, super::get_chain_len(169, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(871, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(872, &mut map, &factorial));
assert_eq!(5, super::get_chain_len(69, &mut map, &factorial));
assert_eq!(4, super::get_chain_len(78, &mut map, &factorial));
assert_eq!(2, super::get_chain_len(540, &mut map, &factorial));
}
}
| {
map[key as usize] = Length::Chain(loop_len + chain_len - idx);
} | conditional_block |
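The solver above memoizes chain and loop lengths; an uncached Python reference for the same factorial digit chains reproduces its test asserts (slow for the full sweep, so that check is left commented):

from math import factorial

FACT = [factorial(d) for d in range(10)]

def fact_sum(n):
    return sum(FACT[int(d)] for d in str(n))

def chain_len(n):
    seen = set()
    while n not in seen:  # chain length = number of distinct terms
        seen.add(n)
        n = fact_sum(n)
    return len(seen)

assert chain_len(169) == 3 and chain_len(69) == 5 and chain_len(78) == 4
# Problem 74: chains of length exactly 60 below one million.
# sum(1 for n in range(1, 1_000_000) if chain_len(n) == 60) == 402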
test_hive_partition.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import unittest
from unittest.mock import patch
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
from tests.test_utils.mock_hooks import MockHiveMetastoreHook
@unittest.skipIf('AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set")
@patch(
'airflow.providers.apache.hive.sensors.hive_partition.HiveMetastoreHook',
side_effect=MockHiveMetastoreHook,
)
class TestHivePartitionSensor(TestHiveEnvironment):
def | (self, mock_hive_metastore_hook):
op = HivePartitionSensor(
task_id='hive_partition_check', table='airflow.static_babynames_partitioned', dag=self.dag
)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
| test_hive_partition_sensor | identifier_name |
test_hive_partition.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| import os
import unittest
from unittest.mock import patch
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
from tests.test_utils.mock_hooks import MockHiveMetastoreHook
@unittest.skipIf('AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set")
@patch(
'airflow.providers.apache.hive.sensors.hive_partition.HiveMetastoreHook',
side_effect=MockHiveMetastoreHook,
)
class TestHivePartitionSensor(TestHiveEnvironment):
def test_hive_partition_sensor(self, mock_hive_metastore_hook):
op = HivePartitionSensor(
task_id='hive_partition_check', table='airflow.static_babynames_partitioned', dag=self.dag
)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) | random_line_split |
|
test_hive_partition.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import unittest
from unittest.mock import patch
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
from tests.test_utils.mock_hooks import MockHiveMetastoreHook
@unittest.skipIf('AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set")
@patch(
'airflow.providers.apache.hive.sensors.hive_partition.HiveMetastoreHook',
side_effect=MockHiveMetastoreHook,
)
class TestHivePartitionSensor(TestHiveEnvironment):
def test_hive_partition_sensor(self, mock_hive_metastore_hook):
| op = HivePartitionSensor(
task_id='hive_partition_check', table='airflow.static_babynames_partitioned', dag=self.dag
)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) | identifier_body |
|
util.py | import tornado.web
import json
from tornado_cors import CorsMixin
from common import ParameterFormat, EnumEncoder
class DefaultRequestHandler(CorsMixin, tornado.web.RequestHandler):
CORS_ORIGIN = '*'
def initialize(self):
self.default_format = self.get_argument("format", "json", True)
self.show_about = self.get_argument("show_about", True, True)
self.pg_version = self.get_argument("pg_version", 9.6, True)
self.version = "2.0 beta"
def write_about_stuff(self, format_type="alter_system"):
|
def write_comment(self, format_type, comment):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
if comment != "NONE":
self.write("\n{} {}\n".format(default_comment, comment))
def write_config(self, output_data):
if self.show_about is True:
self.write_about_stuff("conf")
for category in output_data:
self.write("# {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
value_format = parameter.get("format", ParameterFormat.NONE)
if value_format in (ParameterFormat.String,
ParameterFormat.Time):
config_value = "'{}'".format(config_value)
parameter_comment = parameter.get("comment", "NONE")
if parameter_comment != "NONE":
self.write_comment("conf", parameter_comment)
self.write("{} = {}\n".format(parameter["name"], config_value))
self.write("\n")
def write_alter_system(self, output_data):
if float(self.pg_version) <= 9.3:
self.write("-- ALTER SYSTEM format it's only supported on version 9.4 and higher. Use 'conf' format instead.")
else:
if self.show_about is True:
self.write_about_stuff()
for category in output_data:
self.write("-- {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
parameter_comment = parameter.get("comment", "NONE")
self.write_comment("alter_system", parameter_comment)
self.write("ALTER SYSTEM SET {} TO '{}';\n".format(parameter[
"name"], config_value))
self.write("\n")
def write_plain(self, message=list()):
if len(message) == 1:
self.write(message[0])
else:
for line in message:
self.write(line + '\n')
def write_bash(self, message=list()):
bash_script = """
#!/bin/bash
"""
self.write(bash_script)
if len(message) == 1:
self.write('SQL_QUERY="{}"\n'.format(message[0]))
self.write('psql -c "${SQL_QUERY}"\n')
else:
for line in message:
self.write('SQL_QUERY="{}"\n'.format(line))
self.write('psql -c "${SQL_QUERY}"\n\n')
def write_json_api(self, message):
self.set_header('Content-Type', 'application/vnd.api+json')
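# Assemble a JSON:API v1.0 document: primary data plus meta, links and
# jsonapi members (write_json below is the plain JSON variant).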
_document = {}
_document["data"] = message
_meta = {}
_meta["copyright"] = "PGConfig API"
_meta["version"] = self.version
_meta["arguments"] = self.request.arguments
_document["meta"] = _meta
_document["jsonapi"] = {"version": "1.0"}
full_url = self.request.protocol + "://" + self.request.host + self.request.uri
_document["links"] = {"self": full_url}
self.write(
json.dumps(
_document,
sort_keys=True,
separators=(',', ': '),
cls=EnumEncoder))
def write_json(self, message=list()):
self.set_header('Content-Type', 'application/json')
if len(message) == 1:
self.write("{ \"output\": \"" + message[0] + "\"}")
else:
new_output = "{ \"output\": ["
first_line = True
for line in message:
if not first_line:
new_output += ","
else:
first_line = False
new_output += "\"{}\"".format(line)
new_output += "] } "
self.write(new_output)
def return_output(self, message=list()):
# default_format=self.get_argument("format", "json", True)
# converting string input into a list (for solve issue with multiline strings)
process_data = []
if not isinstance(message, list):
process_data.insert(0, message)
else:
process_data = message
if self.default_format == "json":
self.write_json_api(message)
elif self.default_format == "bash":
self.write_bash(message)
elif self.default_format == "conf":
self.write_config(message)
elif self.default_format == "alter_system":
self.write_alter_system(message)
else:
self.write_plain(message)
class GeneratorRequestHandler(DefaultRequestHandler):
pass
| default_comment = "--"
if format_type == "conf":
default_comment = "#"
self.write("{} Generated by PGConfig {}\n".format(default_comment,
self.version))
self.write("{} http://pgconfig.org\n\n".format(default_comment * 2)) | identifier_body |
util.py | import tornado.web
import json
from tornado_cors import CorsMixin
from common import ParameterFormat, EnumEncoder
class DefaultRequestHandler(CorsMixin, tornado.web.RequestHandler):
CORS_ORIGIN = '*'
def initialize(self):
self.default_format = self.get_argument("format", "json", True)
self.show_about = self.get_argument("show_about", True, True)
self.pg_version = self.get_argument("pg_version", 9.6, True)
self.version = "2.0 beta"
def write_about_stuff(self, format_type="alter_system"):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
self.write("{} Generated by PGConfig {}\n".format(default_comment,
self.version))
self.write("{} http://pgconfig.org\n\n".format(default_comment * 2))
def write_comment(self, format_type, comment):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
if comment != "NONE":
self.write("\n{} {}\n".format(default_comment, comment))
def write_config(self, output_data):
if self.show_about is True:
self.write_about_stuff("conf")
for category in output_data:
self.write("# {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
value_format = parameter.get("format", ParameterFormat.NONE)
if value_format in (ParameterFormat.String,
ParameterFormat.Time):
config_value = "'{}'".format(config_value)
parameter_comment = parameter.get("comment", "NONE")
if parameter_comment != "NONE":
|
self.write("{} = {}\n".format(parameter["name"], config_value))
self.write("\n")
def write_alter_system(self, output_data):
if float(self.pg_version) <= 9.3:
self.write("-- ALTER SYSTEM format it's only supported on version 9.4 and higher. Use 'conf' format instead.")
else:
if self.show_about is True:
self.write_about_stuff()
for category in output_data:
self.write("-- {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
parameter_comment = parameter.get("comment", "NONE")
self.write_comment("alter_system", parameter_comment)
self.write("ALTER SYSTEM SET {} TO '{}';\n".format(parameter[
"name"], config_value))
self.write("\n")
def write_plain(self, message=list()):
if len(message) == 1:
self.write(message[0])
else:
for line in message:
self.write(line + '\n')
def write_bash(self, message=list()):
bash_script = """
#!/bin/bash
"""
self.write(bash_script)
if len(message) == 1:
self.write('SQL_QUERY="{}"\n'.format(message[0]))
self.write('psql -c "${SQL_QUERY}"\n')
else:
for line in message:
self.write('SQL_QUERY="{}"\n'.format(line))
self.write('psql -c "${SQL_QUERY}"\n\n')
def write_json_api(self, message):
self.set_header('Content-Type', 'application/vnd.api+json')
_document = {}
_document["data"] = message
_meta = {}
_meta["copyright"] = "PGConfig API"
_meta["version"] = self.version
_meta["arguments"] = self.request.arguments
_document["meta"] = _meta
_document["jsonapi"] = {"version": "1.0"}
full_url = self.request.protocol + "://" + self.request.host + self.request.uri
_document["links"] = {"self": full_url}
self.write(
json.dumps(
_document,
sort_keys=True,
separators=(',', ': '),
cls=EnumEncoder))
def write_json(self, message=list()):
self.set_header('Content-Type', 'application/json')
if len(message) == 1:
self.write("{ \"output\": \"" + message[0] + "\"}")
else:
new_output = "{ \"output\": ["
first_line = True
for line in message:
if not first_line:
new_output += ","
else:
first_line = False
new_output += "\"{}\"".format(line)
new_output += "] } "
self.write(new_output)
def return_output(self, message=list()):
# default_format=self.get_argument("format", "json", True)
# converting string input into a list (to work around an issue with multiline strings)
process_data = []
if not isinstance(message, list):
process_data.insert(0, message)
else:
process_data = message
if self.default_format == "json":
self.write_json_api(message)
elif self.default_format == "bash":
self.write_bash(message)
elif self.default_format == "conf":
self.write_config(message)
elif self.default_format == "alter_system":
self.write_alter_system(message)
else:
self.write_plain(message)
class GeneratorRequestHandler(DefaultRequestHandler):
pass
| self.write_comment("conf", parameter_comment) | conditional_block |
util.py | import tornado.web
import json
from tornado_cors import CorsMixin
from common import ParameterFormat, EnumEncoder
class DefaultRequestHandler(CorsMixin, tornado.web.RequestHandler):
CORS_ORIGIN = '*'
def initialize(self):
self.default_format = self.get_argument("format", "json", True)
self.show_about = self.get_argument("show_about", True, True)
self.pg_version = self.get_argument("pg_version", 9.6, True)
self.version = "2.0 beta"
def write_about_stuff(self, format_type="alter_system"):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
self.write("{} Generated by PGConfig {}\n".format(default_comment,
self.version))
self.write("{} http://pgconfig.org\n\n".format(default_comment * 2))
def write_comment(self, format_type, comment):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
if comment != "NONE":
self.write("\n{} {}\n".format(default_comment, comment))
def write_config(self, output_data):
if self.show_about is True:
self.write_about_stuff("conf")
for category in output_data:
self.write("# {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
value_format = parameter.get("format", ParameterFormat.NONE)
if value_format in (ParameterFormat.String,
ParameterFormat.Time):
config_value = "'{}'".format(config_value)
parameter_comment = parameter.get("comment", "NONE") |
self.write("\n")
def write_alter_system(self, output_data):
if float(self.pg_version) <= 9.3:
self.write("-- ALTER SYSTEM format it's only supported on version 9.4 and higher. Use 'conf' format instead.")
else:
if self.show_about is True:
self.write_about_stuff()
for category in output_data:
self.write("-- {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
parameter_comment = parameter.get("comment", "NONE")
self.write_comment("alter_system", parameter_comment)
self.write("ALTER SYSTEM SET {} TO '{}';\n".format(parameter[
"name"], config_value))
self.write("\n")
def write_plain(self, message=list()):
if len(message) == 1:
self.write(message[0])
else:
for line in message:
self.write(line + '\n')
def write_bash(self, message=list()):
bash_script = """
#!/bin/bash
"""
self.write(bash_script)
if len(message) == 1:
self.write('SQL_QUERY="{}"\n'.format(message[0]))
self.write('psql -c "${SQL_QUERY}"\n')
else:
for line in message:
self.write('SQL_QUERY="{}"\n'.format(line))
self.write('psql -c "${SQL_QUERY}"\n\n')
def write_json_api(self, message):
self.set_header('Content-Type', 'application/vnd.api+json')
_document = {}
_document["data"] = message
_meta = {}
_meta["copyright"] = "PGConfig API"
_meta["version"] = self.version
_meta["arguments"] = self.request.arguments
_document["meta"] = _meta
_document["jsonapi"] = {"version": "1.0"}
full_url = self.request.protocol + "://" + self.request.host + self.request.uri
_document["links"] = {"self": full_url}
self.write(
json.dumps(
_document,
sort_keys=True,
separators=(',', ': '),
cls=EnumEncoder))
def write_json(self, message=list()):
self.set_header('Content-Type', 'application/json')
if len(message) == 1:
self.write("{ \"output\": \"" + message[0] + "\"}")
else:
new_output = "{ \"output\": ["
first_line = True
for line in message:
if not first_line:
new_output += ","
else:
first_line = False
new_output += "\"{}\"".format(line)
new_output += "] } "
self.write(new_output)
def return_output(self, message=list()):
# default_format=self.get_argument("format", "json", True)
# converting string input into a list (to work around an issue with multiline strings)
process_data = []
if not isinstance(message, list):
process_data.insert(0, message)
else:
process_data = message
if self.default_format == "json":
self.write_json_api(message)
elif self.default_format == "bash":
self.write_bash(message)
elif self.default_format == "conf":
self.write_config(message)
elif self.default_format == "alter_system":
self.write_alter_system(message)
else:
self.write_plain(message)
class GeneratorRequestHandler(DefaultRequestHandler):
pass |
if parameter_comment != "NONE":
self.write_comment("conf", parameter_comment)
self.write("{} = {}\n".format(parameter["name"], config_value)) | random_line_split |
util.py | import tornado.web
import json
from tornado_cors import CorsMixin
from common import ParameterFormat, EnumEncoder
class DefaultRequestHandler(CorsMixin, tornado.web.RequestHandler):
CORS_ORIGIN = '*'
def initialize(self):
self.default_format = self.get_argument("format", "json", True)
self.show_about = self.get_argument("show_about", True, True)
self.pg_version = self.get_argument("pg_version", 9.6, True)
self.version = "2.0 beta"
def write_about_stuff(self, format_type="alter_system"):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
self.write("{} Generated by PGConfig {}\n".format(default_comment,
self.version))
self.write("{} http://pgconfig.org\n\n".format(default_comment * 2))
def write_comment(self, format_type, comment):
default_comment = "--"
if format_type == "conf":
default_comment = "#"
if comment != "NONE":
self.write("\n{} {}\n".format(default_comment, comment))
def write_config(self, output_data):
if self.show_about is True:
self.write_about_stuff("conf")
for category in output_data:
self.write("# {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
value_format = parameter.get("format", ParameterFormat.NONE)
if value_format in (ParameterFormat.String,
ParameterFormat.Time):
config_value = "'{}'".format(config_value)
parameter_comment = parameter.get("comment", "NONE")
if parameter_comment != "NONE":
self.write_comment("conf", parameter_comment)
self.write("{} = {}\n".format(parameter["name"], config_value))
self.write("\n")
def write_alter_system(self, output_data):
if float(self.pg_version) <= 9.3:
self.write("-- ALTER SYSTEM format it's only supported on version 9.4 and higher. Use 'conf' format instead.")
else:
if self.show_about is True:
self.write_about_stuff()
for category in output_data:
self.write("-- {}\n".format(category["description"]))
for parameter in category["parameters"]:
config_value = parameter.get("config_value", "NI")
parameter_comment = parameter.get("comment", "NONE")
self.write_comment("alter_system", parameter_comment)
self.write("ALTER SYSTEM SET {} TO '{}';\n".format(parameter[
"name"], config_value))
self.write("\n")
def write_plain(self, message=list()):
if len(message) == 1:
self.write(message[0])
else:
for line in message:
self.write(line + '\n')
def write_bash(self, message=list()):
bash_script = """
#!/bin/bash
"""
self.write(bash_script)
if len(message) == 1:
self.write('SQL_QUERY="{}"\n'.format(message[0]))
self.write('psql -c "${SQL_QUERY}"\n')
else:
for line in message:
self.write('SQL_QUERY="{}"\n'.format(line))
self.write('psql -c "${SQL_QUERY}"\n\n')
def | (self, message):
self.set_header('Content-Type', 'application/vnd.api+json')
_document = {}
_document["data"] = message
_meta = {}
_meta["copyright"] = "PGConfig API"
_meta["version"] = self.version
_meta["arguments"] = self.request.arguments
_document["meta"] = _meta
_document["jsonapi"] = {"version": "1.0"}
full_url = self.request.protocol + "://" + self.request.host + self.request.uri
_document["links"] = {"self": full_url}
self.write(
json.dumps(
_document,
sort_keys=True,
separators=(',', ': '),
cls=EnumEncoder))
def write_json(self, message=list()):
self.set_header('Content-Type', 'application/json')
if len(message) == 1:
self.write("{ \"output\": \"" + message[0] + "\"}")
else:
new_output = "{ \"output\": ["
first_line = True
for line in message:
if not first_line:
new_output += ","
else:
first_line = False
new_output += "\"{}\"".format(line)
new_output += "] } "
self.write(new_output)
def return_output(self, message=list()):
# default_format=self.get_argument("format", "json", True)
# converting string input into a list (to work around an issue with multiline strings)
process_data = []
if not isinstance(message, list):
process_data.insert(0, message)
else:
process_data = message
if self.default_format == "json":
self.write_json_api(message)
elif self.default_format == "bash":
self.write_bash(message)
elif self.default_format == "conf":
self.write_config(message)
elif self.default_format == "alter_system":
self.write_alter_system(message)
else:
self.write_plain(message)
class GeneratorRequestHandler(DefaultRequestHandler):
pass
| write_json_api | identifier_name |
test_chart_axis23.py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_axis23.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [46332160, 47470848]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
| chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
chart.set_x_axis({'num_format': 'dd/mm/yyyy'})
chart.set_y_axis({'num_format': '0.00%'})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual() | chart.add_series({'values': '=Sheet1!$A$1:$A$5'}) | random_line_split |
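Outside the comparison-test harness, the same chart in test_create_file can be produced with the public XlsxWriter API directly. A minimal standalone sketch, assuming the xlsxwriter package is installed; 'demo.xlsx' is an arbitrary output name:

```python
# Standalone version of the chart built in test_create_file above.
import xlsxwriter

workbook = xlsxwriter.Workbook('demo.xlsx')
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
data = [[1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15]]
for col, series in zip('ABC', data):
    worksheet.write_column(col + '1', series)              # fill A1:A5, B1:B5, C1:C5
    chart.add_series({'values': '=Sheet1!${0}$1:${0}$5'.format(col)})
chart.set_x_axis({'num_format': 'dd/mm/yyyy'})
chart.set_y_axis({'num_format': '0.00%'})
worksheet.insert_chart('E9', chart)
workbook.close()
```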
test_chart_axis23.py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def | (self):
self.maxDiff = None
filename = 'chart_axis23.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [46332160, 47470848]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
chart.set_x_axis({'num_format': 'dd/mm/yyyy'})
chart.set_y_axis({'num_format': '0.00%'})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
| setUp | identifier_name |
test_chart_axis23.py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_axis23.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
| chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
chart.set_x_axis({'num_format': 'dd/mm/yyyy'})
chart.set_y_axis({'num_format': '0.00%'})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
| """Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [46332160, 47470848]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
| identifier_body |
base.py | from openerp.osv import osv, fields
class IrActionsActWindowMenu(osv.Model):
|
class IrActionsActWindowButton(osv.Model):
_name = 'ir.actions.act_window.button'
_description = 'Button to display'
_order = 'name'
_columns = {
'action_from_id': fields.many2one('ir.actions.act_window', 'from Action',
required=True),
'action_to_open_id': fields.many2one('ir.actions.actions', 'to Action',
required=True),
'name': fields.char('Label', size=64, required=True, translate=True),
'menu_id': fields.many2one('ir.actions.act_window.menu', 'Menu'),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
'visibility_model_name': fields.char(u"Model",
help=u"Model where visible_button_method_name is "
u"defined to manage the button visibility."),
'visible_button_method_name': fields.char('Visibility method name',
help=u"Method that tells if the button should be "
u"visible or not; returns True if it must "
u"be visible, False otherwise. "
u"def Method(cr, uid, context=None)"),
}
_defaults = {
'active': True,
}
def format_buttons(self, cr, uid, ids, context=None):
res = {}
action = self.pool.get('ir.actions.actions')
def get_action(action_id):
model = self.pool.get(action.read(cr, uid, action_id, ['type'],
context=context)['type'])
return model.read(cr, uid, action_id, [], load="_classic_write",
context=context)
for this in self.browse(cr, uid, ids, context=context):
if not this.active:
continue
if this.menu_id:
if not this.menu_id.active:
continue
if this.visibility_model_name and this.visible_button_method_name:
model = self.pool.get(this.visibility_model_name)
if not getattr(model, this.visible_button_method_name)(
cr, uid, context=context):
continue
menu = this.menu_id.name if this.menu_id else False
if menu not in res.keys():
res[menu] = []
val = get_action(this.action_to_open_id.id)
val.update({'name': this.name})
res[menu].append(val)
return res
class IrActionsActWindow(osv.Model):
_inherit = 'ir.actions.act_window'
_columns = {
'buttons_ids': fields.one2many('ir.actions.act_window.button',
'action_from_id', 'Buttons'),
}
def get_menus_and_buttons(self, cr, uid, ids, context=None):
res = {}
button = self.pool.get('ir.actions.act_window.button')
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = button.format_buttons(
cr, uid, [x.id for x in this.buttons_ids], context=context)
return res
| _name = 'ir.actions.act_window.menu'
_description = 'Menu on the actions'
_columns = {
'name': fields.char('Label', size=64, required=True, translate=True),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
}
_defaults = {
'active': True,
} | identifier_body |
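format_buttons hides a small dynamic-dispatch trick: the visibility hook is looked up by name with getattr and only called when it exists. A dependency-free sketch of that pattern, with hypothetical names (InvoiceButtons, is_manager) standing in for a real OpenERP model:

```python
# Minimal model of the getattr-based visibility check used above.
class InvoiceButtons(object):
    def is_manager(self, cr, uid, context=None):
        return True  # a real hook would inspect the user's groups

def button_visible(model, method_name, cr=None, uid=None, context=None):
    hook = getattr(model, method_name, None)
    # fall back to "visible" when no hook is configured, as format_buttons does
    return hook(cr, uid, context=context) if callable(hook) else True

print(button_visible(InvoiceButtons(), 'is_manager'))  # True
```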
base.py | from openerp.osv import osv, fields
class IrActionsActWindowMenu(osv.Model):
_name = 'ir.actions.act_window.menu'
_description = 'Menu on the actions'
_columns = {
'name': fields.char('Label', size=64, required=True, translate=True),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
}
_defaults = {
'active': True,
}
class IrActionsActWindowButton(osv.Model):
_name = 'ir.actions.act_window.button'
_description = 'Button to display'
_order = 'name'
_columns = {
'action_from_id': fields.many2one('ir.actions.act_window', 'from Action',
required=True),
'action_to_open_id': fields.many2one('ir.actions.actions', 'to Action',
required=True),
'name': fields.char('Label', size=64, required=True, translate=True),
'menu_id': fields.many2one('ir.actions.act_window.menu', 'Menu'),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
'visibility_model_name': fields.char(u"Model",
help=u"Model where visible_button_method_name is "
u"defined to manage the button visibility."),
'visible_button_method_name': fields.char('Visibility method name',
help=u"Method that tells if the button should be "
u"visible or not; returns True if it must "
u"be visible, False otherwise. "
u"def Method(cr, uid, context=None)"),
}
_defaults = {
'active': True,
}
def format_buttons(self, cr, uid, ids, context=None):
res = {}
action = self.pool.get('ir.actions.actions')
def get_action(action_id):
model = self.pool.get(action.read(cr, uid, action_id, ['type'],
context=context)['type'])
return model.read(cr, uid, action_id, [], load="_classic_write",
context=context)
for this in self.browse(cr, uid, ids, context=context):
if not this.active:
continue
if this.menu_id:
if not this.menu_id.active:
continue
if this.visibility_model_name and this.visible_button_method_name:
model = self.pool.get(this.visibility_model_name)
if not getattr(model, this.visible_button_method_name)(
cr, uid, context=context):
continue
menu = this.menu_id.name if this.menu_id else False
if menu not in res.keys():
res[menu] = []
val = get_action(this.action_to_open_id.id)
val.update({'name': this.name})
res[menu].append(val) |
return res
class IrActionsActWindow(osv.Model):
_inherit = 'ir.actions.act_window'
_columns = {
'buttons_ids': fields.one2many('ir.actions.act_window.button',
'action_from_id', 'Buttons'),
}
def get_menus_and_buttons(self, cr, uid, ids, context=None):
res = {}
button = self.pool.get('ir.actions.act_window.button')
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = button.format_buttons(
cr, uid, [x.id for x in this.buttons_ids], context=context)
return res | random_line_split |
|
base.py | from openerp.osv import osv, fields
class IrActionsActWindowMenu(osv.Model):
_name = 'ir.actions.act_window.menu'
_description = 'Menu on the actions'
_columns = {
'name': fields.char('Label', size=64, required=True, translate=True),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
}
_defaults = {
'active': True,
}
class IrActionsActWindowButton(osv.Model):
_name = 'ir.actions.act_window.button'
_description = 'Button to display'
_order = 'name'
_columns = {
'action_from_id': fields.many2one('ir.actions.act_window', 'from Action',
required=True),
'action_to_open_id': fields.many2one('ir.actions.actions', 'to Action',
required=True),
'name': fields.char('Label', size=64, required=True, translate=True),
'menu_id': fields.many2one('ir.actions.act_window.menu', 'Menu'),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
'visibility_model_name': fields.char(u"Model",
help=u"Model where visible_button_method_name is "
u"defined to manage the button visibility."),
'visible_button_method_name': fields.char('Visibility method name',
help=u"Method that tells if the button should be "
u"visible or not; returns True if it must "
u"be visible, False otherwise. "
u"def Method(cr, uid, context=None)"),
}
_defaults = {
'active': True,
}
def format_buttons(self, cr, uid, ids, context=None):
res = {}
action = self.pool.get('ir.actions.actions')
def get_action(action_id):
model = self.pool.get(action.read(cr, uid, action_id, ['type'],
context=context)['type'])
return model.read(cr, uid, action_id, [], load="_classic_write",
context=context)
for this in self.browse(cr, uid, ids, context=context):
if not this.active:
continue
if this.menu_id:
if not this.menu_id.active:
|
if this.visibility_model_name and this.visible_button_method_name:
model = self.pool.get(this.visibility_model_name)
if not getattr(model, this.visible_button_method_name)(
cr, uid, context=context):
continue
menu = this.menu_id.name if this.menu_id else False
if menu not in res.keys():
res[menu] = []
val = get_action(this.action_to_open_id.id)
val.update({'name': this.name})
res[menu].append(val)
return res
class IrActionsActWindow(osv.Model):
_inherit = 'ir.actions.act_window'
_columns = {
'buttons_ids': fields.one2many('ir.actions.act_window.button',
'action_from_id', 'Buttons'),
}
def get_menus_and_buttons(self, cr, uid, ids, context=None):
res = {}
button = self.pool.get('ir.actions.act_window.button')
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = button.format_buttons(
cr, uid, [x.id for x in this.buttons_ids], context=context)
return res
| continue | conditional_block |
base.py | from openerp.osv import osv, fields
class IrActionsActWindowMenu(osv.Model):
_name = 'ir.actions.act_window.menu'
_description = 'Menu on the actions'
_columns = {
'name': fields.char('Label', size=64, required=True, translate=True),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
}
_defaults = {
'active': True,
}
class IrActionsActWindowButton(osv.Model):
_name = 'ir.actions.act_window.button'
_description = 'Button to display'
_order = 'name'
_columns = {
'action_from_id': fields.many2one('ir.actions.act_window', 'from Action',
required=True),
'action_to_open_id': fields.many2one('ir.actions.actions', 'to Action',
required=True),
'name': fields.char('Label', size=64, required=True, translate=True),
'menu_id': fields.many2one('ir.actions.act_window.menu', 'Menu'),
'active': fields.boolean(
'Active', help='if checked, this object is always available'),
'visibility_model_name': fields.char(u"Model",
help=u"Model where visible_button_method_name is "
u"defined to manage the button visibility."),
'visible_button_method_name': fields.char('Visibility method name',
help=u"Method that tells if the button should be "
u"visible or not; returns True if it must "
u"be visible, False otherwise. "
u"def Method(cr, uid, context=None)"),
}
_defaults = {
'active': True,
}
def format_buttons(self, cr, uid, ids, context=None):
res = {}
action = self.pool.get('ir.actions.actions')
def | (action_id):
model = self.pool.get(action.read(cr, uid, action_id, ['type'],
context=context)['type'])
return model.read(cr, uid, action_id, [], load="_classic_write",
context=context)
for this in self.browse(cr, uid, ids, context=context):
if not this.active:
continue
if this.menu_id:
if not this.menu_id.active:
continue
if this.visibility_model_name and this.visible_button_method_name:
model = self.pool.get(this.visibility_model_name)
if not getattr(model, this.visible_button_method_name)(
cr, uid, context=context):
continue
menu = this.menu_id.name if this.menu_id else False
if menu not in res.keys():
res[menu] = []
val = get_action(this.action_to_open_id.id)
val.update({'name': this.name})
res[menu].append(val)
return res
class IrActionsActWindow(osv.Model):
_inherit = 'ir.actions.act_window'
_columns = {
'buttons_ids': fields.one2many('ir.actions.act_window.button',
'action_from_id', 'Buttons'),
}
def get_menus_and_buttons(self, cr, uid, ids, context=None):
res = {}
button = self.pool.get('ir.actions.act_window.button')
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = button.format_buttons(
cr, uid, [x.id for x in this.buttons_ids], context=context)
return res
| get_action | identifier_name |
lib.rs | // Copyright (C) 2015, Alberto Corona <[email protected]>
// All rights reserved. This file is part of core-utils, distributed under the
// GPL v3 license. For full terms please see the LICENSE file.
#![crate_type = "lib"]
#![feature(path_relative_from,exit_status)]
extern crate term;
use std::io::prelude::Write;
use std::process;
use std::env;
use std::path::{PathBuf,Path};
pub enum Status {
Ok,
Error,
OptError,
ArgError,
}
pub fn exit(status: Status) {
process::exit(status as i32);
}
pub fn set_exit_status(status: Status) {
env::set_exit_status(status as i32);
}
pub fn path_err(status: Status, mesg: String, item: PathBuf) |
pub fn err(prog: &str, status: Status, mesg: String) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n", prog, mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
}
pub fn copyright(prog: &str, vers: &str, yr: &str, auth: Vec<&str>) {
print!("{} (core-utils) {}\n\
Copyright (C) {} core-utils developers\n\
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.\n\
This is free software: you are free to change and redistribute it.\n\
There is NO WARRANTY, to the extent permitted by law.\n\n", prog, vers, yr);
print!("Written by ");
for pers in auth.iter() {
print!("{} ", pers);
}
print!("\n");
}
pub fn prog_try(prog: &str) {
println!("{}: Missing arguments\n\
Try '{} --help' for more information", prog, prog);
set_exit_status(Status::ArgError);
}
pub trait PathMod {
fn last_component(&self) -> PathBuf;
fn first_component(&self) -> PathBuf;
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf;
}
impl PathMod for PathBuf {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
impl PathMod for Path {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
| {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n",item.display(), mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
} | identifier_body |
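For readers more at home in Python, the PathMod helpers above map roughly onto pathlib. This is an illustrative analogue only, not a drop-in replacement for the Rust trait:

```python
from pathlib import PurePosixPath

def last_component(p):
    return PurePosixPath(p).name

def rel_to(p, base):
    try:
        return str(PurePosixPath(p).relative_to(base))
    except ValueError:            # mirrors unwrap_or(&PathBuf::new())
        return ''

print(last_component('/usr/local/bin'))  # bin
print(rel_to('/usr/local/bin', '/usr'))  # local/bin
```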
lib.rs | // Copyright (C) 2015, Alberto Corona <[email protected]>
// All rights reserved. This file is part of core-utils, distributed under the
// GPL v3 license. For full terms please see the LICENSE file.
#![crate_type = "lib"]
#![feature(path_relative_from,exit_status)]
extern crate term;
use std::io::prelude::Write;
use std::process;
use std::env;
use std::path::{PathBuf,Path};
pub enum Status {
Ok,
Error,
OptError,
ArgError,
}
pub fn exit(status: Status) {
process::exit(status as i32);
}
pub fn set_exit_status(status: Status) {
env::set_exit_status(status as i32);
}
pub fn path_err(status: Status, mesg: String, item: PathBuf) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n",item.display(), mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => | ,
};
}
pub fn err(prog: &str, status: Status, mesg: String) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n", prog, mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
}
pub fn copyright(prog: &str, vers: &str, yr: &str, auth: Vec<&str>) {
print!("{} (core-utils) {}\n\
Copyright (C) {} core-utils developers\n\
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.\n\
This is free software: you are free to change and redistribute it.\n\
There is NO WARRANTY, to the extent permitted by law.\n\n", prog, vers, yr);
print!("Written by ");
for pers in auth.iter() {
print!("{} ", pers);
}
print!("\n");
}
pub fn prog_try(prog: &str) {
println!("{}: Missing arguments\n\
Try '{} --help' for more information", prog, prog);
set_exit_status(Status::ArgError);
}
pub trait PathMod {
fn last_component(&self) -> PathBuf;
fn first_component(&self) -> PathBuf;
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf;
}
impl PathMod for PathBuf {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
impl PathMod for Path {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
| {} | conditional_block |
lib.rs | // Copyright (C) 2015, Alberto Corona <[email protected]>
// All rights reserved. This file is part of core-utils, distributed under the
// GPL v3 license. For full terms please see the LICENSE file.
#![crate_type = "lib"]
#![feature(path_relative_from,exit_status)]
extern crate term;
use std::io::prelude::Write;
use std::process; | use std::env;
use std::path::{PathBuf,Path};
pub enum Status {
Ok,
Error,
OptError,
ArgError,
}
pub fn exit(status: Status) {
process::exit(status as i32);
}
pub fn set_exit_status(status: Status) {
env::set_exit_status(status as i32);
}
pub fn path_err(status: Status, mesg: String, item: PathBuf) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n",item.display(), mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
}
pub fn err(prog: &str, status: Status, mesg: String) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n", prog, mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
}
pub fn copyright(prog: &str, vers: &str, yr: &str, auth: Vec<&str>) {
print!("{} (core-utils) {}\n\
Copyright (C) {} core-utils developers\n\
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.\n\
This is free software: you are free to change and redistribute it.\n\
There is NO WARRANTY, to the extent permitted by law.\n\n", prog, vers, yr);
print!("Written by ");
for pers in auth.iter() {
print!("{} ", pers);
}
print!("\n");
}
pub fn prog_try(prog: &str) {
println!("{}: Missing arguments\n\
Try '{} --help' for more information", prog, prog);
set_exit_status(Status::ArgError);
}
pub trait PathMod {
fn last_component(&self) -> PathBuf;
fn first_component(&self) -> PathBuf;
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf;
}
impl PathMod for PathBuf {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
impl PathMod for Path {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
} | random_line_split |
|
lib.rs | // Copyright (C) 2015, Alberto Corona <[email protected]>
// All rights reserved. This file is part of core-utils, distributed under the
// GPL v3 license. For full terms please see the LICENSE file.
#![crate_type = "lib"]
#![feature(path_relative_from,exit_status)]
extern crate term;
use std::io::prelude::Write;
use std::process;
use std::env;
use std::path::{PathBuf,Path};
pub enum Status {
Ok,
Error,
OptError,
ArgError,
}
pub fn exit(status: Status) {
process::exit(status as i32);
}
pub fn set_exit_status(status: Status) {
env::set_exit_status(status as i32);
}
pub fn path_err(status: Status, mesg: String, item: PathBuf) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n",item.display(), mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
}
pub fn err(prog: &str, status: Status, mesg: String) {
match term::stdout() {
Some(mut term) => {
term.fg(term::color::RED).unwrap();
(write!(term, "{}: {}\n", prog, mesg)).unwrap();
term.reset().unwrap();
exit(status);
}
None => {},
};
}
pub fn copyright(prog: &str, vers: &str, yr: &str, auth: Vec<&str>) {
print!("{} (core-utils) {}\n\
Copyright (C) {} core-utils developers\n\
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.\n\
This is free software: you are free to change and redistribute it.\n\
There is NO WARRANTY, to the extent permitted by law.\n\n", prog, vers, yr);
print!("Written by ");
for pers in auth.iter() {
print!("{} ", pers);
}
print!("\n");
}
pub fn prog_try(prog: &str) {
println!("{}: Missing arguments\n\
Try '{} --help' for more information", prog, prog);
set_exit_status(Status::ArgError);
}
pub trait PathMod {
fn last_component(&self) -> PathBuf;
fn first_component(&self) -> PathBuf;
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf;
}
impl PathMod for PathBuf {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn rel_to(&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
impl PathMod for Path {
fn last_component(&self) -> PathBuf {
let last = match self.components().last() {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return last;
}
fn first_component(&self) -> PathBuf {
let first = match self.components().nth(0) {
Some(s) => { PathBuf::from(s.as_os_str()) },
None => { PathBuf::new() },
};
return first;
}
fn | (&self, rel_from: &PathBuf) -> PathBuf {
self.relative_from(&rel_from).unwrap_or(&PathBuf::new()).to_path_buf()
}
}
| rel_to | identifier_name |
ShiftAction.js | "use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function | () { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var Action_1 = require("../Action");
var ShiftAction = (function (_super) {
__extends(ShiftAction, _super);
function ShiftAction(_a) {
var time = _a.time, container = _a.container, _b = _a.n, n = _b === void 0 ? 1 : _b;
var _this = _super.call(this, { time: time }) || this;
_this.container = container;
_this.n = n;
return _this;
}
ShiftAction.prototype.reduce = function (state) {
var fn = this.fn(state).bind(state);
return fn({
n: this.n,
time: this.time,
container: this.container
});
};
return ShiftAction;
}(Action_1.default));
exports.default = ShiftAction;
//# sourceMappingURL=ShiftAction.js.map | __ | identifier_name |
ShiftAction.js | "use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var Action_1 = require("../Action");
var ShiftAction = (function (_super) {
__extends(ShiftAction, _super);
function ShiftAction(_a) {
var time = _a.time, container = _a.container, _b = _a.n, n = _b === void 0 ? 1 : _b;
var _this = _super.call(this, { time: time }) || this;
_this.container = container; | return fn({
n: this.n,
time: this.time,
container: this.container
});
};
return ShiftAction;
}(Action_1.default));
exports.default = ShiftAction;
//# sourceMappingURL=ShiftAction.js.map | _this.n = n;
return _this;
}
ShiftAction.prototype.reduce = function (state) {
var fn = this.fn(state).bind(state); | random_line_split |
ShiftAction.js | "use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var Action_1 = require("../Action");
var ShiftAction = (function (_super) {
__extends(ShiftAction, _super);
function ShiftAction(_a) |
ShiftAction.prototype.reduce = function (state) {
var fn = this.fn(state).bind(state);
return fn({
n: this.n,
time: this.time,
container: this.container
});
};
return ShiftAction;
}(Action_1.default));
exports.default = ShiftAction;
//# sourceMappingURL=ShiftAction.js.map | {
var time = _a.time, container = _a.container, _b = _a.n, n = _b === void 0 ? 1 : _b;
var _this = _super.call(this, { time: time }) || this;
_this.container = container;
_this.n = n;
return _this;
} | identifier_body |
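Stripped of the compiled __extends boilerplate, ShiftAction is just an action object whose reduce delegates to a handler resolved from the state. A rough Python rendering of that shape, with all names hypothetical:

```python
class Action(object):
    def __init__(self, time):
        self.time = time

class ShiftAction(Action):
    def __init__(self, time, container, n=1):
        super(ShiftAction, self).__init__(time)
        self.container = container
        self.n = n

    def reduce(self, state):
        # the TS original resolves a bound handler via this.fn(state)
        return state.shift(n=self.n, time=self.time, container=self.container)

class State(object):
    def shift(self, n, time, container):
        return {'shifted': n, 'at': time, 'from': container}

print(ShiftAction(time=0, container='deck', n=2).reduce(State()))
```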
run.py | #!/usr/bin/env python
"""
Goal: Implement the application entry point.
@authors:
Andrei Sura <[email protected]>
"""
import argparse
from olass.olass_client import OlassClient
from olass.version import __version__
DEFAULT_SETTINGS_FILE = 'config/settings.py'
def | ():
""" Read args """
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
default=False,
action='store_true',
help="Show the version number")
parser.add_argument("-c", "--config",
default=DEFAULT_SETTINGS_FILE,
help="Application config file")
parser.add_argument('--interactive',
default=True,
help="When `true` ask for confirmation")
parser.add_argument('--rows',
default=100,
help="Number of rows/batch sent to the server")
args = parser.parse_args()
if args.version:
import sys
print("olass, version {}".format(__version__))
sys.exit()
app = OlassClient(config_file=args.config,
interactive=args.interactive,
rows_per_batch=args.rows)
app.run()
if __name__ == "__main__":
main()
| main | identifier_name |
run.py | #!/usr/bin/env python
"""
Goal: Implement the application entry point.
@authors:
Andrei Sura <[email protected]>
"""
import argparse
from olass.olass_client import OlassClient
from olass.version import __version__
DEFAULT_SETTINGS_FILE = 'config/settings.py'
def main():
""" Read args """
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
default=False,
action='store_true',
help="Show the version number")
parser.add_argument("-c", "--config",
default=DEFAULT_SETTINGS_FILE,
help="Application config file")
parser.add_argument('--interactive',
default=True,
help="When `true` ask for confirmation")
parser.add_argument('--rows',
default=100,
help="Number of rows/batch sent to the server")
args = parser.parse_args()
if args.version:
import sys
print("olass, version {}".format(__version__))
sys.exit()
app = OlassClient(config_file=args.config,
interactive=args.interactive,
rows_per_batch=args.rows)
app.run()
if __name__ == "__main__":
| main() | conditional_block |
|
run.py | #!/usr/bin/env python
"""
Goal: Implement the application entry point.
@authors:
Andrei Sura <[email protected]>
"""
import argparse
from olass.olass_client import OlassClient
from olass.version import __version__
DEFAULT_SETTINGS_FILE = 'config/settings.py'
def main():
""" Read args """
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
default=False,
action='store_true',
help="Show the version number")
parser.add_argument("-c", "--config",
default=DEFAULT_SETTINGS_FILE,
help="Application config file")
parser.add_argument('--interactive',
default=True,
help="When `true` ask for confirmation")
parser.add_argument('--rows',
default=100,
help="Number of rows/batch sent to the server")
args = parser.parse_args()
if args.version:
import sys
print("olass, version {}".format(__version__))
sys.exit()
| interactive=args.interactive,
rows_per_batch=args.rows)
app.run()
if __name__ == "__main__":
main() | app = OlassClient(config_file=args.config, | random_line_split |
run.py | #!/usr/bin/env python
"""
Goal: Implement the application entry point.
@authors:
Andrei Sura <[email protected]>
"""
import argparse
from olass.olass_client import OlassClient
from olass.version import __version__
DEFAULT_SETTINGS_FILE = 'config/settings.py'
def main():
| import sys
print("olass, version {}".format(__version__))
sys.exit()
app = OlassClient(config_file=args.config,
interactive=args.interactive,
rows_per_batch=args.rows)
app.run()
if __name__ == "__main__":
main()
| """ Read args """
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
default=False,
action='store_true',
help="Show the version number")
parser.add_argument("-c", "--config",
default=DEFAULT_SETTINGS_FILE,
help="Application config file")
parser.add_argument('--interactive',
default=True,
help="When `true` ask for confirmation")
parser.add_argument('--rows',
default=100,
help="Number of rows/batch sent to the server")
args = parser.parse_args()
if args.version: | identifier_body |
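One quirk worth noting in the argparse setup above: --rows defaults to the integer 100 but arrives as the string '250' when passed on the command line, because no type is declared. A small self-contained check, with type=int added as the likely intended fix (our addition, not in the original):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', default='config/settings.py')
parser.add_argument('--rows', default=100, type=int)  # type=int is our addition
args = parser.parse_args(['--rows', '250'])
print(args.config, args.rows)  # config/settings.py 250
```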
__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from sqlalchemy import engine_from_config
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import configure_mappers
import zope.sqlalchemy
# import or define all models here to ensure they are attached to the
# Base.metadata prior to any initialization routines
#from .mymodel import MyModel # noqa
from .user import User
from .event import Event
from .pick import Pick
# import all models so that they are in scope for configure_mappers
# run configure_mappers after defining all of the models to ensure
# all relationships can be setup
configure_mappers()
def get_engine(settings, prefix='sqlalchemy.'):
return engine_from_config(settings, prefix)
def get_session_factory(engine):
factory = sessionmaker()
factory.configure(bind=engine)
return factory
def get_tm_session(session_factory, transaction_manager):
""" | Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.
This function will hook the session to the transaction manager which
will take care of committing any changes.
- When using pyramid_tm it will automatically be committed or aborted
depending on whether an exception is raised.
- When using scripts you should wrap the session in a manager yourself.
For example::
import transaction
engine = get_engine(settings)
session_factory = get_session_factory(engine)
with transaction.manager:
dbsession = get_tm_session(session_factory, transaction.manager)
"""
dbsession = session_factory()
zope.sqlalchemy.register(
dbsession, transaction_manager=transaction_manager)
return dbsession
def includeme(config):
"""
Initialize the model for a Pyramid app.
Activate this setup using ``config.include('survivor-pool.models')``.
"""
settings = config.get_settings()
# use pyramid_tm to hook the transaction lifecycle to the request
config.include('pyramid_tm')
session_factory = get_session_factory(get_engine(settings))
config.registry['dbsession_factory'] = session_factory
# make request.dbsession available for use in Pyramid
config.add_request_method(
# r.tm is the transaction manager used by pyramid_tm
lambda r: get_tm_session(session_factory, r.tm),
'dbsession',
reify=True
) | random_line_split |
|
__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from sqlalchemy import engine_from_config
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import configure_mappers
import zope.sqlalchemy
# import or define all models here to ensure they are attached to the
# Base.metadata prior to any initialization routines
#from .mymodel import MyModel # noqa
from .user import User
from .event import Event
from .pick import Pick
# import all models so that they are in scope for configure_mappers
# run configure_mappers after defining all of the models to ensure
# all relationships can be setup
configure_mappers()
def get_engine(settings, prefix='sqlalchemy.'):
return engine_from_config(settings, prefix)
def get_session_factory(engine):
factory = sessionmaker()
factory.configure(bind=engine)
return factory
def get_tm_session(session_factory, transaction_manager):
"""
Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.
This function will hook the session to the transaction manager which
will take care of committing any changes.
- When using pyramid_tm it will automatically be committed or aborted
depending on whether an exception is raised.
- When using scripts you should wrap the session in a manager yourself.
For example::
import transaction
engine = get_engine(settings)
session_factory = get_session_factory(engine)
with transaction.manager:
dbsession = get_tm_session(session_factory, transaction.manager)
"""
dbsession = session_factory()
zope.sqlalchemy.register(
dbsession, transaction_manager=transaction_manager)
return dbsession
def | (config):
"""
Initialize the model for a Pyramid app.
Activate this setup using ``config.include('survivor-pool.models')``.
"""
settings = config.get_settings()
# use pyramid_tm to hook the transaction lifecycle to the request
config.include('pyramid_tm')
session_factory = get_session_factory(get_engine(settings))
config.registry['dbsession_factory'] = session_factory
# make request.dbsession available for use in Pyramid
config.add_request_method(
# r.tm is the transaction manager used by pyramid_tm
lambda r: get_tm_session(session_factory, r.tm),
'dbsession',
reify=True
)
| includeme | identifier_name |
__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from sqlalchemy import engine_from_config
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import configure_mappers
import zope.sqlalchemy
# import or define all models here to ensure they are attached to the
# Base.metadata prior to any initialization routines
#from .mymodel import MyModel # noqa
from .user import User
from .event import Event
from .pick import Pick
# import all models so that they are in scope for configure_mappers
# run configure_mappers after defining all of the models to ensure
# all relationships can be setup
configure_mappers()
def get_engine(settings, prefix='sqlalchemy.'):
return engine_from_config(settings, prefix)
def get_session_factory(engine):
|
def get_tm_session(session_factory, transaction_manager):
"""
Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.
This function will hook the session to the transaction manager which
will take care of committing any changes.
- When using pyramid_tm it will automatically be committed or aborted
depending on whether an exception is raised.
- When using scripts you should wrap the session in a manager yourself.
For example::
import transaction
engine = get_engine(settings)
session_factory = get_session_factory(engine)
with transaction.manager:
dbsession = get_tm_session(session_factory, transaction.manager)
"""
dbsession = session_factory()
zope.sqlalchemy.register(
dbsession, transaction_manager=transaction_manager)
return dbsession
def includeme(config):
"""
Initialize the model for a Pyramid app.
Activate this setup using ``config.include('survivor-pool.models')``.
"""
settings = config.get_settings()
# use pyramid_tm to hook the transaction lifecycle to the request
config.include('pyramid_tm')
session_factory = get_session_factory(get_engine(settings))
config.registry['dbsession_factory'] = session_factory
# make request.dbsession available for use in Pyramid
config.add_request_method(
# r.tm is the transaction manager used by pyramid_tm
lambda r: get_tm_session(session_factory, r.tm),
'dbsession',
reify=True
)
| factory = sessionmaker()
factory.configure(bind=engine)
return factory | identifier_body |
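The get_tm_session docstring describes script-style usage; expanded into a self-contained sketch, assuming SQLAlchemy, zope.sqlalchemy and transaction are installed, and using a hypothetical in-memory SQLite URL in place of real settings:

```python
import transaction
import zope.sqlalchemy
from sqlalchemy import engine_from_config
from sqlalchemy.orm import sessionmaker

settings = {'sqlalchemy.url': 'sqlite://'}  # hypothetical settings dict
factory = sessionmaker(bind=engine_from_config(settings))
with transaction.manager:
    dbsession = factory()
    zope.sqlalchemy.register(dbsession, transaction_manager=transaction.manager)
    # work with dbsession here; commit/abort is handled by the manager
```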
Resurgence.tsx | import { Trans } from '@lingui/macro';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS';
import HIT_TYPES from 'game/HIT_TYPES';
import { SpellLink } from 'interface';
import { SpellIcon } from 'interface';
import ManaIcon from 'interface/icons/Mana';
import Analyzer, { Options, SELECTED_PLAYER } from 'parser/core/Analyzer';
import Events, { EnergizeEvent, HealEvent } from 'parser/core/Events';
import ManaTracker from 'parser/core/healingEfficiency/ManaTracker';
import BoringValue from 'parser/ui/BoringValueText';
import Statistic from 'parser/ui/Statistic';
import { STATISTIC_ORDER } from 'parser/ui/StatisticBox';
import React from 'react';
import './ManaTideTotem.scss';
import ManaTideTotem, { MANA_REGEN_PER_SECOND } from './ManaTideTotem';
import WaterShield from './WaterShield';
const SPELLS_PROCCING_RESURGENCE = {
[SPELLS.HEALING_SURGE.id]: 0.006,
[SPELLS.HEALING_WAVE.id]: 0.01,
[SPELLS.CHAIN_HEAL.id]: 0.0025,
[SPELLS.UNLEASH_LIFE_TALENT.id]: 0.006,
[SPELLS.RIPTIDE.id]: 0.006,
};
interface ResurgenceInfo {
spellId: number;
resurgenceTotal: number;
castAmount: number;
}
class Resurgence extends Analyzer {
static dependencies = {
manaTracker: ManaTracker,
manaTideTotem: ManaTideTotem,
waterShield: WaterShield,
};
protected manaTracker!: ManaTracker;
protected manaTideTotem!: ManaTideTotem;
protected waterShield!: WaterShield;
otherManaGain = 0;
resurgence: ResurgenceInfo[] = [];
totalResurgenceGain = 0;
constructor(options: Options) {
super(options);
this.addEventListener(
Events.heal
.by(SELECTED_PLAYER)
.spell([
SPELLS.HEALING_SURGE, | ]),
this.onRelevantHeal,
);
this.addEventListener(
Events.energize.to(SELECTED_PLAYER).spell(SPELLS.RESURGENCE),
this.onResurgenceProc,
);
}
onRelevantHeal(event: HealEvent) {
if (event.tick) {
return;
}
const spellId = event.ability.guid;
if (!this.resurgence[spellId]) {
this.resurgence[spellId] = {
spellId: spellId,
resurgenceTotal: 0,
castAmount: 0,
};
}
if (event.hitType === HIT_TYPES.CRIT) {
this.resurgence[spellId].resurgenceTotal +=
SPELLS_PROCCING_RESURGENCE[spellId] * this.manaTracker.maxResource;
this.resurgence[spellId].castAmount += 1;
}
}
onResurgenceProc(event: EnergizeEvent) {
const spellId = event.ability.guid;
if (spellId !== SPELLS.RESURGENCE.id) {
this.otherManaGain += event.resourceChange;
return;
}
this.totalResurgenceGain += event.resourceChange;
}
get totalMana() {
const naturalManaRegen = (this.owner.fightDuration / 1000) * MANA_REGEN_PER_SECOND;
const mttMana = this.manaTideTotem.regenOnPlayer;
const wsMana = this.waterShield.regenOnPlayer;
return (
naturalManaRegen +
this.totalResurgenceGain +
this.manaTracker.maxResource +
this.otherManaGain +
mttMana +
wsMana
);
}
statistic() {
return (
<Statistic
position={STATISTIC_ORDER.UNIMPORTANT(90)}
size="flexible"
dropdown={
<>
<div>
<Trans id="shaman.restoration.resurgence.statistic.table.description">
<SpellLink id={SPELLS.RESURGENCE.id} iconStyle={{ height: '1.25em' }} /> accounted
for {formatPercentage(this.totalResurgenceGain / this.totalMana, 0)}% of your mana
pool ({formatNumber(this.totalMana)} mana).
</Trans>
</div>
<table className="table table-condensed">
<thead>
<tr>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.spell">
Spell
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.amount">
Amount
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.crits">
Crits
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.mana">
% of mana
</Trans>
</th>
</tr>
</thead>
<tbody>
{this.resurgence.map((spell) => (
<tr key={spell.spellId}>
<th scope="row">
<SpellIcon id={spell.spellId} style={{ height: '2.5em' }} />
</th>
<td>{formatNumber(spell.resurgenceTotal)}</td>
<td>{formatNumber(spell.castAmount)}</td>
<td>{formatPercentage(spell.resurgenceTotal / this.totalMana)}%</td>
</tr>
))}
</tbody>
</table>
</>
}
>
<BoringValue label={<SpellLink id={SPELLS.RESURGENCE.id} />}>
<div className="flex mtt-value">
<div className="flex-sub icon">
<ManaIcon />
</div>
<div className="flex-main value">
{formatNumber(this.totalResurgenceGain)}
<br />
<small>
<Trans id="shaman.restoration.resurgence.statistic.label">
Mana gained from Resurgence
</Trans>
</small>
</div>
</div>
</BoringValue>
</Statistic>
);
}
}
export default Resurgence; | SPELLS.HEALING_WAVE,
SPELLS.CHAIN_HEAL,
SPELLS.UNLEASH_LIFE_TALENT,
SPELLS.RIPTIDE, | random_line_split |
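As a worked example of the proc table above: each SPELLS_PROCCING_RESURGENCE entry is the fraction of maximum mana refunded when that heal crits, so with an assumed 50,000-point mana pool a Healing Wave crit restores 0.01 × 50,000 = 500 mana, while a Chain Heal crit restores only 0.0025 × 50,000 = 125. (The 50,000 figure is illustrative, not taken from the code.)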
Resurgence.tsx | import { Trans } from '@lingui/macro';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS';
import HIT_TYPES from 'game/HIT_TYPES';
import { SpellLink } from 'interface';
import { SpellIcon } from 'interface';
import ManaIcon from 'interface/icons/Mana';
import Analyzer, { Options, SELECTED_PLAYER } from 'parser/core/Analyzer';
import Events, { EnergizeEvent, HealEvent } from 'parser/core/Events';
import ManaTracker from 'parser/core/healingEfficiency/ManaTracker';
import BoringValue from 'parser/ui/BoringValueText';
import Statistic from 'parser/ui/Statistic';
import { STATISTIC_ORDER } from 'parser/ui/StatisticBox';
import React from 'react';
import './ManaTideTotem.scss';
import ManaTideTotem, { MANA_REGEN_PER_SECOND } from './ManaTideTotem';
import WaterShield from './WaterShield';
const SPELLS_PROCCING_RESURGENCE = {
[SPELLS.HEALING_SURGE.id]: 0.006,
[SPELLS.HEALING_WAVE.id]: 0.01,
[SPELLS.CHAIN_HEAL.id]: 0.0025,
[SPELLS.UNLEASH_LIFE_TALENT.id]: 0.006,
[SPELLS.RIPTIDE.id]: 0.006,
};
interface ResurgenceInfo {
spellId: number;
resurgenceTotal: number;
castAmount: number;
}
class Resurgence extends Analyzer {
static dependencies = {
manaTracker: ManaTracker,
manaTideTotem: ManaTideTotem,
waterShield: WaterShield,
};
protected manaTracker!: ManaTracker;
protected manaTideTotem!: ManaTideTotem;
protected waterShield!: WaterShield;
otherManaGain = 0;
resurgence: ResurgenceInfo[] = [];
totalResurgenceGain = 0;
constructor(options: Options) {
super(options);
this.addEventListener(
Events.heal
.by(SELECTED_PLAYER)
.spell([
SPELLS.HEALING_SURGE,
SPELLS.HEALING_WAVE,
SPELLS.CHAIN_HEAL,
SPELLS.UNLEASH_LIFE_TALENT,
SPELLS.RIPTIDE,
]),
this.onRelevantHeal,
);
this.addEventListener(
Events.energize.to(SELECTED_PLAYER).spell(SPELLS.RESURGENCE),
this.onResurgenceProc,
);
}
onRelevantHeal(event: HealEvent) {
if (event.tick) {
return;
}
const spellId = event.ability.guid;
if (!this.resurgence[spellId]) |
if (event.hitType === HIT_TYPES.CRIT) {
this.resurgence[spellId].resurgenceTotal +=
SPELLS_PROCCING_RESURGENCE[spellId] * this.manaTracker.maxResource;
this.resurgence[spellId].castAmount += 1;
}
}
onResurgenceProc(event: EnergizeEvent) {
const spellId = event.ability.guid;
if (spellId !== SPELLS.RESURGENCE.id) {
this.otherManaGain += event.resourceChange;
return;
}
this.totalResurgenceGain += event.resourceChange;
}
get totalMana() {
const naturalManaRegen = (this.owner.fightDuration / 1000) * MANA_REGEN_PER_SECOND;
const mttMana = this.manaTideTotem.regenOnPlayer;
const wsMana = this.waterShield.regenOnPlayer;
return (
naturalManaRegen +
this.totalResurgenceGain +
this.manaTracker.maxResource +
this.otherManaGain +
mttMana +
wsMana
);
}
statistic() {
return (
<Statistic
position={STATISTIC_ORDER.UNIMPORTANT(90)}
size="flexible"
dropdown={
<>
<div>
<Trans id="shaman.restoration.resurgence.statistic.table.description">
<SpellLink id={SPELLS.RESURGENCE.id} iconStyle={{ height: '1.25em' }} /> accounted
for {formatPercentage(this.totalResurgenceGain / this.totalMana, 0)}% of your mana
pool ({formatNumber(this.totalMana)} mana).
</Trans>
</div>
<table className="table table-condensed">
<thead>
<tr>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.spell">
Spell
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.amount">
Amount
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.crits">
Crits
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.mana">
% of mana
</Trans>
</th>
</tr>
</thead>
<tbody>
{this.resurgence.map((spell) => (
<tr key={spell.spellId}>
<th scope="row">
<SpellIcon id={spell.spellId} style={{ height: '2.5em' }} />
</th>
<td>{formatNumber(spell.resurgenceTotal)}</td>
<td>{formatNumber(spell.castAmount)}</td>
<td>{formatPercentage(spell.resurgenceTotal / this.totalMana)}%</td>
</tr>
))}
</tbody>
</table>
</>
}
>
<BoringValue label={<SpellLink id={SPELLS.RESURGENCE.id} />}>
<div className="flex mtt-value">
<div className="flex-sub icon">
<ManaIcon />
</div>
<div className="flex-main value">
{formatNumber(this.totalResurgenceGain)}
<br />
<small>
<Trans id="shaman.restoration.resurgence.statistic.label">
Mana gained from Resurgence
</Trans>
</small>
</div>
</div>
</BoringValue>
</Statistic>
);
}
}
export default Resurgence;
| {
this.resurgence[spellId] = {
spellId: spellId,
resurgenceTotal: 0,
castAmount: 0,
};
} | conditional_block |
Resurgence.tsx | import { Trans } from '@lingui/macro';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS';
import HIT_TYPES from 'game/HIT_TYPES';
import { SpellLink } from 'interface';
import { SpellIcon } from 'interface';
import ManaIcon from 'interface/icons/Mana';
import Analyzer, { Options, SELECTED_PLAYER } from 'parser/core/Analyzer';
import Events, { EnergizeEvent, HealEvent } from 'parser/core/Events';
import ManaTracker from 'parser/core/healingEfficiency/ManaTracker';
import BoringValue from 'parser/ui/BoringValueText';
import Statistic from 'parser/ui/Statistic';
import { STATISTIC_ORDER } from 'parser/ui/StatisticBox';
import React from 'react';
import './ManaTideTotem.scss';
import ManaTideTotem, { MANA_REGEN_PER_SECOND } from './ManaTideTotem';
import WaterShield from './WaterShield';
const SPELLS_PROCCING_RESURGENCE = {
[SPELLS.HEALING_SURGE.id]: 0.006,
[SPELLS.HEALING_WAVE.id]: 0.01,
[SPELLS.CHAIN_HEAL.id]: 0.0025,
[SPELLS.UNLEASH_LIFE_TALENT.id]: 0.006,
[SPELLS.RIPTIDE.id]: 0.006,
};
interface ResurgenceInfo {
spellId: number;
resurgenceTotal: number;
castAmount: number;
}
class Resurgence extends Analyzer {
static dependencies = {
manaTracker: ManaTracker,
manaTideTotem: ManaTideTotem,
waterShield: WaterShield,
};
protected manaTracker!: ManaTracker;
protected manaTideTotem!: ManaTideTotem;
protected waterShield!: WaterShield;
otherManaGain = 0;
resurgence: ResurgenceInfo[] = [];
totalResurgenceGain = 0;
| (options: Options) {
super(options);
this.addEventListener(
Events.heal
.by(SELECTED_PLAYER)
.spell([
SPELLS.HEALING_SURGE,
SPELLS.HEALING_WAVE,
SPELLS.CHAIN_HEAL,
SPELLS.UNLEASH_LIFE_TALENT,
SPELLS.RIPTIDE,
]),
this.onRelevantHeal,
);
this.addEventListener(
Events.energize.to(SELECTED_PLAYER).spell(SPELLS.RESURGENCE),
this.onResurgenceProc,
);
}
onRelevantHeal(event: HealEvent) {
if (event.tick) {
return;
}
const spellId = event.ability.guid;
if (!this.resurgence[spellId]) {
this.resurgence[spellId] = {
spellId: spellId,
resurgenceTotal: 0,
castAmount: 0,
};
}
if (event.hitType === HIT_TYPES.CRIT) {
this.resurgence[spellId].resurgenceTotal +=
SPELLS_PROCCING_RESURGENCE[spellId] * this.manaTracker.maxResource;
this.resurgence[spellId].castAmount += 1;
}
}
onResurgenceProc(event: EnergizeEvent) {
const spellId = event.ability.guid;
if (spellId !== SPELLS.RESURGENCE.id) {
this.otherManaGain += event.resourceChange;
return;
}
this.totalResurgenceGain += event.resourceChange;
}
get totalMana() {
const naturalManaRegen = (this.owner.fightDuration / 1000) * MANA_REGEN_PER_SECOND;
const mttMana = this.manaTideTotem.regenOnPlayer;
const wsMana = this.waterShield.regenOnPlayer;
return (
naturalManaRegen +
this.totalResurgenceGain +
this.manaTracker.maxResource +
this.otherManaGain +
mttMana +
wsMana
);
}
statistic() {
return (
<Statistic
position={STATISTIC_ORDER.UNIMPORTANT(90)}
size="flexible"
dropdown={
<>
<div>
<Trans id="shaman.restoration.resurgence.statistic.table.description">
<SpellLink id={SPELLS.RESURGENCE.id} iconStyle={{ height: '1.25em' }} /> accounted
for {formatPercentage(this.totalResurgenceGain / this.totalMana, 0)}% of your mana
pool ({formatNumber(this.totalMana)} mana).
</Trans>
</div>
<table className="table table-condensed">
<thead>
<tr>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.spell">
Spell
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.amount">
Amount
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.crits">
Crits
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.mana">
% of mana
</Trans>
</th>
</tr>
</thead>
<tbody>
{this.resurgence.map((spell) => (
<tr key={spell.spellId}>
<th scope="row">
<SpellIcon id={spell.spellId} style={{ height: '2.5em' }} />
</th>
<td>{formatNumber(spell.resurgenceTotal)}</td>
<td>{formatNumber(spell.castAmount)}</td>
<td>{formatPercentage(spell.resurgenceTotal / this.totalMana)}%</td>
</tr>
))}
</tbody>
</table>
</>
}
>
<BoringValue label={<SpellLink id={SPELLS.RESURGENCE.id} />}>
<div className="flex mtt-value">
<div className="flex-sub icon">
<ManaIcon />
</div>
<div className="flex-main value">
{formatNumber(this.totalResurgenceGain)}
<br />
<small>
<Trans id="shaman.restoration.resurgence.statistic.label">
Mana gained from Resurgence
</Trans>
</small>
</div>
</div>
</BoringValue>
</Statistic>
);
}
}
export default Resurgence;
| constructor | identifier_name |
Resurgence.tsx | import { Trans } from '@lingui/macro';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS';
import HIT_TYPES from 'game/HIT_TYPES';
import { SpellLink } from 'interface';
import { SpellIcon } from 'interface';
import ManaIcon from 'interface/icons/Mana';
import Analyzer, { Options, SELECTED_PLAYER } from 'parser/core/Analyzer';
import Events, { EnergizeEvent, HealEvent } from 'parser/core/Events';
import ManaTracker from 'parser/core/healingEfficiency/ManaTracker';
import BoringValue from 'parser/ui/BoringValueText';
import Statistic from 'parser/ui/Statistic';
import { STATISTIC_ORDER } from 'parser/ui/StatisticBox';
import React from 'react';
import './ManaTideTotem.scss';
import ManaTideTotem, { MANA_REGEN_PER_SECOND } from './ManaTideTotem';
import WaterShield from './WaterShield';
const SPELLS_PROCCING_RESURGENCE = {
[SPELLS.HEALING_SURGE.id]: 0.006,
[SPELLS.HEALING_WAVE.id]: 0.01,
[SPELLS.CHAIN_HEAL.id]: 0.0025,
[SPELLS.UNLEASH_LIFE_TALENT.id]: 0.006,
[SPELLS.RIPTIDE.id]: 0.006,
};
interface ResurgenceInfo {
spellId: number;
resurgenceTotal: number;
castAmount: number;
}
class Resurgence extends Analyzer {
static dependencies = {
manaTracker: ManaTracker,
manaTideTotem: ManaTideTotem,
waterShield: WaterShield,
};
protected manaTracker!: ManaTracker;
protected manaTideTotem!: ManaTideTotem;
protected waterShield!: WaterShield;
otherManaGain = 0;
resurgence: ResurgenceInfo[] = [];
totalResurgenceGain = 0;
constructor(options: Options) |
onRelevantHeal(event: HealEvent) {
if (event.tick) {
return;
}
const spellId = event.ability.guid;
if (!this.resurgence[spellId]) {
this.resurgence[spellId] = {
spellId: spellId,
resurgenceTotal: 0,
castAmount: 0,
};
}
if (event.hitType === HIT_TYPES.CRIT) {
this.resurgence[spellId].resurgenceTotal +=
SPELLS_PROCCING_RESURGENCE[spellId] * this.manaTracker.maxResource;
this.resurgence[spellId].castAmount += 1;
}
}
onResurgenceProc(event: EnergizeEvent) {
const spellId = event.ability.guid;
if (spellId !== SPELLS.RESURGENCE.id) {
this.otherManaGain += event.resourceChange;
return;
}
this.totalResurgenceGain += event.resourceChange;
}
get totalMana() {
const naturalManaRegen = (this.owner.fightDuration / 1000) * MANA_REGEN_PER_SECOND;
const mttMana = this.manaTideTotem.regenOnPlayer;
const wsMana = this.waterShield.regenOnPlayer;
return (
naturalManaRegen +
this.totalResurgenceGain +
this.manaTracker.maxResource +
this.otherManaGain +
mttMana +
wsMana
);
}
statistic() {
return (
<Statistic
position={STATISTIC_ORDER.UNIMPORTANT(90)}
size="flexible"
dropdown={
<>
<div>
<Trans id="shaman.restoration.resurgence.statistic.table.description">
<SpellLink id={SPELLS.RESURGENCE.id} iconStyle={{ height: '1.25em' }} /> accounted
for {formatPercentage(this.totalResurgenceGain / this.totalMana, 0)}% of your mana
pool ({formatNumber(this.totalMana)} mana).
</Trans>
</div>
<table className="table table-condensed">
<thead>
<tr>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.spell">
Spell
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.amount">
Amount
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.crits">
Crits
</Trans>
</th>
<th>
<Trans id="shaman.restoration.resurgence.statistic.tableHeader.mana">
% of mana
</Trans>
</th>
</tr>
</thead>
<tbody>
{this.resurgence.map((spell) => (
<tr key={spell.spellId}>
<th scope="row">
<SpellIcon id={spell.spellId} style={{ height: '2.5em' }} />
</th>
<td>{formatNumber(spell.resurgenceTotal)}</td>
<td>{formatNumber(spell.castAmount)}</td>
<td>{formatPercentage(spell.resurgenceTotal / this.totalMana)}%</td>
</tr>
))}
</tbody>
</table>
</>
}
>
<BoringValue label={<SpellLink id={SPELLS.RESURGENCE.id} />}>
<div className="flex mtt-value">
<div className="flex-sub icon">
<ManaIcon />
</div>
<div className="flex-main value">
{formatNumber(this.totalResurgenceGain)}
<br />
<small>
<Trans id="shaman.restoration.resurgence.statistic.label">
Mana gained from Resurgence
</Trans>
</small>
</div>
</div>
</BoringValue>
</Statistic>
);
}
}
export default Resurgence;
| {
super(options);
this.addEventListener(
Events.heal
.by(SELECTED_PLAYER)
.spell([
SPELLS.HEALING_SURGE,
SPELLS.HEALING_WAVE,
SPELLS.CHAIN_HEAL,
SPELLS.UNLEASH_LIFE_TALENT,
SPELLS.RIPTIDE,
]),
this.onRelevantHeal,
);
this.addEventListener(
Events.energize.to(SELECTED_PLAYER).spell(SPELLS.RESURGENCE),
this.onResurgenceProc,
);
} | identifier_body |
serve.js | var path = require('path');
var url = require('url');
var closure = require('closure-util');
var nomnom = require('nomnom');
var log = closure.log;
var options = nomnom.options({
port: {
abbr: 'p',
'default': 4000,
help: 'Port for incoming connections',
metavar: 'PORT'
},
loglevel: {
abbr: 'l',
choices: ['silly', 'verbose', 'info', 'warn', 'error'],
'default': 'info',
help: 'Log level',
metavar: 'LEVEL'
}
}).parse();
/** @type {string} */
log.level = options.loglevel;
log.info('ol3-cesium', 'Parsing dependencies ...');
var manager = new closure.Manager({
closure: true, // use the bundled Closure Library
lib: [
'src/**/*.js'
],
ignoreRequires: '^ol\\.'
});
manager.on('error', function(e) {
log.error('ol3-cesium', e.message);
});
manager.on('ready', function() {
var server = new closure.Server({
manager: manager,
loader: '/@loader'
});
server.listen(options.port, function() {
log.info('ol3-cesium', 'Listening on http://localhost:' +
options.port + '/ (Ctrl+C to stop)');
});
server.on('error', function(err) {
log.error('ol3-cesium', 'Server failed to start: ' + err.message); | }); | process.exit(1);
}); | random_line_split |
debug_runtime.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Graph debug runtime executes TVM debug packed functions."""
import os
import tempfile
import shutil
from tvm._ffi.base import string_types
from tvm._ffi.function import get_global_func
from tvm.contrib import graph_runtime
from tvm.ndarray import array
from tvm.rpc import base as rpc_base
from . import debug_result
_DUMP_ROOT_PREFIX = "tvmdbg_"
_DUMP_PATH_PREFIX = "_tvmdbg_"
def create(graph_json_str, libmod, ctx, dump_root=None):
"""Create a runtime executor module given a graph and module.
Parameters
----------
graph_json_str : str or graph class
The graph to be deployed in json format output by nnvm graph.
The graph can only contain one operator(tvm_op) that
points to the name of PackedFunc in the libmod.
libmod : tvm.Module
The module of the corresponding function.
ctx : TVMContext
The context to deploy the module, can be local or remote.
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
Returns
-------
graph_module : GraphModuleDebug
Debug Runtime graph module that can be used to execute the graph.
"""
if not isinstance(graph_json_str, string_types):
try:
graph_json_str = graph_json_str._tvm_graph_json()
except AttributeError:
raise ValueError("Type %s is not supported" % type(graph_json_str))
try:
fcreate = get_global_func("tvm.graph_runtime_debug.create")
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
if num_rpc_ctx == len(ctx):
libmod = rpc_base._ModuleHandle(libmod)
try:
fcreate = ctx[0]._rpc_sess.get_function(
"tvm.graph_runtime_debug.remote_create"
)
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
func_obj = fcreate(graph_json_str, libmod, *device_type_id)
return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
class GraphModuleDebug(graph_runtime.GraphModule):
"""Graph debug runtime module.
This is a debug wrapper over the TVM runtime.
Runtime interfaces are wrapped with debug functionalities.
Manage the debug framework to format the debug data and
trigger the user interfaces.
Parameters
----------
module : Module
        The internal tvm module that holds the actual graph functions.
ctx : TVMContext
The context this module is under.
graph_json_str : str or graph class
Content of graph json file in string format
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
"""
def __init__(self, module, ctx, graph_json_str, dump_root):
self._dump_root = dump_root
self._dump_path = None
self._get_output_by_layer = module["get_output_by_layer"]
self._run_individual = module["run_individual"]
graph_runtime.GraphModule.__init__(self, module)
self._create_debug_env(graph_json_str, ctx)
def _format_context(self, ctx):
return str(ctx[0]).upper().replace("(", ":").replace(")", "")
def _ensure_dir(self, directory):
"""Create a directory if not exists
Parameters
----------
directory : str
File path to create
"""
if not os.path.exists(directory):
os.makedirs(directory, 0o700)
def _get_dump_path(self, ctx):
"""Make the graph and tensor dump folder and return the path.
Parameters
----------
ctx : TVMContext
The context this module is under.
Returns
-------
path : str
Directory path where the graph and node outputs will be stored.
"""
        # build the folder name for this context's dump directory
folder_name = _DUMP_PATH_PREFIX + "ctx_"
folder_name = folder_name + ctx.replace(":", "_")
path = os.path.join(self._dump_root, folder_name)
self._ensure_dir(path)
return path
def _remove_dump_root(self):
if os.path.isdir(self._dump_root):
shutil.rmtree(self._dump_root)
def _create_debug_env(self, graph_json, ctx):
"""Create UI wrapper framework to handle multiple UI frontends for tvmdbg
Parameters
----------
graph_json : json format
        json formatted NNVM graph containing a list of each node's name, shape and type.
nodes_list : list
List of all the nodes presented in the graph
ctx : TVMContext
The context this module is under.
"""
# make the dump folder if not given
if not self._dump_root:
self._dump_root = tempfile.mkdtemp(prefix=_DUMP_ROOT_PREFIX)
# format the context
ctx = self._format_context(ctx)
# updates the dumping directories
self._dump_path = self._get_dump_path(ctx)
# init the debug dumping environment
self.debug_datum = debug_result.DebugResult(graph_json, self._dump_path)
def _run_debug(self):
"""Execute the node specified with index will be executed.
Each debug output will be copied to the buffer
Time consumed for each execution will be set as debug output.
"""
self.debug_datum._time_list = [
[float(t) * 1e-6] for t in self.run_individual(10, 1, 1)
]
for i, node in enumerate(self.debug_datum.get_graph_nodes()):
num_outputs = self.debug_datum.get_graph_node_output_num(node)
for j in range(num_outputs):
out_tensor = self._get_output_by_layer(i, j)
out_tensor = array(out_tensor)
self.debug_datum._output_tensor_list.append(out_tensor)
def debug_get_output(self, node, out):
"""Run graph up to node and get the output to out
Parameters
----------
node : int / str
The node index or name
out : NDArray
The output array container
"""
ret = None
if isinstance(node, str):
output_tensors = self.debug_datum.get_output_tensors()
try:
ret = output_tensors[node]
except:
node_list = output_tensors.keys()
raise RuntimeError(
"Node "
+ node
+ " not found, available nodes are: "
+ str(node_list)
+ "."
)
elif isinstance(node, int):
output_tensors = self.debug_datum._output_tensor_list
ret = output_tensors[node]
else:
raise RuntimeError("Require node index or name only.")
return ret
def run(self, **input_dict):
|
def run_individual(self, number, repeat=1, min_repeat_ms=0):
ret = self._run_individual(number, repeat, min_repeat_ms)
return ret.strip(",").split(",") if ret else []
def exit(self):
"""Exits the dump folder and all its contents"""
self._remove_dump_root()
| """Run forward execution of the graph with debug
Parameters
----------
input_dict : dict of str to NDArray
            List of input values to be fed to the graph.
"""
if input_dict:
self.set_input(**input_dict)
# Step 1. Execute the graph
self._run_debug()
# Step 2. Dump the output tensors to the dump folder
self.debug_datum.dump_output_tensor()
# Step 3. Dump the Chrome trace to the dump folder
self.debug_datum.dump_chrome_trace()
# Step 4. Display the collected information
self.debug_datum.display_debug_result() | identifier_body |
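Taken together, create(), run(), debug_get_output(), run_individual() and exit() above form the debug runtime's public API. A hedged usage sketch; graph, lib and data stand in for the outputs of an ordinary TVM compile step and are assumptions, not values defined in this file:

# Hedged usage sketch of the debug runtime API above; `graph`, `lib` and
# `data` are placeholders from a normal TVM build step (not defined here).
import tvm
from tvm.contrib.debugger import debug_runtime

# graph, lib = ...  # e.g. produced by relay.build (assumed, not shown)
ctx = tvm.cpu(0)
m = debug_runtime.create(graph, lib, ctx, dump_root="/tmp/tvmdbg")
m.set_input("data", data)            # or pass inputs directly to run()
m.run()                              # dumps per-node tensors + Chrome trace
first = m.debug_get_output(0, None)  # `out` is unused above; result is returned
times = m.run_individual(10, repeat=1, min_repeat_ms=0)
m.exit()                             # removes the dump folder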
debug_runtime.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Graph debug runtime executes TVM debug packed functions."""
import os
import tempfile
import shutil
from tvm._ffi.base import string_types
from tvm._ffi.function import get_global_func
from tvm.contrib import graph_runtime
from tvm.ndarray import array
from tvm.rpc import base as rpc_base
from . import debug_result
_DUMP_ROOT_PREFIX = "tvmdbg_"
_DUMP_PATH_PREFIX = "_tvmdbg_"
def create(graph_json_str, libmod, ctx, dump_root=None):
"""Create a runtime executor module given a graph and module.
Parameters
----------
graph_json_str : str or graph class
The graph to be deployed in json format output by nnvm graph.
The graph can only contain one operator(tvm_op) that
points to the name of PackedFunc in the libmod.
libmod : tvm.Module
The module of the corresponding function.
ctx : TVMContext
The context to deploy the module, can be local or remote.
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
Returns
-------
graph_module : GraphModuleDebug
Debug Runtime graph module that can be used to execute the graph.
"""
if not isinstance(graph_json_str, string_types):
try:
graph_json_str = graph_json_str._tvm_graph_json()
except AttributeError:
raise ValueError("Type %s is not supported" % type(graph_json_str))
try:
fcreate = get_global_func("tvm.graph_runtime_debug.create")
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
if num_rpc_ctx == len(ctx):
libmod = rpc_base._ModuleHandle(libmod)
try:
fcreate = ctx[0]._rpc_sess.get_function(
"tvm.graph_runtime_debug.remote_create"
)
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
func_obj = fcreate(graph_json_str, libmod, *device_type_id)
return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
class GraphModuleDebug(graph_runtime.GraphModule):
"""Graph debug runtime module.
This is a debug wrapper over the TVM runtime.
Runtime interfaces are wrapped with debug functionalities.
Manage the debug framework to format the debug data and
trigger the user interfaces.
Parameters
----------
module : Module
        The internal tvm module that holds the actual graph functions.
ctx : TVMContext
The context this module is under.
graph_json_str : str or graph class
Content of graph json file in string format
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
"""
def __init__(self, module, ctx, graph_json_str, dump_root):
self._dump_root = dump_root
self._dump_path = None
self._get_output_by_layer = module["get_output_by_layer"]
self._run_individual = module["run_individual"]
graph_runtime.GraphModule.__init__(self, module)
self._create_debug_env(graph_json_str, ctx)
def _format_context(self, ctx):
return str(ctx[0]).upper().replace("(", ":").replace(")", "")
def _ensure_dir(self, directory):
"""Create a directory if not exists
Parameters
----------
directory : str
File path to create
"""
if not os.path.exists(directory):
os.makedirs(directory, 0o700)
def _get_dump_path(self, ctx):
"""Make the graph and tensor dump folder and return the path.
Parameters
----------
ctx : TVMContext
The context this module is under.
Returns
-------
path : str
Directory path where the graph and node outputs will be stored.
"""
        # build the folder name for this context's dump directory
folder_name = _DUMP_PATH_PREFIX + "ctx_"
folder_name = folder_name + ctx.replace(":", "_")
path = os.path.join(self._dump_root, folder_name)
self._ensure_dir(path)
return path
def _remove_dump_root(self):
if os.path.isdir(self._dump_root):
shutil.rmtree(self._dump_root)
def _create_debug_env(self, graph_json, ctx):
"""Create UI wrapper framework to handle multiple UI frontends for tvmdbg
Parameters
----------
graph_json : json format
        json formatted NNVM graph containing a list of each node's name, shape and type.
nodes_list : list
List of all the nodes presented in the graph
ctx : TVMContext
The context this module is under.
"""
# make the dump folder if not given
if not self._dump_root:
self._dump_root = tempfile.mkdtemp(prefix=_DUMP_ROOT_PREFIX)
# format the context
ctx = self._format_context(ctx)
# updates the dumping directories
self._dump_path = self._get_dump_path(ctx)
# init the debug dumping environment
self.debug_datum = debug_result.DebugResult(graph_json, self._dump_path)
def _run_debug(self):
"""Execute the node specified with index will be executed.
Each debug output will be copied to the buffer
Time consumed for each execution will be set as debug output.
"""
self.debug_datum._time_list = [
[float(t) * 1e-6] for t in self.run_individual(10, 1, 1)
]
for i, node in enumerate(self.debug_datum.get_graph_nodes()):
num_outputs = self.debug_datum.get_graph_node_output_num(node)
for j in range(num_outputs):
out_tensor = self._get_output_by_layer(i, j)
out_tensor = array(out_tensor)
self.debug_datum._output_tensor_list.append(out_tensor)
def debug_get_output(self, node, out):
"""Run graph up to node and get the output to out
Parameters
----------
node : int / str
The node index or name
out : NDArray
The output array container
"""
ret = None
if isinstance(node, str):
output_tensors = self.debug_datum.get_output_tensors()
try:
ret = output_tensors[node]
except:
node_list = output_tensors.keys()
raise RuntimeError(
"Node "
+ node
+ " not found, available nodes are: "
+ str(node_list)
+ "."
)
elif isinstance(node, int):
output_tensors = self.debug_datum._output_tensor_list
ret = output_tensors[node]
else:
raise RuntimeError("Require node index or name only.")
return ret
def run(self, **input_dict):
"""Run forward execution of the graph with debug
Parameters
----------
input_dict : dict of str to NDArray
            List of input values to be fed to the graph.
"""
if input_dict:
|
# Step 1. Execute the graph
self._run_debug()
# Step 2. Dump the output tensors to the dump folder
self.debug_datum.dump_output_tensor()
# Step 3. Dump the Chrome trace to the dump folder
self.debug_datum.dump_chrome_trace()
# Step 4. Display the collected information
self.debug_datum.display_debug_result()
def run_individual(self, number, repeat=1, min_repeat_ms=0):
ret = self._run_individual(number, repeat, min_repeat_ms)
return ret.strip(",").split(",") if ret else []
def exit(self):
"""Exits the dump folder and all its contents"""
self._remove_dump_root()
| self.set_input(**input_dict) | conditional_block |
debug_runtime.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Graph debug runtime executes TVM debug packed functions."""
import os
import tempfile
import shutil
from tvm._ffi.base import string_types
from tvm._ffi.function import get_global_func
from tvm.contrib import graph_runtime
from tvm.ndarray import array
from tvm.rpc import base as rpc_base
from . import debug_result
_DUMP_ROOT_PREFIX = "tvmdbg_"
_DUMP_PATH_PREFIX = "_tvmdbg_"
def create(graph_json_str, libmod, ctx, dump_root=None):
"""Create a runtime executor module given a graph and module.
Parameters
----------
graph_json_str : str or graph class
The graph to be deployed in json format output by nnvm graph.
The graph can only contain one operator(tvm_op) that
points to the name of PackedFunc in the libmod.
libmod : tvm.Module
The module of the corresponding function.
ctx : TVMContext
The context to deploy the module, can be local or remote.
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
Returns
-------
graph_module : GraphModuleDebug
Debug Runtime graph module that can be used to execute the graph.
"""
if not isinstance(graph_json_str, string_types):
try:
graph_json_str = graph_json_str._tvm_graph_json()
except AttributeError:
raise ValueError("Type %s is not supported" % type(graph_json_str))
try:
fcreate = get_global_func("tvm.graph_runtime_debug.create")
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
if num_rpc_ctx == len(ctx):
libmod = rpc_base._ModuleHandle(libmod)
try:
fcreate = ctx[0]._rpc_sess.get_function(
"tvm.graph_runtime_debug.remote_create"
)
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
func_obj = fcreate(graph_json_str, libmod, *device_type_id)
return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
class GraphModuleDebug(graph_runtime.GraphModule):
"""Graph debug runtime module.
This is a debug wrapper over the TVM runtime.
Runtime interfaces are wrapped with debug functionalities.
Manage the debug framework to format the debug data and
trigger the user interfaces.
Parameters
----------
module : Module
        The internal tvm module that holds the actual graph functions.
ctx : TVMContext
The context this module is under.
graph_json_str : str or graph class
Content of graph json file in string format
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
"""
def __init__(self, module, ctx, graph_json_str, dump_root):
self._dump_root = dump_root
self._dump_path = None
self._get_output_by_layer = module["get_output_by_layer"]
self._run_individual = module["run_individual"]
graph_runtime.GraphModule.__init__(self, module)
self._create_debug_env(graph_json_str, ctx)
def _format_context(self, ctx):
return str(ctx[0]).upper().replace("(", ":").replace(")", "")
def | (self, directory):
"""Create a directory if not exists
Parameters
----------
directory : str
File path to create
"""
if not os.path.exists(directory):
os.makedirs(directory, 0o700)
def _get_dump_path(self, ctx):
"""Make the graph and tensor dump folder and return the path.
Parameters
----------
ctx : TVMContext
The context this module is under.
Returns
-------
path : str
Directory path where the graph and node outputs will be stored.
"""
        # build the folder name for this context's dump directory
folder_name = _DUMP_PATH_PREFIX + "ctx_"
folder_name = folder_name + ctx.replace(":", "_")
path = os.path.join(self._dump_root, folder_name)
self._ensure_dir(path)
return path
def _remove_dump_root(self):
if os.path.isdir(self._dump_root):
shutil.rmtree(self._dump_root)
def _create_debug_env(self, graph_json, ctx):
"""Create UI wrapper framework to handle multiple UI frontends for tvmdbg
Parameters
----------
graph_json : json format
        json formatted NNVM graph containing a list of each node's name, shape and type.
nodes_list : list
List of all the nodes presented in the graph
ctx : TVMContext
The context this module is under.
"""
# make the dump folder if not given
if not self._dump_root:
self._dump_root = tempfile.mkdtemp(prefix=_DUMP_ROOT_PREFIX)
# format the context
ctx = self._format_context(ctx)
# updates the dumping directories
self._dump_path = self._get_dump_path(ctx)
# init the debug dumping environment
self.debug_datum = debug_result.DebugResult(graph_json, self._dump_path)
def _run_debug(self):
"""Execute the node specified with index will be executed.
Each debug output will be copied to the buffer
Time consumed for each execution will be set as debug output.
"""
self.debug_datum._time_list = [
[float(t) * 1e-6] for t in self.run_individual(10, 1, 1)
]
for i, node in enumerate(self.debug_datum.get_graph_nodes()):
num_outputs = self.debug_datum.get_graph_node_output_num(node)
for j in range(num_outputs):
out_tensor = self._get_output_by_layer(i, j)
out_tensor = array(out_tensor)
self.debug_datum._output_tensor_list.append(out_tensor)
def debug_get_output(self, node, out):
"""Run graph up to node and get the output to out
Parameters
----------
node : int / str
The node index or name
out : NDArray
The output array container
"""
ret = None
if isinstance(node, str):
output_tensors = self.debug_datum.get_output_tensors()
try:
ret = output_tensors[node]
except:
node_list = output_tensors.keys()
raise RuntimeError(
"Node "
+ node
+ " not found, available nodes are: "
+ str(node_list)
+ "."
)
elif isinstance(node, int):
output_tensors = self.debug_datum._output_tensor_list
ret = output_tensors[node]
else:
raise RuntimeError("Require node index or name only.")
return ret
def run(self, **input_dict):
"""Run forward execution of the graph with debug
Parameters
----------
input_dict : dict of str to NDArray
            List of input values to be fed to the graph.
"""
if input_dict:
self.set_input(**input_dict)
# Step 1. Execute the graph
self._run_debug()
# Step 2. Dump the output tensors to the dump folder
self.debug_datum.dump_output_tensor()
# Step 3. Dump the Chrome trace to the dump folder
self.debug_datum.dump_chrome_trace()
# Step 4. Display the collected information
self.debug_datum.display_debug_result()
def run_individual(self, number, repeat=1, min_repeat_ms=0):
ret = self._run_individual(number, repeat, min_repeat_ms)
return ret.strip(",").split(",") if ret else []
def exit(self):
"""Exits the dump folder and all its contents"""
self._remove_dump_root()
| _ensure_dir | identifier_name |
debug_runtime.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Graph debug runtime executes TVM debug packed functions."""
import os
import tempfile
import shutil
from tvm._ffi.base import string_types
from tvm._ffi.function import get_global_func
from tvm.contrib import graph_runtime
from tvm.ndarray import array
from tvm.rpc import base as rpc_base
from . import debug_result
_DUMP_ROOT_PREFIX = "tvmdbg_"
_DUMP_PATH_PREFIX = "_tvmdbg_"
def create(graph_json_str, libmod, ctx, dump_root=None):
"""Create a runtime executor module given a graph and module.
Parameters
----------
graph_json_str : str or graph class
The graph to be deployed in json format output by nnvm graph.
The graph can only contain one operator(tvm_op) that
points to the name of PackedFunc in the libmod.
libmod : tvm.Module
The module of the corresponding function.
ctx : TVMContext
The context to deploy the module, can be local or remote.
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
Returns
-------
graph_module : GraphModuleDebug
Debug Runtime graph module that can be used to execute the graph.
"""
if not isinstance(graph_json_str, string_types):
try:
graph_json_str = graph_json_str._tvm_graph_json()
except AttributeError:
raise ValueError("Type %s is not supported" % type(graph_json_str))
try:
fcreate = get_global_func("tvm.graph_runtime_debug.create")
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
if num_rpc_ctx == len(ctx):
libmod = rpc_base._ModuleHandle(libmod)
try:
fcreate = ctx[0]._rpc_sess.get_function(
"tvm.graph_runtime_debug.remote_create"
)
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
func_obj = fcreate(graph_json_str, libmod, *device_type_id)
return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
class GraphModuleDebug(graph_runtime.GraphModule):
"""Graph debug runtime module.
This is a debug wrapper over the TVM runtime.
Runtime interfaces are wrapped with debug functionalities.
Manage the debug framework to format the debug data and
trigger the user interfaces.
Parameters
----------
module : Module
        The internal tvm module that holds the actual graph functions.
ctx : TVMContext
The context this module is under.
graph_json_str : str or graph class
Content of graph json file in string format
dump_root : str
        To select which folder the outputs should be kept in.
        None will make a temp folder in /tmp/tvmdbg<rand_string> and do the dumping there.
""" | self._dump_path = None
self._get_output_by_layer = module["get_output_by_layer"]
self._run_individual = module["run_individual"]
graph_runtime.GraphModule.__init__(self, module)
self._create_debug_env(graph_json_str, ctx)
def _format_context(self, ctx):
return str(ctx[0]).upper().replace("(", ":").replace(")", "")
def _ensure_dir(self, directory):
"""Create a directory if not exists
Parameters
----------
directory : str
File path to create
"""
if not os.path.exists(directory):
os.makedirs(directory, 0o700)
def _get_dump_path(self, ctx):
"""Make the graph and tensor dump folder and return the path.
Parameters
----------
ctx : TVMContext
The context this module is under.
Returns
-------
path : str
Directory path where the graph and node outputs will be stored.
"""
        # build the folder name for this context's dump directory
folder_name = _DUMP_PATH_PREFIX + "ctx_"
folder_name = folder_name + ctx.replace(":", "_")
path = os.path.join(self._dump_root, folder_name)
self._ensure_dir(path)
return path
def _remove_dump_root(self):
if os.path.isdir(self._dump_root):
shutil.rmtree(self._dump_root)
def _create_debug_env(self, graph_json, ctx):
"""Create UI wrapper framework to handle multiple UI frontends for tvmdbg
Parameters
----------
graph_json : json format
        json formatted NNVM graph containing a list of each node's name, shape and type.
nodes_list : list
List of all the nodes presented in the graph
ctx : TVMContext
The context this module is under.
"""
# make the dump folder if not given
if not self._dump_root:
self._dump_root = tempfile.mkdtemp(prefix=_DUMP_ROOT_PREFIX)
# format the context
ctx = self._format_context(ctx)
# updates the dumping directories
self._dump_path = self._get_dump_path(ctx)
# init the debug dumping environment
self.debug_datum = debug_result.DebugResult(graph_json, self._dump_path)
def _run_debug(self):
"""Execute the node specified with index will be executed.
Each debug output will be copied to the buffer
Time consumed for each execution will be set as debug output.
"""
self.debug_datum._time_list = [
[float(t) * 1e-6] for t in self.run_individual(10, 1, 1)
]
for i, node in enumerate(self.debug_datum.get_graph_nodes()):
num_outputs = self.debug_datum.get_graph_node_output_num(node)
for j in range(num_outputs):
out_tensor = self._get_output_by_layer(i, j)
out_tensor = array(out_tensor)
self.debug_datum._output_tensor_list.append(out_tensor)
def debug_get_output(self, node, out):
"""Run graph up to node and get the output to out
Parameters
----------
node : int / str
The node index or name
out : NDArray
The output array container
"""
ret = None
if isinstance(node, str):
output_tensors = self.debug_datum.get_output_tensors()
try:
ret = output_tensors[node]
except:
node_list = output_tensors.keys()
raise RuntimeError(
"Node "
+ node
+ " not found, available nodes are: "
+ str(node_list)
+ "."
)
elif isinstance(node, int):
output_tensors = self.debug_datum._output_tensor_list
ret = output_tensors[node]
else:
raise RuntimeError("Require node index or name only.")
return ret
def run(self, **input_dict):
"""Run forward execution of the graph with debug
Parameters
----------
input_dict : dict of str to NDArray
            List of input values to be fed to the graph.
"""
if input_dict:
self.set_input(**input_dict)
# Step 1. Execute the graph
self._run_debug()
# Step 2. Dump the output tensors to the dump folder
self.debug_datum.dump_output_tensor()
# Step 3. Dump the Chrome trace to the dump folder
self.debug_datum.dump_chrome_trace()
# Step 4. Display the collected information
self.debug_datum.display_debug_result()
def run_individual(self, number, repeat=1, min_repeat_ms=0):
ret = self._run_individual(number, repeat, min_repeat_ms)
return ret.strip(",").split(",") if ret else []
def exit(self):
"""Exits the dump folder and all its contents"""
self._remove_dump_root() |
def __init__(self, module, ctx, graph_json_str, dump_root):
self._dump_root = dump_root | random_line_split |
release_push.py | ,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_COMPAT_QUAL_RELEASE_POM,
CHECKER_COMPAT_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVAC_BINARY_RELEASE_POM, JAVAC_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK7_BINARY_RELEASE_POM, JDK7_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK8_BINARY_RELEASE_POM, JDK8_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVACUTIL_BINARY_RELEASE_POM, JAVACUTIL_BINARY,
JAVACUTIL_SOURCE_JAR, JAVACUTIL_JAVADOC_JAR,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, DATAFLOW_BINARY_RELEASE_POM, DATAFLOW_BINARY,
DATAFLOW_SOURCE_JAR, DATAFLOW_JAVADOC_JAR,
pgp_user, pgp_passphrase )
plugin_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version )
plugin_source_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version, "sources" )
plugin_javadoc_jar = os.path.join( MAVEN_RELEASE_DIR, mvn_dist, "checkerframework-maven-plugin-javadoc.jar" )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, MAVEN_PLUGIN_RELEASE_POM, plugin_jar,
plugin_source_jar, plugin_javadoc_jar, pgp_user, pgp_passphrase )
delete_path( mvn_dist )
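# Note: each mvn_sign_and_deploy_all call above presumably expands to one
# gpg:sign-and-deploy-file invocation per artifact (binary, sources, javadoc).
# A hedged sketch of a single underlying call; the flag names are assumptions
# from the stock maven-gpg-plugin, and execute() stands in for this script's
# shell helper rather than a function shown in this excerpt.
def mvn_sign_and_deploy(url, repo_id, pom_file, artifact_jar, pgp_user, pgp_passphrase):
    execute("mvn gpg:sign-and-deploy-file"
            + " -Dgpg.keyname=" + pgp_user
            + " -Dgpg.passphrase=" + pgp_passphrase
            + " -Durl=" + url
            + " -DrepositoryId=" + repo_id
            + " -DpomFile=" + pom_file
            + " -Dfile=" + artifact_jar)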
def run_link_checker( site, output ):
check_links_script = os.path.join(CHECKER_FRAMEWORK_RELEASE, "checkLinks.sh")
cmd = ["sh", check_links_script, site]
out_file = open( output, 'w+' )
print("Executing: " + " ".join(cmd) )
process = subprocess.Popen(cmd, stdout=out_file, stderr=out_file)
process.communicate()
process.wait()
out_file.close()
if process.returncode != 0:
raise Exception('Non-zero return code( %s ) while executing %s' % (process.returncode, cmd))
return output
def | ( jsr308_website, afu_website, checker_website, suffix ):
jsr308Check = run_link_checker( jsr308_website, "/tmp/jsr308." + suffix + ".check" )
afuCheck = run_link_checker( afu_website, "/tmp/afu." + suffix + ".check" )
checkerCheck = run_link_checker( checker_website, "/tmp/checker-framework." + suffix + ".check" )
print( "Link checker results can be found at:\n" +
"\t" + jsr308Check + "\n" +
"\t" + afuCheck + "\n" +
"\t" + checkerCheck + "\n" )
continue_script = prompt_w_suggestion("Delete " + suffix + " site link checker results?", "yes", "^(Yes|yes|No|no)$")
if is_yes(continue_script):
delete( jsr308Check )
delete( afuCheck )
delete( checkerCheck )
def push_interm_to_release_repos():
hg_push_or_fail( INTERM_JSR308_REPO )
hg_push_or_fail( INTERM_ANNO_REPO )
hg_push_or_fail( INTERM_CHECKER_REPO )
def continue_or_exit( msg ):
continue_script = prompt_w_suggestion(msg + " Continue?", "yes", "^(Yes|yes|No|no)$")
if continue_script == "no" or continue_script == "No":
raise Exception( "User elected NOT to continue at prompt: " + msg )
def read_args(argv):
test = True
if len( argv ) == 2:
if argv[1] == "release":
test = False
else:
print_usage()
else:
if len( argv ) > 2:
print_usage()
raise Exception( "Invalid arguments. " + ",".join(argv) )
return test
def print_usage():
print ( "Usage: python release_build.py [release]\n" +
"The only argument this script takes is \"release\". If this argument is " +
"NOT specified then the script will execute all steps that checking and prompting " +
"steps but will NOT actually perform a release. This is for testing the script." )
def main(argv):
# MANUAL Indicates a manual step
# SEMIAUTO Indicates a mostly automated step with possible prompts. Most of these steps become fully-automated when --auto is used.
# AUTO Indicates the step is fully-automated.
# Note that many prompts will cause scripts to exit if you say 'no'. This will require you to re-run
# the script from the beginning, which may take a long time. It is better to say 'yes' to the script
# prompts and follow the indicated steps even if they are redundant/you have done them already. Also,
# be sure to carefully read all instructions on the command-line before typing yes. This is because
# the scripts do not ask you to say 'yes' after each step, so you may miss a step if you only read
# the paragraph asking you to say 'yes'.
set_umask()
test_mode = read_args( argv )
msg = ( "You have chosen test_mode. \nThis means that this script will execute all build steps that " +
"do not have side-effects. That is, this is a test run of the script. All checks and user prompts " +
"will be shown but no steps will be executed that will cause the release to be deployed or partially " +
"deployed.\n" +
"If you meant to do an actual release, re-run this script with one argument, \"release\"." )
if not test_mode:
msg = "You have chosen release_mode. Please follow the prompts to run a full Checker Framework release"
continue_or_exit( msg + "\n" )
if test_mode:
print("Continuing in test mode.")
else:
print("Continuing in release mode.")
check_hg_user()
print( "\nNOTE: Please read all the prompts printed by this script very carefully, as their" )
print( "contents may have changed since the last time you ran it." )
print_step( "Push Step 0: Verify Requirements\n" ) # MANUAL
print( " If this is your first time running the release_push script, please verify that you have met " +
"all the requirements specified in README-maintainers.html \"Pre-release Checklist\"\n" )
continue_or_exit("")
# The release script checks that the new release version is greater than the previous release version.
print_step( "Push Step 1: Checking release versions" ) # SEMIAUTO
dev_jsr308_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "jsr308" )
live_jsr308_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "jsr308" )
dev_afu_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "annotation-file-utilities" )
live_afu_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "annotation-file-utilities" )
dev_checker_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "checker-framework" )
live_checker_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "checker-framework" )
current_checker_version = current_distribution_by_website( live_checker_website )
new_checker_version = current_distribution( CHECKER_FRAMEWORK )
check_release_version( current_checker_version, new_checker_version )
#note, get_afu_version | check_all_links | identifier_name |
release_push.py | ,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_COMPAT_QUAL_RELEASE_POM,
CHECKER_COMPAT_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVAC_BINARY_RELEASE_POM, JAVAC_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK7_BINARY_RELEASE_POM, JDK7_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK8_BINARY_RELEASE_POM, JDK8_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVACUTIL_BINARY_RELEASE_POM, JAVACUTIL_BINARY,
JAVACUTIL_SOURCE_JAR, JAVACUTIL_JAVADOC_JAR,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, DATAFLOW_BINARY_RELEASE_POM, DATAFLOW_BINARY,
DATAFLOW_SOURCE_JAR, DATAFLOW_JAVADOC_JAR,
pgp_user, pgp_passphrase )
plugin_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version )
plugin_source_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version, "sources" )
plugin_javadoc_jar = os.path.join( MAVEN_RELEASE_DIR, mvn_dist, "checkerframework-maven-plugin-javadoc.jar" )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, MAVEN_PLUGIN_RELEASE_POM, plugin_jar,
plugin_source_jar, plugin_javadoc_jar, pgp_user, pgp_passphrase )
delete_path( mvn_dist )
def run_link_checker( site, output ):
check_links_script = os.path.join(CHECKER_FRAMEWORK_RELEASE, "checkLinks.sh")
cmd = ["sh", check_links_script, site]
out_file = open( output, 'w+' )
print("Executing: " + " ".join(cmd) )
process = subprocess.Popen(cmd, stdout=out_file, stderr=out_file)
process.communicate()
process.wait()
out_file.close()
if process.returncode != 0:
raise Exception('Non-zero return code( %s ) while executing %s' % (process.returncode, cmd))
return output
def check_all_links( jsr308_website, afu_website, checker_website, suffix ):
jsr308Check = run_link_checker( jsr308_website, "/tmp/jsr308." + suffix + ".check" )
afuCheck = run_link_checker( afu_website, "/tmp/afu." + suffix + ".check" )
checkerCheck = run_link_checker( checker_website, "/tmp/checker-framework." + suffix + ".check" )
print( "Link checker results can be found at:\n" +
"\t" + jsr308Check + "\n" +
"\t" + afuCheck + "\n" +
"\t" + checkerCheck + "\n" )
continue_script = prompt_w_suggestion("Delete " + suffix + " site link checker results?", "yes", "^(Yes|yes|No|no)$")
if is_yes(continue_script):
delete( jsr308Check )
delete( afuCheck )
delete( checkerCheck )
def push_interm_to_release_repos():
hg_push_or_fail( INTERM_JSR308_REPO )
hg_push_or_fail( INTERM_ANNO_REPO )
hg_push_or_fail( INTERM_CHECKER_REPO )
def continue_or_exit( msg ):
continue_script = prompt_w_suggestion(msg + " Continue?", "yes", "^(Yes|yes|No|no)$")
if continue_script == "no" or continue_script == "No":
raise Exception( "User elected NOT to continue at prompt: " + msg )
def read_args(argv):
test = True
if len( argv ) == 2:
if argv[1] == "release":
test = False
else:
print_usage()
else:
if len( argv ) > 2:
print_usage()
raise Exception( "Invalid arguments. " + ",".join(argv) )
return test
def print_usage():
print ( "Usage: python release_build.py [release]\n" +
"The only argument this script takes is \"release\". If this argument is " +
"NOT specified then the script will execute all steps that checking and prompting " +
"steps but will NOT actually perform a release. This is for testing the script." )
def main(argv):
# MANUAL Indicates a manual step
# SEMIAUTO Indicates a mostly automated step with possible prompts. Most of these steps become fully-automated when --auto is used.
# AUTO Indicates the step is fully-automated.
# Note that many prompts will cause scripts to exit if you say 'no'. This will require you to re-run
# the script from the beginning, which may take a long time. It is better to say 'yes' to the script
# prompts and follow the indicated steps even if they are redundant/you have done them already. Also,
# be sure to carefully read all instructions on the command-line before typing yes. This is because
# the scripts do not ask you to say 'yes' after each step, so you may miss a step if you only read
# the paragraph asking you to say 'yes'.
set_umask()
test_mode = read_args( argv )
msg = ( "You have chosen test_mode. \nThis means that this script will execute all build steps that " +
"do not have side-effects. That is, this is a test run of the script. All checks and user prompts " +
"will be shown but no steps will be executed that will cause the release to be deployed or partially " +
"deployed.\n" +
"If you meant to do an actual release, re-run this script with one argument, \"release\"." )
if not test_mode:
|
continue_or_exit( msg + "\n" )
if test_mode:
print("Continuing in test mode.")
else:
print("Continuing in release mode.")
check_hg_user()
print( "\nNOTE: Please read all the prompts printed by this script very carefully, as their" )
print( "contents may have changed since the last time you ran it." )
print_step( "Push Step 0: Verify Requirements\n" ) # MANUAL
print( " If this is your first time running the release_push script, please verify that you have met " +
"all the requirements specified in README-maintainers.html \"Pre-release Checklist\"\n" )
continue_or_exit("")
# The release script checks that the new release version is greater than the previous release version.
print_step( "Push Step 1: Checking release versions" ) # SEMIAUTO
dev_jsr308_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "jsr308" )
live_jsr308_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "jsr308" )
dev_afu_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "annotation-file-utilities" )
live_afu_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "annotation-file-utilities" )
dev_checker_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "checker-framework" )
live_checker_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "checker-framework" )
current_checker_version = current_distribution_by_website( live_checker_website )
new_checker_version = current_distribution( CHECKER_FRAMEWORK )
check_release_version( current_checker_version, new_checker_version )
#note, get_afu_version | msg = "You have chosen release_mode. Please follow the prompts to run a full Checker Framework release" | conditional_block |
release_push.py | \n\n")
print_step(" 3a: Run javac sanity test on development release." )
if prompt_yes_no( "Run javac sanity test on development release?", True ):
javac_sanity_check( dev_checker_website, new_checker_version )
print_step("3b: Run Maven sanity test on development release." )
if prompt_yes_no( "Run Maven sanity test on development repo?", True ):
maven_sanity_check( "maven-dev", MAVEN_DEV_REPO, new_checker_version )
print_step( "3c: Build the Eclipse plugin and test." )
print("Please download: http://types.cs.washington.edu/dev/checker-framework/current/checker-framework.zip")
print("Use the jars in the dist directory along with the instructions at " +
"checker-framework/eclipse/README-developers.html to build the Eclipse plugin." +
"Please install this version in the latest version of Eclipse and follow the tutorial at:\n" +
"http://types.cs.washington.edu/dev/checker-framework/tutorial/" )
continue_or_exit("If the tutorial doesn't work, please abort the release and contact the appropriate developer.")
# The Central repository is a repository of build artifacts for build programs like Maven and Ivy.
# This step stages (but doesn't release) the Checker Framework's Maven artifacts in the Sonatype
# Central Repository.
# Once staging is complete, there are manual steps to log into Sonatype Central and "close" the
# staging repository. Closing allows us to test the artifacts.
# This step deploys the artifacts to the Central repository and prompts the user to close the
# artifacts. Later, you will be prompted to release the staged artifacts after we commit the
# release to our Google Code repositories.
# For more information on deploying to the Central Repository see:
# https://docs.sonatype.org/display/Repository/Sonatype+OSS+Maven+Repository+Usage+Guide
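# For illustration only: the exact command line is assembled inside
# mvn_sign_and_deploy_all, but each POM/artifact pair is deployed roughly like
#   mvn gpg:sign-and-deploy-file -Durl=<SONATYPE_OSS_URL> \
#       -DrepositoryId=<SONATYPE_STAGING_REPO_ID> \
#       -DpomFile=<release pom> -Dfile=<artifact jar>
# (the goal and -D options come from the standard maven-gpg-plugin; the
# angle-bracket placeholders are assumptions, not the script's literal arguments).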
print_step( "Push Step 4: Stage Maven artifacts in Central" ) # SEMIAUTO
print_step("4a: Stage the artifacts at Maven central." )
if prompt_yes_no( "Stage Maven artifacts in Maven Central?" ):
stage_maven_artifacts_in_maven_central( new_checker_version )
print_step("4b: Close staged artifacts at Maven central." )
print( "Maven artifacts have been staged! Please 'close' (but don't release) the artifacts. " +
"To close, log into https://oss.sonatype.org using your " +
"Sonatype credentials and follow the 'close' instructions at: " + SONATYPE_CLOSING_DIRECTIONS_URL )
print_step("4c: Run Maven sanity test on Maven central artifacts." )
if prompt_yes_no( "Run Maven sanity test on Maven central artifacts?", True ):
repo_url = raw_input( "Please enter the repo URL of the closed artifacts. To find this URL " +
"log into https://oss.sonatype.org. Go to the Staging Repositories. Find " +
"the repository you just closed and paste that URL here:\n" )
maven_sanity_check( "maven-staging", repo_url, new_checker_version )
# This step copies the development release directories to the live release directories.
# It then adds the appropriate permissions to the release. Symlinks need to be updated to point
# to the live website rather than the development website. A straight copy of the directory
# will NOT update the symlinks.
print_step("Push Step 5. Push dev website to live website" ) # SEMIAUTO
continue_or_exit("Copy release to the live website?")
if not test_mode:
print("Copying to live site")
copy_releases_to_live_site( new_checker_version, new_afu_version )
copy_htaccess()
ensure_group_access_to_releases()
update_release_symlinks( new_checker_version, new_afu_version )
else:
print( "Test mode: Skipping copy to live site!" )
# Runs the link the checker on all websites at:
# http://types.cs.washington.edu/
# The output of the link checker is written to files in the /tmp directory whose locations
# will be output at the command prompt. Review the link checker output.
# The set of broken links that is displayed by this check will differ from those in push
# step 2 because the Checker Framework manual and website uses a mix of absolute and
# relative links. Therefore, some links from the development site actually point to the
# live site (the previous release). After step 5, these links point to the current
# release and may be broken.
# NOTE: There will be many broken links within the jdk-api directory; see the Open JDK/JSR308 Javadoc.
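# A quick way to scan a result file once its location is printed (the file name
# below and the "broken" pattern are guesses at the link checker's output format):
#   grep -n "broken" /tmp/checker-framework.live.check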
print( "Push Step 6. Check live site links" ) # SEMIAUTO
if prompt_yes_no( "Run link Checker on LIVE site?", True ):
check_all_links( live_jsr308_website, live_afu_website, live_checker_website, "live" )
# This step downloads the checker-framework.zip file of the newly live release and ensures we
# can run the Nullness Checker. If this step fails, you should back out the release.
print_step( "Push Step 7: Run javac sanity test on the live release." ) # SEMIAUTO
if prompt_yes_no( "Run javac sanity test on live release?", True ):
javac_sanity_check( live_checker_website, new_checker_version )
# You must manually deploy the Eclipse plugin. Follow the instructions at the prompt.
print_step("Push Step 8: Deploy the Eclipse Plugin to the live site." ) # MANUAL
continue_or_exit( "Follow the instruction under 'Releasing the Plugin' in checker-framework/eclipse/README-developers.html to " +
"deploy the Eclipse plugin to the live website. Please install the plugin from the new " +
"live repository and run it on a file in which you expect a type error. If you run into errors, " +
"back out the release!" )
# This step pushes the changes committed to the interm repositories to the Google Code
# repositories. This is the first irreversible change. After this point, you can no longer
# back out changes and should do another release in case of critical errors.
print_step( "Push Step 9. Commit changes to repositories" ) # SEMIAUTO
if prompt_yes_no( "Push the release to Google code repositories? This is irreversible." ):
if not test_mode:
push_interm_to_release_repos()
print( "Pushed to repos" )
else:
print( "Test mode: Skipping push to Google Code!" )
# This is a manual step that releases the staged Maven artifacts to the actual Central repository.
# This is also an irreversible step. Once you have released these artifacts they will be forever
# available to the Java community through the Central repository. Follow the prompts. The Maven
# artifacts (such as checker-qual.jar) are still needed, but the Maven plug-in is no longer maintained.
print_step( "Push Step 10. Release staged artifacts in Central repository." ) # MANUAL
if test_mode:
msg = ( "Test Mode: You are in test_mode. Please 'DROP' the artifacts. " +
"To drop, log into https://oss.sonatype.org using your " +
"Sonatype credentials and follow the 'DROP' instructions at: " + SONATYPE_DROPPING_DIRECTIONS_URL )
else:
msg = ( "Please 'release' the artifacts, but IMPORTANTLY first ensure that the Checker Framework maven plug-in directory" +
"(and only that directory) is removed from the artifacts. " +
"To release, log into https://oss.sonatype.org using your " +
"Sonatype credentials and follow the 'close' instructions at: " + SONATYPE_RELEASE_DIRECTIONS_URL )
# Later we will fix this so that the maven plug-in directory is not included in the first place.
print( msg )
prompt_until_yes()
# A prompt describes the email you should send to all relevant mailing lists.
# Please fill out the email and announce the release.
print_step( "Push Step 11. Announce the release." ) # MANUAL
continue_or_exit( "Please announce the release using the email structure below.\n" +
"Note that this text may have changed since the last time a release was performed.\n" +
get_announcement_email( new_checker_version ) )
print_step( "Push Step 12. Push Eclipse plugin files." ) # MANUAL
if test_mode:
msg = ( "Test Mode: You are in test_mode. If you built the Eclipse plugin on" +
"your local machine, you may want to revert any files that were modified." )
else:
msg = ( "If you built the Eclipse plugin on your local machine, there are a few " +
"changed files with version number changes that need to be pushed.\n" +
"Do not push the .classpath file. The following files should be pushed:\n" +
"checker-framework-eclipse-feature/feature.xml\n" +
"checker-framework-eclipse-plugin/META-INF/MANIFEST.MF\n" +
"checker-framework-eclipse-update-site/site.xml" )
print( msg )
prompt_until_yes()
if __name__ == "__main__": | random_line_split |
||
release_push.py | ,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_COMPAT_QUAL_RELEASE_POM,
CHECKER_COMPAT_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVAC_BINARY_RELEASE_POM, JAVAC_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK7_BINARY_RELEASE_POM, JDK7_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK8_BINARY_RELEASE_POM, JDK8_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVACUTIL_BINARY_RELEASE_POM, JAVACUTIL_BINARY,
JAVACUTIL_SOURCE_JAR, JAVACUTIL_JAVADOC_JAR,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, DATAFLOW_BINARY_RELEASE_POM, DATAFLOW_BINARY,
DATAFLOW_SOURCE_JAR, DATAFLOW_JAVADOC_JAR,
pgp_user, pgp_passphrase )
plugin_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version )
plugin_source_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version, "sources" )
plugin_javadoc_jar = os.path.join( MAVEN_RELEASE_DIR, mvn_dist, "checkerframework-maven-plugin-javadoc.jar" )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, MAVEN_PLUGIN_RELEASE_POM, plugin_jar,
plugin_source_jar, plugin_javadoc_jar, pgp_user, pgp_passphrase )
delete_path( mvn_dist )
def run_link_checker( site, output ):
check_links_script = os.path.join(CHECKER_FRAMEWORK_RELEASE, "checkLinks.sh")
cmd = ["sh", check_links_script, site]
out_file = open( output, 'w+' )
print("Executing: " + " ".join(cmd) )
process = subprocess.Popen(cmd, stdout=out_file, stderr=out_file)
process.communicate()
process.wait()
out_file.close()
if process.returncode != 0:
raise Exception('Non-zero return code (%s) while executing %s' % (process.returncode, cmd))
return output
def check_all_links( jsr308_website, afu_website, checker_website, suffix ):
jsr308Check = run_link_checker( jsr308_website, "/tmp/jsr308." + suffix + ".check" )
afuCheck = run_link_checker( afu_website, "/tmp/afu." + suffix + ".check" )
checkerCheck = run_link_checker( checker_website, "/tmp/checker-framework." + suffix + ".check" )
print( "Link checker results can be found at:\n" +
"\t" + jsr308Check + "\n" +
"\t" + afuCheck + "\n" +
"\t" + checkerCheck + "\n" )
continue_script = prompt_w_suggestion("Delete " + suffix + " site link checker results?", "yes", "^(Yes|yes|No|no)$")
if is_yes(continue_script):
delete( jsr308Check )
delete( afuCheck )
delete( checkerCheck )
def push_interm_to_release_repos():
hg_push_or_fail( INTERM_JSR308_REPO )
hg_push_or_fail( INTERM_ANNO_REPO )
hg_push_or_fail( INTERM_CHECKER_REPO )
def continue_or_exit( msg ):
|
def read_args(argv):
test = True
if len( argv ) == 2:
if argv[1] == "release":
test = False
else:
print_usage()
else:
if len( argv ) > 2:
print_usage()
raise Exception( "Invalid arguments. " + ",".join(argv) )
return test
def print_usage():
print ( "Usage: python release_build.py [release]\n" +
"The only argument this script takes is \"release\". If this argument is " +
"NOT specified then the script will execute all steps that checking and prompting " +
"steps but will NOT actually perform a release. This is for testing the script." )
def main(argv):
# MANUAL Indicates a manual step
# SEMIAUTO Indicates a mostly automated step with possible prompts. Most of these steps become fully-automated when --auto is used.
# AUTO Indicates the step is fully-automated.
# Note that many prompts will cause scripts to exit if you say 'no'. This will require you to re-run
# the script from the beginning, which may take a long time. It is better to say 'yes' to the script
# prompts and follow the indicated steps even if they are redundant/you have done them already. Also,
# be sure to carefully read all instructions on the command-line before typing yes. This is because
# the scripts do not ask you to say 'yes' after each step, so you may miss a step if you only read
# the paragraph asking you to say 'yes'.
set_umask()
test_mode = read_args( argv )
msg = ( "You have chosen test_mode. \nThis means that this script will execute all build steps that " +
"do not have side-effects. That is, this is a test run of the script. All checks and user prompts " +
"will be shown but no steps will be executed that will cause the release to be deployed or partially " +
"deployed.\n" +
"If you meant to do an actual release, re-run this script with one argument, \"release\"." )
if not test_mode:
msg = "You have chosen release_mode. Please follow the prompts to run a full Checker Framework release"
continue_or_exit( msg + "\n" )
if test_mode:
print("Continuing in test mode.")
else:
print("Continuing in release mode.")
check_hg_user()
print( "\nNOTE: Please read all the prompts printed by this script very carefully, as their" )
print( "contents may have changed since the last time you ran it." )
print_step( "Push Step 0: Verify Requirements\n" ) # MANUAL
print( " If this is your first time running the release_push script, please verify that you have met " +
"all the requirements specified in README-maintainers.html \"Pre-release Checklist\"\n" )
continue_or_exit("")
# The release script checks that the new release version is greater than the previous release version.
print_step( "Push Step 1: Checking release versions" ) # SEMIAUTO
dev_jsr308_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "jsr308" )
live_jsr308_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "jsr308" )
dev_afu_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "annotation-file-utilities" )
live_afu_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "annotation-file-utilities" )
dev_checker_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "checker-framework" )
live_checker_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "checker-framework" )
current_checker_version = current_distribution_by_website( live_checker_website )
new_checker_version = current_distribution( CHECKER_FRAMEWORK )
check_release_version( current_checker_version, new_checker_version )
#note, get_afu_version | continue_script = prompt_w_suggestion(msg + " Continue?", "yes", "^(Yes|yes|No|no)$")
if continue_script == "no" or continue_script == "No":
raise Exception( "User elected NOT to continue at prompt: " + msg ) | identifier_body |
SessionEventMap.ts | /*
* (C) Copyright 2017-2022 OpenVidu (https://openvidu.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import { EventMap } from './EventMap';
import { ConnectionEvent } from '../ConnectionEvent';
import { ConnectionPropertyChangedEvent } from '../ConnectionPropertyChangedEvent';
import { ExceptionEvent } from '../ExceptionEvent';
import { NetworkQualityLevelChangedEvent } from '../NetworkQualityLevelChangedEvent';
import { PublisherSpeakingEvent } from '../PublisherSpeakingEvent';
import { RecordingEvent } from '../RecordingEvent';
import { SessionDisconnectedEvent } from '../SessionDisconnectedEvent';
import { SignalEvent } from '../SignalEvent';
import { StreamEvent } from '../StreamEvent';
import { StreamPropertyChangedEvent } from '../StreamPropertyChangedEvent';
/**
* Events dispatched by [[Session]] object. Manage event listeners with
* [[Session.on]], [[Session.once]] and [[Session.off]] methods.
*/
export interface SessionEventMap extends EventMap {
/**
* Event dispatched when a new user has connected to the session.
*
* It is fired for both the local user and remote users.
*/
connectionCreated: ConnectionEvent;
/**
* Event dispatched when a remote user has left the session.
*
* For the local user see [[sessionDisconnected]].
*/
connectionDestroyed: ConnectionEvent;
/**
* **This feature is part of OpenVidu Pro tier** <a href="https://docs.openvidu.io/en/stable/openvidu-pro/" style="display: inline-block; background-color: rgb(0, 136, 170); color: white; font-weight: bold; padding: 0px 5px; margin-right: 5px; border-radius: 3px; font-size: 13px; line-height:21px; font-family: Montserrat, sans-serif">PRO</a>
*
* Event dispatched when a property of the local [[Connection]] object changes.
*
* It is fired only for the local user.
*
* The properties that may change are [[Connection.role]] and [[Connection.record]].
* The only way the Connection properties may change is by updating them through:
*
* - [API REST](/en/stable/reference-docs/REST-API/#patch-connection)
* - [openvidu-java-client](/en/stable/reference-docs/openvidu-java-client/#update-a-connection)
* - [openvidu-node-client](/en/stable/reference-docs/openvidu-node-client/#update-a-connection)<br><br>
*/
connectionPropertyChanged: ConnectionPropertyChangedEvent;
/**
* Event dispatched when the local user has left the session.
*
* For remote users see [[connectionDestroyed]].
*/
sessionDisconnected: SessionDisconnectedEvent;
/**
* Event dispatched when a user has started publishing media to the session (see [[Session.publish]]).
*
* It is fired for both the local user and remote users.
*/
streamCreated: StreamEvent;
/**
* Event dispatched when a user stops publishing media to the session.
*
* It is fired for both the local user and remote users.
*/
streamDestroyed: StreamEvent;
/**
* Event dispatched when a Stream undergoes any change in any of its mutable properties
* (see [[StreamPropertyChangedEvent.changedProperty]]).
*
* It is fired for both remote streams (owned by a [[Subscriber]]) or local streams (owned by a [[Publisher]]).
*/
streamPropertyChanged: StreamPropertyChangedEvent;
/**
* Event dispatched when a user has started speaking.
*
* It is fired for both the local user and remote users.
*
* Extra information:
* - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true).
* - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]].
*/
publisherStartSpeaking: PublisherSpeakingEvent;
/**
* Event dispatched when a user has stopped speaking.
*
* It is fired for both the local user and remote users.
*
* Extra information:
* - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true).
* - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]].
*/
publisherStopSpeaking: PublisherSpeakingEvent;
/**
* @hidden
*/
[key: `signal:${string}`]: SignalEvent;
/**
* Event dispatched when a signal is received (see [Send text messages between users](/en/stable/cheatsheet/send-messages)).
*
* If the listener is added as **`signal:TYPE`**, only signals of type **`TYPE`** will trigger the event.
*/
signal: SignalEvent;
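// Illustrative registration (the signal type 'chat' is an assumed example):
// session.on('signal:chat', (event: SignalEvent) => console.log(event.data));
// only fires for signals sent with type 'chat'.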
/**
* Event dispatched when the session has started being recorded.
*
* Property **`OPENVIDU_RECORDING_NOTIFICATION`** of [OpenVidu Server configuration](/en/stable/reference-docs/openvidu-config/)
* defines which users should receive these events (by default, only users with role `PUBLISHER` or `MODERATOR`)
*/
recordingStarted: RecordingEvent;
/**
* Event dispatched when the session has stopped being recorded.
*
* Property **`OPENVIDU_RECORDING_NOTIFICATION`** of [OpenVidu Server configuration](/en/stable/reference-docs/openvidu-config/)
* defines which users should receive these events (by default, only users with role `PUBLISHER` or `MODERATOR`)
*/
recordingStopped: RecordingEvent;
/**
* **This feature is part of OpenVidu Pro tier** <a href="https://docs.openvidu.io/en/stable/openvidu-pro/" style="display: inline-block; background-color: rgb(0, 136, 170); color: white; font-weight: bold; padding: 0px 5px; margin-right: 5px; border-radius: 3px; font-size: 13px; line-height:21px; font-family: Montserrat, sans-serif">PRO</a>
*
* Event dispatched when the network quality level of a [[Connection]] changes. See [network quality](/en/stable/advanced-features/network-quality/).
*/
networkQualityLevelChanged: NetworkQualityLevelChangedEvent;
/**
* Event dispatched when the local user has lost its connection to the session, and starts the automatic reconnection process.
*
* See [Reconnection events](/en/stable/advanced-features/automatic-reconnection/#reconnection-events).
*/
reconnecting: never;
/**
* Event dispatched when the local user has successfully recovered its connection to the session after losing it.
*
* If the connection was recovered but OpenVidu Server already evicted the user due to timeout, then this event will
* not be dispatched. A [[sessionDisconnected]] event with reason `networkDisconnect` will be triggered instead.
*
* See [Reconnection events](/en/stable/advanced-features/automatic-reconnection/#reconnection-events). | * This event acts as a global handler for asynchronous errors that may be triggered for multiple reasons and from multiple origins.
* To see the different types of exceptions go to [[ExceptionEventName]].
*/
exception: ExceptionEvent;
} | */
reconnected: never;
/** | random_line_split |
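// A minimal usage sketch for the event map above (assumes an OpenVidu `session`
// instance and a target element id; not part of this file):
// session.on('streamCreated', (event: StreamEvent) => {
//   session.subscribe(event.stream, 'video-container');
// });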
websocket.py | # coding=utf-8
import codecs
import logging
import cherrypy
import tailer
from schema import And, Schema, SchemaError, Use
from ws4py.messaging import TextMessage
from ws4py.websocket import WebSocket
import autosubliminal
from autosubliminal import system
from autosubliminal.core.runner import Runner
from autosubliminal.util.encoding import b2u
from autosubliminal.util.json import from_json, to_json
log = logging.getLogger(__name__)
RUN_SCHEDULER = 'RUN_SCHEDULER'
RUN_SYSTEM_PROCESS = 'RUN_SYSTEM_PROCESS'
SUPPORTED_EVENT_TYPES = [RUN_SCHEDULER, RUN_SYSTEM_PROCESS]
MESSAGE_SCHEMA = Schema({
'type': 'EVENT',
'event': {
'type': And(Use(str), lambda t: t in SUPPORTED_EVENT_TYPES),
'data': And(Use(dict))
}
})
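# A message that passes MESSAGE_SCHEMA looks like the following; the scheduler
# name is illustrative -- any key present in autosubliminal.SCHEDULERS would do:
# {'type': 'EVENT', 'event': {'type': 'RUN_SCHEDULER', 'data': {'name': 'diskScanner'}}}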
class WebSocketHandler(WebSocket):
"""
WebSocket handler class for receiving messages on the server through the websocket system.
For now we only support event messages that trigger something on the server.
"""
def received_message(self, message):
if isinstance(message, TextMessage):
# Data is always returned in bytes through the websocket, so convert it first to unicode
message_dict = from_json(b2u(message.data))
self.handle_message(message_dict)
else:
log.warning('Unsupported message received on websocket server: %r', message)
def | (self, message):
handled = False
# Check for a valid event message structure
if self.check_message_structure(message):
event = message['event']
# Handle a RUN_SCHEDULER event
if event['type'] == RUN_SCHEDULER:
name = event['data']['name']
if name in autosubliminal.SCHEDULERS:
autosubliminal.SCHEDULERS[name].run()
handled = True
# Handle a RUN_SYSTEM_PROCESS event
elif event['type'] == RUN_SYSTEM_PROCESS:
name = event['data']['name']
if name == 'restart':
system.restart()
handled = True
elif name == 'shutdown':
system.shutdown()
handled = True
elif name == 'update':
system.update()
handled = True
elif name == 'flushCache':
system.flush_cache()
handled = True
elif name == 'flushWantedItems':
system.flush_wanted_items()
handled = True
elif name == 'flushLastDownloads':
system.flush_last_downloads()
handled = True
elif name == 'flushLibrary':
system.flush_library()
handled = True
if not handled:
log.warning('Unsupported message received on websocket server: %r', message)
return handled
def check_message_structure(self, message):
try:
MESSAGE_SCHEMA.validate(message)
return True
except SchemaError:
return False
class WebSocketBroadCaster(Runner):
"""
WebSocket broadcaster class for broadcasting data from the server through the websocket system.
"""
def run(self):
# Check for messages on the websocket queue and pop it
if len(autosubliminal.WEBSOCKETMESSAGEQUEUE) > 0:
message = autosubliminal.WEBSOCKETMESSAGEQUEUE.pop(0)
log.debug('Broadcasting websocket message: %r', message)
# The message on the websocket queue is a dict, so convert it to a json string
cherrypy.engine.publish('websocket-broadcast', to_json(message))
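# Producers elsewhere in the app broadcast by appending to the queue; a minimal
# sketch (any JSON-serializable dict works -- this particular shape is assumed):
# autosubliminal.WEBSOCKETMESSAGEQUEUE.append({'type': 'NOTIFICATION', 'message': 'done'})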
class WebSocketLogHandler(WebSocket):
"""
Websocket handler for log file tailing.
"""
def opened(self):
cherrypy.log("WebSocketLogHandler opened, starting log file tailing...")
logfile = autosubliminal.LOGFILE
for line in tailer.follow(codecs.open(logfile, 'r', 'utf-8')):
self.send(TextMessage(line), False)
| handle_message | identifier_name |
websocket.py | # coding=utf-8
import codecs
import logging
import cherrypy
import tailer
from schema import And, Schema, SchemaError, Use
from ws4py.messaging import TextMessage
from ws4py.websocket import WebSocket
import autosubliminal
from autosubliminal import system
from autosubliminal.core.runner import Runner
from autosubliminal.util.encoding import b2u
from autosubliminal.util.json import from_json, to_json
log = logging.getLogger(__name__)
RUN_SCHEDULER = 'RUN_SCHEDULER'
RUN_SYSTEM_PROCESS = 'RUN_SYSTEM_PROCESS'
SUPPORTED_EVENT_TYPES = [RUN_SCHEDULER, RUN_SYSTEM_PROCESS]
MESSAGE_SCHEMA = Schema({
'type': 'EVENT',
'event': {
'type': And(Use(str), lambda t: t in SUPPORTED_EVENT_TYPES),
'data': And(Use(dict))
}
})
class WebSocketHandler(WebSocket):
"""
WebSocket handler class for receiving messages on the server through the websocket system.
For now we only support event messages that trigger something on the server.
"""
def received_message(self, message):
if isinstance(message, TextMessage):
# Data is always returned in bytes through the websocket, so convert it first to unicode
message_dict = from_json(b2u(message.data))
self.handle_message(message_dict)
else:
|
def handle_message(self, message):
handled = False
# Check for a valid event message structure
if self.check_message_structure(message):
event = message['event']
# Handle a RUN_SCHEDULER event
if event['type'] == RUN_SCHEDULER:
name = event['data']['name']
if name in autosubliminal.SCHEDULERS:
autosubliminal.SCHEDULERS[name].run()
handled = True
# Handle a RUN_SYSTEM_PROCESS event
elif event['type'] == RUN_SYSTEM_PROCESS:
name = event['data']['name']
if name == 'restart':
system.restart()
handled = True
elif name == 'shutdown':
system.shutdown()
handled = True
elif name == 'update':
system.update()
handled = True
elif name == 'flushCache':
system.flush_cache()
handled = True
elif name == 'flushWantedItems':
system.flush_wanted_items()
handled = True
elif name == 'flushLastDownloads':
system.flush_last_downloads()
handled = True
elif name == 'flushLibrary':
system.flush_library()
handled = True
if not handled:
log.warning('Unsupported message received on websocket server: %r', message)
return handled
def check_message_structure(self, message):
try:
MESSAGE_SCHEMA.validate(message)
return True
except SchemaError:
return False
class WebSocketBroadCaster(Runner):
"""
WebSocket broadcaster class for broadcasting data from the server through the websocket system.
"""
def run(self):
# Check for messages on the websocket queue and pop it
if len(autosubliminal.WEBSOCKETMESSAGEQUEUE) > 0:
message = autosubliminal.WEBSOCKETMESSAGEQUEUE.pop(0)
log.debug('Broadcasting websocket message: %r', message)
# The message on the websocket queue is a dict, so convert it to a json string
cherrypy.engine.publish('websocket-broadcast', to_json(message))
class WebSocketLogHandler(WebSocket):
"""
Websocket handler for log file tailing.
"""
def opened(self):
cherrypy.log("WebSocketLogHandler opened, starting log file tailing...")
logfile = autosubliminal.LOGFILE
for line in tailer.follow(codecs.open(logfile, 'r', 'utf-8')):
self.send(TextMessage(line), False)
| log.warning('Unsupported message received on websocket server: %r', message) | conditional_block |
websocket.py | # coding=utf-8
import codecs
import logging
import cherrypy
import tailer
from schema import And, Schema, SchemaError, Use
from ws4py.messaging import TextMessage
from ws4py.websocket import WebSocket
import autosubliminal
from autosubliminal import system
from autosubliminal.core.runner import Runner
from autosubliminal.util.encoding import b2u
from autosubliminal.util.json import from_json, to_json
log = logging.getLogger(__name__)
RUN_SCHEDULER = 'RUN_SCHEDULER'
RUN_SYSTEM_PROCESS = 'RUN_SYSTEM_PROCESS'
SUPPORTED_EVENT_TYPES = [RUN_SCHEDULER, RUN_SYSTEM_PROCESS]
MESSAGE_SCHEMA = Schema({
'type': 'EVENT',
'event': {
'type': And(Use(str), lambda t: t in SUPPORTED_EVENT_TYPES),
'data': And(Use(dict))
}
})
class WebSocketHandler(WebSocket):
"""
WebSocket handler class for receiving messages on the server through the websocket system.
For now we only support event messages that trigger something on the server.
"""
def received_message(self, message):
if isinstance(message, TextMessage):
# Data is always returned in bytes through the websocket, so convert it first to unicode
message_dict = from_json(b2u(message.data))
self.handle_message(message_dict)
else:
log.warning('Unsupported message received on websocket server: %r', message)
def handle_message(self, message):
handled = False
# Check for a valid event message structure
if self.check_message_structure(message):
event = message['event']
# Handle a RUN_SCHEDULER event
if event['type'] == RUN_SCHEDULER:
name = event['data']['name']
if name in autosubliminal.SCHEDULERS:
autosubliminal.SCHEDULERS[name].run()
handled = True
# Handle a RUN_SYSTEM_PROCESS event
elif event['type'] == RUN_SYSTEM_PROCESS:
name = event['data']['name']
if name == 'restart':
system.restart()
handled = True
elif name == 'shutdown':
system.shutdown()
handled = True
elif name == 'update':
system.update()
handled = True
elif name == 'flushCache':
system.flush_cache()
handled = True
elif name == 'flushWantedItems':
system.flush_wanted_items()
handled = True
elif name == 'flushLastDownloads':
system.flush_last_downloads()
handled = True
elif name == 'flushLibrary':
system.flush_library()
handled = True
| def check_message_structure(self, message):
try:
MESSAGE_SCHEMA.validate(message)
return True
except SchemaError:
return False
class WebSocketBroadCaster(Runner):
"""
WebSocket broadcaster class for broadcasting data from the server through the websocket system.
"""
def run(self):
# Check for messages on the websocket queue and pop it
if len(autosubliminal.WEBSOCKETMESSAGEQUEUE) > 0:
message = autosubliminal.WEBSOCKETMESSAGEQUEUE.pop(0)
log.debug('Broadcasting websocket message: %r', message)
# The message on the websocket queue is a dict, so convert it to a json string
cherrypy.engine.publish('websocket-broadcast', to_json(message))
class WebSocketLogHandler(WebSocket):
"""
Websocket handler for log file tailing.
"""
def opened(self):
cherrypy.log("WebSocketLogHandler opened, starting log file tailing...")
logfile = autosubliminal.LOGFILE
for line in tailer.follow(codecs.open(logfile, 'r', 'utf-8')):
self.send(TextMessage(line), False) | if not handled:
log.warning('Unsupported message received on websocket server: %r', message)
return handled
| random_line_split |
websocket.py | # coding=utf-8
import codecs
import logging
import cherrypy
import tailer
from schema import And, Schema, SchemaError, Use
from ws4py.messaging import TextMessage
from ws4py.websocket import WebSocket
import autosubliminal
from autosubliminal import system
from autosubliminal.core.runner import Runner
from autosubliminal.util.encoding import b2u
from autosubliminal.util.json import from_json, to_json
log = logging.getLogger(__name__)
RUN_SCHEDULER = 'RUN_SCHEDULER'
RUN_SYSTEM_PROCESS = 'RUN_SYSTEM_PROCESS'
SUPPORTED_EVENT_TYPES = [RUN_SCHEDULER, RUN_SYSTEM_PROCESS]
MESSAGE_SCHEMA = Schema({
'type': 'EVENT',
'event': {
'type': And(Use(str), lambda t: t in SUPPORTED_EVENT_TYPES),
'data': And(Use(dict))
}
})
class WebSocketHandler(WebSocket):
"""
WebSocket handler class for receiving messages on the server through the websocket system.
For now we only support event messages that trigger something on the server.
"""
def received_message(self, message):
if isinstance(message, TextMessage):
# Data is always returned in bytes through the websocket, so convert it first to unicode
message_dict = from_json(b2u(message.data))
self.handle_message(message_dict)
else:
log.warning('Unsupported message received on websocket server: %r', message)
def handle_message(self, message):
| handled = True
elif name == 'update':
system.update()
handled = True
elif name == 'flushCache':
system.flush_cache()
handled = True
elif name == 'flushWantedItems':
system.flush_wanted_items()
handled = True
elif name == 'flushLastDownloads':
system.flush_last_downloads()
handled = True
elif name == 'flushLibrary':
system.flush_library()
handled = True
if not handled:
log.warning('Unsupported message received on websocket server: %r', message)
return handled
def check_message_structure(self, message):
try:
MESSAGE_SCHEMA.validate(message)
return True
except SchemaError:
return False
class WebSocketBroadCaster(Runner):
"""
WebSocket broadcaster class for broadcasting data from the server through the websocket system.
"""
def run(self):
# Check for messages on the websocket queue and pop it
if len(autosubliminal.WEBSOCKETMESSAGEQUEUE) > 0:
message = autosubliminal.WEBSOCKETMESSAGEQUEUE.pop(0)
log.debug('Broadcasting websocket message: %r', message)
# The message on the websocket queue is a dict, so convert it to a json string
cherrypy.engine.publish('websocket-broadcast', to_json(message))
class WebSocketLogHandler(WebSocket):
"""
Websocket handler for log file tailing.
"""
def opened(self):
cherrypy.log("WebSocketLogHandler opened, starting log file tailing...")
logfile = autosubliminal.LOGFILE
for line in tailer.follow(codecs.open(logfile, 'r', 'utf-8')):
self.send(TextMessage(line), False)
| handled = False
# Check for a valid event message structure
if self.check_message_structure(message):
event = message['event']
# Handle a RUN_SCHEDULER event
if event['type'] == RUN_SCHEDULER:
name = event['data']['name']
if name in autosubliminal.SCHEDULERS:
autosubliminal.SCHEDULERS[name].run()
handled = True
# Handle a RUN_SYSTEM_PROCESS event
elif event['type'] == RUN_SYSTEM_PROCESS:
name = event['data']['name']
if name == 'restart':
system.restart()
handled = True
elif name == 'shutdown':
system.shutdown() | identifier_body |
subcriteria_test.tsx | jest.mock("../edit", () => ({
toggleAndEditEqCriteria: jest.fn(),
}));
import React from "react";
import { mount } from "enzyme";
import { toggleAndEditEqCriteria } from "..";
import { CheckboxListProps, SubCriteriaSectionProps } from "../interfaces";
import {
fakePointGroup,
} from "../../../__test_support__/fake_state/resources";
import { CheckboxList, SubCriteriaSection } from "../subcriteria";
describe("<SubCriteriaSection />", () => {
const fakeProps = (): SubCriteriaSectionProps => ({
dispatch: Function,
group: fakePointGroup(),
disabled: false,
pointerTypes: [],
slugs: [],
});
it("doesn't return criteria", () => {
const p = fakeProps();
p.pointerTypes = [];
const wrapper = mount(<SubCriteriaSection {...p} />);
expect(wrapper.text()).toEqual("");
});
it("doesn't return incompatible criteria", () => {
const p = fakeProps();
p.pointerTypes = ["Plant", "Weed"];
const wrapper = mount(<SubCriteriaSection {...p} />);
expect(wrapper.text()).toEqual("");
});
it("returns plant criteria", () => {
const p = fakeProps();
p.pointerTypes = ["Plant"];
p.slugs = ["strawberry-guava"];
const wrapper = mount(<SubCriteriaSection {...p} />);
expect(wrapper.text().toLowerCase()).toContain("stage");
expect(wrapper.text()).toContain("Strawberry guava");
});
it("returns point criteria", () => {
const p = fakeProps();
p.pointerTypes = ["GenericPointer"];
const wrapper = mount(<SubCriteriaSection {...p} />);
expect(wrapper.text().toLowerCase()).toContain("color");
});
it("returns weed criteria", () => {
const p = fakeProps();
p.pointerTypes = ["Weed"];
const wrapper = mount(<SubCriteriaSection {...p} />);
expect(wrapper.text().toLowerCase()).toContain("source");
});
it("returns tool slot criteria", () => {
const p = fakeProps();
p.pointerTypes = ["ToolSlot"];
const wrapper = mount(<SubCriteriaSection {...p} />);
expect(wrapper.text().toLowerCase()).toContain("direction");
});
}); | const fakeProps = (): CheckboxListProps<string> => ({
criteriaKey: "openfarm_slug",
list: [{ label: "label", value: "value" }],
dispatch: jest.fn(),
group: fakePointGroup(),
pointerType: "Plant",
disabled: false,
});
it("toggles criteria", () => {
const p = fakeProps();
const wrapper = mount(<CheckboxList {...p} />);
expect(wrapper.text()).toContain("label");
wrapper.find("input").first().simulate("change");
expect(toggleAndEditEqCriteria).toHaveBeenCalledWith(
p.group, "openfarm_slug", "value");
});
}); |
describe("<CheckboxList />", () => { | random_line_split |
specials.py | from djpcms import sites
from djpcms.http import get_http
from djpcms.template import RequestContext, loader
from djpcms.views.baseview import djpcmsview
class badview(djpcmsview):
def __init__(self, template, httphandler):
self.template = template
self.httphandler = httphandler
super(badview,self).__init__()
def response(self, request):
t = loader.get_template(self.template)
c = {'request_path': request.path,
'grid': self.grid960()}
return self.httphandler(t.render(RequestContext(request, c)))
def http404view(request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('404.html',
http.HttpResponseNotFound).response(request)
def | (request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('500.html',
http.HttpResponseServerError).response(request) | http500view | identifier_name |
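# Wiring sketch (assumes Django-style handler hooks, on which djpcms builds;
# the dotted path is a guess from this module's likely location):
# handler404 = 'djpcms.views.specials.http404view'
# handler500 = 'djpcms.views.specials.http500view'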
specials.py | from djpcms import sites
from djpcms.http import get_http
from djpcms.template import RequestContext, loader
from djpcms.views.baseview import djpcmsview
class badview(djpcmsview):
def __init__(self, template, httphandler):
self.template = template
self.httphandler = httphandler
super(badview,self).__init__()
def response(self, request):
|
def http404view(request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('404.html',
http.HttpResponseNotFound).response(request)
def http500view(request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('500.html',
http.HttpResponseServerError).response(request) | t = loader.get_template(self.template)
c = {'request_path': request.path,
'grid': self.grid960()}
return self.httphandler(t.render(RequestContext(request, c))) | identifier_body |
specials.py | from djpcms import sites
from djpcms.http import get_http
from djpcms.template import RequestContext, loader
from djpcms.views.baseview import djpcmsview
class badview(djpcmsview):
def __init__(self, template, httphandler):
self.template = template
self.httphandler = httphandler
super(badview,self).__init__() | def response(self, request):
t = loader.get_template(self.template)
c = {'request_path': request.path,
'grid': self.grid960()}
return self.httphandler(t.render(RequestContext(request, c)))
def http404view(request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('404.html',
http.HttpResponseNotFound).response(request)
def http500view(request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('500.html',
http.HttpResponseServerError).response(request) | random_line_split |
|
ng-wig.js | /**
* version: 1.1.10
*/
angular.module('ngWig', ['ngwig-app-templates']);
angular.module('ngWig')
.directive('ngWig', function () {
return {
scope: {
content: '=ngWig'
},
restrict: 'A',
replace: true, | scope.autoexpand = !('autoexpand' in attrs) || attrs['autoexpand'] !== 'off';
scope.toggleEditMode = function () {
scope.editMode = !scope.editMode;
};
scope.execCommand = function (command, options) {
if (command === 'createlink') {
options = prompt('Please enter the URL', 'http://');
}
if (command === 'insertimage') {
options = prompt('Please enter an image URL to insert', 'http://');
}
if (options !== null) {
scope.$emit('execCommand', {command: command, options: options});
}
};
scope.styles = [
{name: 'Normal text', value: 'p'},
{name: 'Header 1', value: 'h1'},
{name: 'Header 2', value: 'h2'},
{name: 'Header 3', value: 'h3'}
];
scope.style = scope.styles[0];
scope.$on("colorpicker-selected", function ($event, color) {
scope.execCommand('foreColor', color.value);
});
}
}
});
angular.module('ngWig')
.directive('ngWigEditable', function () {
function init(scope, $element, attrs, ctrl) {
var document = $element[0].ownerDocument;
$element.attr('contenteditable', true);
//model --> view
ctrl.$render = function () {
$element.html(ctrl.$viewValue || '');
};
//view --> model
function viewToModel() {
ctrl.$setViewValue($element.html());
//to support Angular 1.2.x
if (angular.version.minor < 3) {
scope.$apply();
}
}
$element.bind('blur keyup change paste', viewToModel);
scope.$on('execCommand', function (event, params) {
$element[0].focus();
var ieStyleTextSelection = document.selection,
command = params.command,
options = params.options;
if (ieStyleTextSelection) {
var textRange = ieStyleTextSelection.createRange();
}
document.execCommand(command, false, options);
if (ieStyleTextSelection) {
textRange.collapse(false);
textRange.select();
}
viewToModel();
});
}
return {
restrict: 'A',
require: 'ngModel',
replace: true,
link: init
}
}
);
angular.module('ngwig-app-templates', ['ng-wig/views/ng-wig.html']);
angular.module("ng-wig/views/ng-wig.html", []).run(["$templateCache", function($templateCache) {
$templateCache.put("ng-wig/views/ng-wig.html",
"<div class=\"ng-wig\">\n" +
" <ul class=\"nw-toolbar\">\n" +
" <li class=\"nw-toolbar__item\">\n" +
" <select class=\"nw-select\" ng-model=\"style\" ng-change=\"execCommand('formatblock', style.value)\" ng-options=\"style.name for style in styles\">\n" +
" </select>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--unordered-list\" title=\"Unordered List\" ng-click=\"execCommand('insertunorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--ordered-list\" title=\"Ordered List\" ng-click=\"execCommand('insertorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--bold\" title=\"Bold\" ng-click=\"execCommand('bold')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--italic\" title=\"Italic\" ng-click=\"execCommand('italic')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button colorpicker ng-model=\"fontcolor\" class=\"nw-button nw-button--text-color\" title=\"Font Color\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--link\" title=\"link\" ng-click=\"execCommand('createlink')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--image\" title=\"image\" ng-click=\"execCommand('insertimage')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--source\" ng-class=\"{ 'nw-button--active': editMode }\" ng-click=\"toggleEditMode()\"></button>\n" +
" </li>\n" +
" </ul>\n" +
"\n" +
" <div class=\"nw-editor-container\">\n" +
" <div class=\"nw-editor\">\n" +
" <textarea class=\"nw-editor__src\" ng-show=\"editMode\" ng-model=\"content\"></textarea>\n" +
" <div ng-class=\"{'nw-invisible': editMode, 'nw-autoexpand': autoexpand}\" class=\"nw-editor__res\" ng-model=\"content\" ng-wig-editable></div>\n" +
" </div>\n" +
" </div>\n" +
"</div>\n" +
"");
}]); | templateUrl: 'ng-wig/views/ng-wig.html',
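// Usage sketch for the attribute directive above (host markup assumed):
//   <div ng-wig="doc.body"></div>
// The edited HTML flows into `doc.body` through the two-way `content` binding.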
link: function (scope, element, attrs) {
scope.editMode = false; | random_line_split |
ng-wig.js | /**
* version: 1.1.10
*/
angular.module('ngWig', ['ngwig-app-templates']);
angular.module('ngWig')
.directive('ngWig', function () {
return {
scope: {
content: '=ngWig'
},
restrict: 'A',
replace: true,
templateUrl: 'ng-wig/views/ng-wig.html',
link: function (scope, element, attrs) {
scope.editMode = false;
scope.autoexpand = !('autoexpand' in attrs) || attrs['autoexpand'] !== 'off';
scope.toggleEditMode = function () {
scope.editMode = !scope.editMode;
};
scope.execCommand = function (command, options) {
if (command === 'createlink') {
options = prompt('Please enter the URL', 'http://');
}
if (command === 'insertimage') {
options = prompt('Please enter an image URL to insert', 'http://');
}
if (options !== null) {
scope.$emit('execCommand', {command: command, options: options});
}
};
scope.styles = [
{name: 'Normal text', value: 'p'},
{name: 'Header 1', value: 'h1'},
{name: 'Header 2', value: 'h2'},
{name: 'Header 3', value: 'h3'}
];
scope.style = scope.styles[0];
scope.$on("colorpicker-selected", function ($event, color) {
scope.execCommand('foreColor', color.value);
});
}
}
});
angular.module('ngWig')
.directive('ngWigEditable', function () {
function init(scope, $element, attrs, ctrl) {
var document = $element[0].ownerDocument;
$element.attr('contenteditable', true);
//model --> view
ctrl.$render = function () {
$element.html(ctrl.$viewValue || '');
};
//view --> model
function viewToModel() |
$element.bind('blur keyup change paste', viewToModel);
scope.$on('execCommand', function (event, params) {
$element[0].focus();
var ieStyleTextSelection = document.selection,
command = params.command,
options = params.options;
if (ieStyleTextSelection) {
var textRange = ieStyleTextSelection.createRange();
}
document.execCommand(command, false, options);
if (ieStyleTextSelection) {
textRange.collapse(false);
textRange.select();
}
viewToModel();
});
}
return {
restrict: 'A',
require: 'ngModel',
replace: true,
link: init
}
}
);
angular.module('ngwig-app-templates', ['ng-wig/views/ng-wig.html']);
angular.module("ng-wig/views/ng-wig.html", []).run(["$templateCache", function($templateCache) {
$templateCache.put("ng-wig/views/ng-wig.html",
"<div class=\"ng-wig\">\n" +
" <ul class=\"nw-toolbar\">\n" +
" <li class=\"nw-toolbar__item\">\n" +
" <select class=\"nw-select\" ng-model=\"style\" ng-change=\"execCommand('formatblock', style.value)\" ng-options=\"style.name for style in styles\">\n" +
" </select>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--unordered-list\" title=\"Unordered List\" ng-click=\"execCommand('insertunorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--ordered-list\" title=\"Ordered List\" ng-click=\"execCommand('insertorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--bold\" title=\"Bold\" ng-click=\"execCommand('bold')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--italic\" title=\"Italic\" ng-click=\"execCommand('italic')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button colorpicker ng-model=\"fontcolor\" class=\"nw-button nw-button--text-color\" title=\"Font Color\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--link\" title=\"link\" ng-click=\"execCommand('createlink')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--image\" title=\"image\" ng-click=\"execCommand('insertimage')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--source\" ng-class=\"{ 'nw-button--active': editMode }\" ng-click=\"toggleEditMode()\"></button>\n" +
" </li>\n" +
" </ul>\n" +
"\n" +
" <div class=\"nw-editor-container\">\n" +
" <div class=\"nw-editor\">\n" +
" <textarea class=\"nw-editor__src\" ng-show=\"editMode\" ng-model=\"content\"></textarea>\n" +
" <div ng-class=\"{'nw-invisible': editMode, 'nw-autoexpand': autoexpand}\" class=\"nw-editor__res\" ng-model=\"content\" ng-wig-editable></div>\n" +
" </div>\n" +
" </div>\n" +
"</div>\n" +
"");
}]);
| {
ctrl.$setViewValue($element.html());
//to support Angular 1.2.x
if (angular.version.minor < 3) {
scope.$apply();
}
} | identifier_body |
ng-wig.js | /**
* version: 1.1.10
*/
angular.module('ngWig', ['ngwig-app-templates']);
angular.module('ngWig')
.directive('ngWig', function () {
return {
scope: {
content: '=ngWig'
},
restrict: 'A',
replace: true,
templateUrl: 'ng-wig/views/ng-wig.html',
link: function (scope, element, attrs) {
scope.editMode = false;
scope.autoexpand = !('autoexpand' in attrs) || attrs['autoexpand'] !== 'off';
scope.toggleEditMode = function () {
scope.editMode = !scope.editMode;
};
scope.execCommand = function (command, options) {
if (command === 'createlink') {
options = prompt('Please enter the URL', 'http://');
}
if (command === 'insertimage') |
if (options !== null) {
scope.$emit('execCommand', {command: command, options: options});
}
};
scope.styles = [
{name: 'Normal text', value: 'p'},
{name: 'Header 1', value: 'h1'},
{name: 'Header 2', value: 'h2'},
{name: 'Header 3', value: 'h3'}
];
scope.style = scope.styles[0];
scope.$on("colorpicker-selected", function ($event, color) {
scope.execCommand('foreColor', color.value);
});
}
}
});
angular.module('ngWig')
.directive('ngWigEditable', function () {
function init(scope, $element, attrs, ctrl) {
var document = $element[0].ownerDocument;
$element.attr('contenteditable', true);
//model --> view
ctrl.$render = function () {
$element.html(ctrl.$viewValue || '');
};
//view --> model
function viewToModel() {
ctrl.$setViewValue($element.html());
//to support Angular 1.2.x
if (angular.version.minor < 3) {
scope.$apply();
}
}
$element.bind('blur keyup change paste', viewToModel);
scope.$on('execCommand', function (event, params) {
$element[0].focus();
var ieStyleTextSelection = document.selection,
command = params.command,
options = params.options;
if (ieStyleTextSelection) {
var textRange = ieStyleTextSelection.createRange();
}
document.execCommand(command, false, options);
if (ieStyleTextSelection) {
textRange.collapse(false);
textRange.select();
}
viewToModel();
});
}
return {
restrict: 'A',
require: 'ngModel',
replace: true,
link: init
}
}
);
angular.module('ngwig-app-templates', ['ng-wig/views/ng-wig.html']);
angular.module("ng-wig/views/ng-wig.html", []).run(["$templateCache", function($templateCache) {
$templateCache.put("ng-wig/views/ng-wig.html",
"<div class=\"ng-wig\">\n" +
" <ul class=\"nw-toolbar\">\n" +
" <li class=\"nw-toolbar__item\">\n" +
" <select class=\"nw-select\" ng-model=\"style\" ng-change=\"execCommand('formatblock', style.value)\" ng-options=\"style.name for style in styles\">\n" +
" </select>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--unordered-list\" title=\"Unordered List\" ng-click=\"execCommand('insertunorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--ordered-list\" title=\"Ordered List\" ng-click=\"execCommand('insertorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--bold\" title=\"Bold\" ng-click=\"execCommand('bold')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--italic\" title=\"Italic\" ng-click=\"execCommand('italic')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button colorpicker ng-model=\"fontcolor\" class=\"nw-button nw-button--text-color\" title=\"Font Color\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--link\" title=\"link\" ng-click=\"execCommand('createlink')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--image\" title=\"image\" ng-click=\"execCommand('insertimage')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--source\" ng-class=\"{ 'nw-button--active': editMode }\" ng-click=\"toggleEditMode()\"></button>\n" +
" </li>\n" +
" </ul>\n" +
"\n" +
" <div class=\"nw-editor-container\">\n" +
" <div class=\"nw-editor\">\n" +
" <textarea class=\"nw-editor__src\" ng-show=\"editMode\" ng-model=\"content\"></textarea>\n" +
" <div ng-class=\"{'nw-invisible': editMode, 'nw-autoexpand': autoexpand}\" class=\"nw-editor__res\" ng-model=\"content\" ng-wig-editable></div>\n" +
" </div>\n" +
" </div>\n" +
"</div>\n" +
"");
}]);
| {
options = prompt('Please enter an image URL to insert', 'http://');
} | conditional_block |
ng-wig.js | /**
* version: 1.1.10
*/
angular.module('ngWig', ['ngwig-app-templates']);
angular.module('ngWig')
.directive('ngWig', function () {
return {
scope: {
content: '=ngWig'
},
restrict: 'A',
replace: true,
templateUrl: 'ng-wig/views/ng-wig.html',
link: function (scope, element, attrs) {
scope.editMode = false;
scope.autoexpand = !('autoexpand' in attrs) || attrs['autoexpand'] !== 'off';
scope.toggleEditMode = function () {
scope.editMode = !scope.editMode;
};
scope.execCommand = function (command, options) {
if (command === 'createlink') {
options = prompt('Please enter the URL', 'http://');
}
if (command === 'insertimage') {
options = prompt('Please enter an image URL to insert', 'http://');
}
if (options !== null) {
scope.$emit('execCommand', {command: command, options: options});
}
};
scope.styles = [
{name: 'Normal text', value: 'p'},
{name: 'Header 1', value: 'h1'},
{name: 'Header 2', value: 'h2'},
{name: 'Header 3', value: 'h3'}
];
scope.style = scope.styles[0];
scope.$on("colorpicker-selected", function ($event, color) {
scope.execCommand('foreColor', color.value);
});
}
}
});
angular.module('ngWig')
.directive('ngWigEditable', function () {
function | (scope, $element, attrs, ctrl) {
var document = $element[0].ownerDocument;
$element.attr('contenteditable', true);
//model --> view
ctrl.$render = function () {
$element.html(ctrl.$viewValue || '');
};
//view --> model
function viewToModel() {
ctrl.$setViewValue($element.html());
//to support Angular 1.2.x
if (angular.version.minor < 3) {
scope.$apply();
}
}
$element.bind('blur keyup change paste', viewToModel);
scope.$on('execCommand', function (event, params) {
$element[0].focus();
var ieStyleTextSelection = document.selection,
command = params.command,
options = params.options;
if (ieStyleTextSelection) {
var textRange = ieStyleTextSelection.createRange();
}
document.execCommand(command, false, options);
if (ieStyleTextSelection) {
textRange.collapse(false);
textRange.select();
}
viewToModel();
});
}
return {
restrict: 'A',
require: 'ngModel',
replace: true,
link: init
}
}
);
angular.module('ngwig-app-templates', ['ng-wig/views/ng-wig.html']);
angular.module("ng-wig/views/ng-wig.html", []).run(["$templateCache", function($templateCache) {
$templateCache.put("ng-wig/views/ng-wig.html",
"<div class=\"ng-wig\">\n" +
" <ul class=\"nw-toolbar\">\n" +
" <li class=\"nw-toolbar__item\">\n" +
" <select class=\"nw-select\" ng-model=\"style\" ng-change=\"execCommand('formatblock', style.value)\" ng-options=\"style.name for style in styles\">\n" +
" </select>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--unordered-list\" title=\"Unordered List\" ng-click=\"execCommand('insertunorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--ordered-list\" title=\"Ordered List\" ng-click=\"execCommand('insertorderedlist')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--bold\" title=\"Bold\" ng-click=\"execCommand('bold')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--italic\" title=\"Italic\" ng-click=\"execCommand('italic')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button colorpicker ng-model=\"fontcolor\" class=\"nw-button nw-button--text-color\" title=\"Font Color\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--link\" title=\"link\" ng-click=\"execCommand('createlink')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--image\" title=\"image\" ng-click=\"execCommand('insertimage')\"></button>\n" +
" </li><!--\n" +
" --><li class=\"nw-toolbar__item\">\n" +
" <button type=\"button\" class=\"nw-button nw-button--source\" ng-class=\"{ 'nw-button--active': editMode }\" ng-click=\"toggleEditMode()\"></button>\n" +
" </li>\n" +
" </ul>\n" +
"\n" +
" <div class=\"nw-editor-container\">\n" +
" <div class=\"nw-editor\">\n" +
" <textarea class=\"nw-editor__src\" ng-show=\"editMode\" ng-model=\"content\"></textarea>\n" +
" <div ng-class=\"{'nw-invisible': editMode, 'nw-autoexpand': autoexpand}\" class=\"nw-editor__res\" ng-model=\"content\" ng-wig-editable></div>\n" +
" </div>\n" +
" </div>\n" +
"</div>\n" +
"");
}]);
| init | identifier_name |
reducers.spec.js | import { expect } from 'chai'
import reducers, { initialState } from 'mobilizations/widgets/__plugins__/pressure/reducers'
import { createAction } from 'mobilizations/widgets/__plugins__/pressure/action-creators/create-action'
import * as t from 'mobilizations/widgets/__plugins__/pressure/action-types'
describe('client/mobilizations/widgets/__plugins__/pressure/reducers', () => {
it('should change saving state to true when requesting', () => {
const action = { type: t.WIDGET_PRESSURE_FILL_REQUEST }
const nextState = reducers(initialState, action)
expect(nextState).to.have.property('saving', true)
})
it(
'should change saving state to false and filledPressureWidgets state' +
' with widget id when succeeded',
() => {
// state while requesting
const currentInitialState = { ...initialState, saving: true, filledPressureWidgets: [] }
const action = createAction(t.WIDGET_PRESSURE_FILL_SUCCESS, 1)
const nextState = reducers(currentInitialState, action) | expect(nextState).to.have.property('saving', false)
expect(nextState)
.to.have.property('filledPressureWidgets')
.that.deep.equals([1])
}
)
it('should change saving state to false and error state with message when failed', () => {
// state while requesting
const currentInitialState = { ...initialState, saving: true, filled: false }
const failurePayload = { error: 'Pressure widget fill request error message!' }
const action = createAction(t.WIDGET_PRESSURE_FILL_FAILURE, failurePayload)
const nextState = reducers(currentInitialState, action)
expect(nextState).to.have.property('saving', false)
expect(nextState)
.to.have.property('error')
.that.is.an('object')
.that.deep.equals(failurePayload)
})
}) | random_line_split |
|
index.tsx | import * as React from 'react';
import PropTypes from 'prop-types';
import Animate from 'rc-animate';
import ScrollNumber from './ScrollNumber';
import classNames from 'classnames';
export { ScrollNumberProps } from './ScrollNumber';
export interface BadgeProps {
/** Number to show in badge */
count?: number | string;
showZero?: boolean;
/** Max count to show */
overflowCount?: number;
/** whether to show red dot without number */
dot?: boolean;
style?: React.CSSProperties;
prefixCls?: string;
scrollNumberPrefixCls?: string;
className?: string;
status?: 'success' | 'processing' | 'default' | 'error' | 'warning';
text?: string;
offset?: [number | string, number | string];
}
export default class Badge extends React.Component<BadgeProps, any> {
static defaultProps = {
prefixCls: 'ant-badge',
scrollNumberPrefixCls: 'ant-scroll-number',
count: null,
showZero: false,
dot: false,
overflowCount: 99,
};
static propTypes = {
count: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number,
]),
showZero: PropTypes.bool,
dot: PropTypes.bool,
overflowCount: PropTypes.number,
};
| () {
const {
count,
showZero,
prefixCls,
scrollNumberPrefixCls,
overflowCount,
className,
style,
children,
dot,
status,
text,
offset,
...restProps
} = this.props;
const isDot = dot || status;
let displayCount = (count as number) > (overflowCount as number) ? `${overflowCount}+` : count;
// dot mode doesn't need a count
if (isDot) {
displayCount = '';
}
const isZero = displayCount === '0' || displayCount === 0;
const isEmpty = displayCount === null || displayCount === undefined || displayCount === '';
const hidden = (isEmpty || (isZero && !showZero)) && !isDot;
const statusCls = classNames({
[`${prefixCls}-status-dot`]: !!status,
[`${prefixCls}-status-${status}`]: !!status,
});
const scrollNumberCls = classNames({
[`${prefixCls}-dot`]: isDot,
[`${prefixCls}-count`]: !isDot,
[`${prefixCls}-multiple-words`]: count && count.toString && count.toString().length > 1,
[`${prefixCls}-status-${status}`]: !!status,
});
const badgeCls = classNames(className, prefixCls, {
[`${prefixCls}-status`]: !!status,
[`${prefixCls}-not-a-wrapper`]: !children,
});
const styleWithOffset = offset ? {
marginTop: offset[0],
marginLeft: offset[1],
...style,
} : style;
// <Badge status="success" />
if (!children && status) {
return (
<span className={badgeCls} style={styleWithOffset}>
<span className={statusCls} />
<span className={`${prefixCls}-status-text`}>{text}</span>
</span>
);
}
const scrollNumber = hidden ? null : (
<ScrollNumber
prefixCls={scrollNumberPrefixCls}
data-show={!hidden}
className={scrollNumberCls}
count={displayCount}
title={count}
style={styleWithOffset}
/>
);
const statusText = (hidden || !text) ? null : (
<span className={`${prefixCls}-status-text`}>{text}</span>
);
return (
<span {...restProps} className={badgeCls}>
{children}
<Animate
component=""
showProp="data-show"
transitionName={children ? `${prefixCls}-zoom` : ''}
transitionAppear
>
{scrollNumber}
</Animate>
{statusText}
</span>
);
}
}
| render | identifier_name |
index.tsx | import * as React from 'react';
import PropTypes from 'prop-types';
import Animate from 'rc-animate';
import ScrollNumber from './ScrollNumber';
import classNames from 'classnames';
export { ScrollNumberProps } from './ScrollNumber';
export interface BadgeProps {
/** Number to show in badge */
count?: number | string;
showZero?: boolean;
/** Max count to show */
overflowCount?: number;
/** whether to show red dot without number */
dot?: boolean;
style?: React.CSSProperties;
prefixCls?: string;
scrollNumberPrefixCls?: string;
className?: string;
status?: 'success' | 'processing' | 'default' | 'error' | 'warning';
text?: string;
offset?: [number | string, number | string];
}
export default class Badge extends React.Component<BadgeProps, any> {
static defaultProps = {
prefixCls: 'ant-badge',
scrollNumberPrefixCls: 'ant-scroll-number',
count: null,
showZero: false,
dot: false,
overflowCount: 99,
};
static propTypes = {
count: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number,
]),
showZero: PropTypes.bool,
dot: PropTypes.bool,
overflowCount: PropTypes.number,
};
render() | displayCount = '';
}
const isZero = displayCount === '0' || displayCount === 0;
const isEmpty = displayCount === null || displayCount === undefined || displayCount === '';
const hidden = (isEmpty || (isZero && !showZero)) && !isDot;
const statusCls = classNames({
[`${prefixCls}-status-dot`]: !!status,
[`${prefixCls}-status-${status}`]: !!status,
});
const scrollNumberCls = classNames({
[`${prefixCls}-dot`]: isDot,
[`${prefixCls}-count`]: !isDot,
[`${prefixCls}-multiple-words`]: count && count.toString && count.toString().length > 1,
[`${prefixCls}-status-${status}`]: !!status,
});
const badgeCls = classNames(className, prefixCls, {
[`${prefixCls}-status`]: !!status,
[`${prefixCls}-not-a-wrapper`]: !children,
});
const styleWithOffset = offset ? {
marginTop: offset[0],
marginLeft: offset[1],
...style,
} : style;
// <Badge status="success" />
if (!children && status) {
return (
<span className={badgeCls} style={styleWithOffset}>
<span className={statusCls} />
<span className={`${prefixCls}-status-text`}>{text}</span>
</span>
);
}
const scrollNumber = hidden ? null : (
<ScrollNumber
prefixCls={scrollNumberPrefixCls}
data-show={!hidden}
className={scrollNumberCls}
count={displayCount}
title={count}
style={styleWithOffset}
/>
);
const statusText = (hidden || !text) ? null : (
<span className={`${prefixCls}-status-text`}>{text}</span>
);
return (
<span {...restProps} className={badgeCls}>
{children}
<Animate
component=""
showProp="data-show"
transitionName={children ? `${prefixCls}-zoom` : ''}
transitionAppear
>
{scrollNumber}
</Animate>
{statusText}
</span>
);
}
}
| {
const {
count,
showZero,
prefixCls,
scrollNumberPrefixCls,
overflowCount,
className,
style,
children,
dot,
status,
text,
offset,
...restProps
} = this.props;
const isDot = dot || status;
let displayCount = (count as number) > (overflowCount as number) ? `${overflowCount}+` : count;
// dot mode doesn't need a count
if (isDot) { | identifier_body |
index.tsx | import * as React from 'react';
import PropTypes from 'prop-types';
import Animate from 'rc-animate';
import ScrollNumber from './ScrollNumber';
import classNames from 'classnames';
export { ScrollNumberProps } from './ScrollNumber';
export interface BadgeProps {
/** Number to show in badge */
count?: number | string;
showZero?: boolean;
/** Max count to show */
overflowCount?: number;
/** whether to show red dot without number */
dot?: boolean;
style?: React.CSSProperties;
prefixCls?: string;
scrollNumberPrefixCls?: string;
className?: string;
status?: 'success' | 'processing' | 'default' | 'error' | 'warning';
text?: string;
offset?: [number | string, number | string];
}
export default class Badge extends React.Component<BadgeProps, any> {
static defaultProps = {
prefixCls: 'ant-badge',
scrollNumberPrefixCls: 'ant-scroll-number',
count: null,
showZero: false,
dot: false,
overflowCount: 99,
};
static propTypes = {
count: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number,
]),
showZero: PropTypes.bool,
dot: PropTypes.bool,
overflowCount: PropTypes.number,
};
render() {
const {
count,
showZero,
prefixCls,
scrollNumberPrefixCls,
overflowCount,
className,
style,
children,
dot,
status,
text,
offset,
...restProps
} = this.props;
const isDot = dot || status;
let displayCount = (count as number) > (overflowCount as number) ? `${overflowCount}+` : count;
// dot mode doesn't need a count
if (isDot) {
displayCount = '';
}
const isZero = displayCount === '0' || displayCount === 0;
const isEmpty = displayCount === null || displayCount === undefined || displayCount === '';
const hidden = (isEmpty || (isZero && !showZero)) && !isDot;
const statusCls = classNames({
[`${prefixCls}-status-dot`]: !!status,
[`${prefixCls}-status-${status}`]: !!status,
});
const scrollNumberCls = classNames({
[`${prefixCls}-dot`]: isDot,
[`${prefixCls}-count`]: !isDot,
[`${prefixCls}-multiple-words`]: count && count.toString && count.toString().length > 1,
[`${prefixCls}-status-${status}`]: !!status,
});
const badgeCls = classNames(className, prefixCls, {
[`${prefixCls}-status`]: !!status,
[`${prefixCls}-not-a-wrapper`]: !children,
});
const styleWithOffset = offset ? {
marginTop: offset[0],
marginLeft: offset[1],
...style,
} : style;
// <Badge status="success" />
if (!children && status) {
return (
<span className={badgeCls} style={styleWithOffset}>
<span className={statusCls} />
<span className={`${prefixCls}-status-text`}>{text}</span>
</span>
);
}
const scrollNumber = hidden ? null : (
<ScrollNumber
prefixCls={scrollNumberPrefixCls}
data-show={!hidden}
className={scrollNumberCls}
count={displayCount}
title={count}
style={styleWithOffset}
/>
);
const statusText = (hidden || !text) ? null : (
<span className={`${prefixCls}-status-text`}>{text}</span>
);
return (
<span {...restProps} className={badgeCls}>
{children}
<Animate
component=""
showProp="data-show" | transitionName={children ? `${prefixCls}-zoom` : ''}
transitionAppear
>
{scrollNumber}
</Animate>
{statusText}
</span>
);
}
} | random_line_split |
|
OpenStackTenantForm.tsx | import { FunctionComponent } from 'react';
import { StringField, SecretField, FormContainer } from '@waldur/form';
export const OpenStackTenantForm: FunctionComponent<{
translate;
container;
}> = ({ translate, container }) => (
<FormContainer {...container}>
<StringField
name="backend_url"
label={translate('API URL')}
required={true}
description={translate(
'Keystone auth URL (e.g. http://keystone.example.com:5000/v3)',
)}
/>
<StringField
name="username"
label={translate('Username')}
required={true}
description={translate('Tenant user username')}
/>
<SecretField
name="password"
label={translate('Password')}
required={true}
description={translate('Tenant user password')}
/>
<StringField
name="tenant_id"
label={translate('Tenant ID')}
required={true}
/>
<StringField
name="external_network_id"
label={translate('External network ID')}
required={true}
description={translate(
'It is used to automatically assign floating IP to your virtual machine.',
)}
/>
<StringField
name="domain"
label={translate('Domain')}
description={translate(
'Domain name. If not defined default domain will be used.',
)}
/>
<StringField
name="availability_zone"
label={translate('Availability zone')}
description={translate(
'Default availability zone for provisioned instances.',
)}
/>
<StringField
name="flavor_exclude_regex"
label={translate('Flavor exclude regex')}
description={translate( | />
</FormContainer>
); | 'Flavors matching this regex expression will not be pulled from the backend.',
)} | random_line_split |
uva_10407.py | # /* UVa problem: 10407
# * Simple Division
# * Topic: Number Theory
# *
# * Level: challenging
# *
# * Brief problem description:
# * Given a list of numbers a1, a2, a3, ..., an, compute a number m such that
# * ai mod m = x for some arbitrary x for all ai.
# * In other words, find a congruence class modulo m to which each number belongs
# * Solution Summary:
# * Compute the differences between consecutive numbers, then find the gcd
# * of all of the differences.
# * Used Resources:
# *
# * Textbook: Competitive Programming 3
# * Hints given on 'Spanish Problem Archive'
# *
# * I hereby certify that I have produced the following solution myself
# * using only the resources listed above in accordance with the CMPUT
# * 403 collaboration policy.
# *
# * Tristan Hunt
# */
import sys
def gcd(a, b):
|
def lcm(a, b):
return (a* (b/gcd(a, b)))
def load():
while(1):
line = next(sys.stdin).split()
line = [int(x) for x in line]
line.pop(-1)
if len(line) == 0:
break
yield(line)
for (sequence) in load():
n = len(sequence)
diff = list()
for i in range(0, n-1):
# Now find gcd of all the differences:
diff.append(abs(sequence[i+1] - sequence[i])) #compute the differences
if n == 2:
sys.stdout.write("{}\n".format(diff[0]))
else:
# Compute gcd of the differences
#print(diff)
#sys.stdout.write("gcd({}, {}) = {}\n".format(diff[0], diff[1], gcd(diff[0], diff[1])))
m = gcd(diff[0], diff[1])
for i in range(2, n-1):
#sys.stdout.write("gcd({}, {}) = {}\n".format(m, diff[i], gcd(m, diff[i])))
m = gcd(m, diff[i])
sys.stdout.write("{}\n".format(m))
| if b== 0:
return a
return gcd(b, a%b) | identifier_body |
uva_10407.py | # /* UVa problem: 10407
# * Simple Division
# * Topic: Number Theory
# *
# * Level: challenging
# *
# * Brief problem description:
# * Given a list of numbers a1, a2, a3, ..., an, compute a number m such that
# * ai mod m = x for some arbitrary x for all ai.
# * In other words, find a congruence class modulo m to which each number belongs
# * Solution Summary:
# * Compute the differences between consecutive numbers, then find the gcd
# * of all of the differences.
# * Used Resources:
# *
# * Textbook: Competitive Programming 3
# * Hints given on 'Spanish Problem Archive'
# *
# * I hereby certify that I have produced the following solution myself
# * using only the resources listed above in accordance with the CMPUT
# * 403 collaboration policy.
# *
# * Tristan Hunt
# */
import sys
def gcd(a, b):
if b== 0:
return a
return gcd(b, a%b)
def lcm(a, b):
return (a* (b/gcd(a, b)))
def load():
while(1):
line = next(sys.stdin).split()
line = [int(x) for x in line]
line.pop(-1)
if len(line) == 0:
break
yield(line)
for (sequence) in load():
n = len(sequence)
diff = list() |
if n == 2:
sys.stdout.write("{}\n".format(diff[0]))
else:
# Compute gcd of the differences
#print(diff)
#sys.stdout.write("gcd({}, {}) = {}\n".format(diff[0], diff[1], gcd(diff[0], diff[1])))
m = gcd(diff[0], diff[1])
for i in range(2, n-1):
#sys.stdout.write("gcd({}, {}) = {}\n".format(m, diff[i], gcd(m, diff[i])))
m = gcd(m, diff[i])
sys.stdout.write("{}\n".format(m)) | for i in range(0, n-1):
# Now find gcd of all the differences:
diff.append(abs(sequence[i+1] - sequence[i])) #compute the differences | random_line_split |
uva_10407.py | # /* UVa problem: 10407
# * Simple Division
# * Topic: Number Theory
# *
# * Level: challenging
# *
# * Brief problem description:
# * Given a list of numbers a1, a2, a3, ..., an, compute a number m such that
# * ai mod m = x for some arbitrary x for all ai.
# * In other words, find a congruence class modulo m to which each number belongs
# * Solution Summary:
# * Compute the differences between consecutive numbers, then find the gcd
# * of all of the differences.
# * Used Resources:
# *
# * Textbook: Competitive Programming 3
# * Hints given on 'Spanish Problem Archive'
# *
# * I hereby certify that I have produced the following solution myself
# * using only the resources listed above in accordance with the CMPUT
# * 403 collaboration policy.
# *
# * Tristan Hunt
# */
import sys
def gcd(a, b):
if b== 0:
|
return gcd(b, a%b)
def lcm(a, b):
return (a* (b/gcd(a, b)))
def load():
while(1):
line = next(sys.stdin).split()
line = [int(x) for x in line]
line.pop(-1)
if len(line) == 0:
break
yield(line)
for (sequence) in load():
n = len(sequence)
diff = list()
for i in range(0, n-1):
# Now find gcd of all the differences:
diff.append(abs(sequence[i+1] - sequence[i])) #compute the differences
if n == 2:
sys.stdout.write("{}\n".format(diff[0]))
else:
# Compute gcd of the differences
#print(diff)
#sys.stdout.write("gcd({}, {}) = {}\n".format(diff[0], diff[1], gcd(diff[0], diff[1])))
m = gcd(diff[0], diff[1])
for i in range(2, n-1):
#sys.stdout.write("gcd({}, {}) = {}\n".format(m, diff[i], gcd(m, diff[i])))
m = gcd(m, diff[i])
sys.stdout.write("{}\n".format(m))
| return a | conditional_block |
uva_10407.py | # /* UVa problem: 10407
# * Simple Division
# * Topic: Number Theory
# *
# * Level: challenging
# *
# * Brief problem description:
# * Given a list of numbers a1, a2, a3, ..., an, compute a number m such that
# * ai mod m = x for some arbitrary x for all ai.
# * In other words, find a congruence class modulo m to which each number belongs
# * Solution Summary:
# * Compute the differences between consecutive numbers, then find the gcd
# * of all of the differences.
# * Used Resources:
# *
# * Textbook: Competitive Programming 3
# * Hints given on 'Spanish Problem Archive'
# *
# * I hereby certify that I have produced the following solution myself
# * using only the resources listed above in accordance with the CMPUT
# * 403 collaboration policy.
# *
# * Tristan Hunt
# */
import sys
def gcd(a, b):
if b== 0:
return a
return gcd(b, a%b)
def lcm(a, b):
return (a* (b/gcd(a, b)))
def | ():
while(1):
line = next(sys.stdin).split()
line = [int(x) for x in line]
line.pop(-1)
if len(line) == 0:
break
yield(line)
for (sequence) in load():
n = len(sequence)
diff = list()
for i in range(0, n-1):
# Now find gcd of all the differences:
diff.append(abs(sequence[i+1] - sequence[i])) #compute the differences
if n == 2:
sys.stdout.write("{}\n".format(diff[0]))
else:
# Compute gcd of the differences
#print(diff)
#sys.stdout.write("gcd({}, {}) = {}\n".format(diff[0], diff[1], gcd(diff[0], diff[1])))
m = gcd(diff[0], diff[1])
for i in range(2, n-1):
#sys.stdout.write("gcd({}, {}) = {}\n".format(m, diff[i], gcd(m, diff[i])))
m = gcd(m, diff[i])
sys.stdout.write("{}\n".format(m))
| load | identifier_name |
hg_upgrader.py | import os
from ..cache import set_cache, get_cache
from ..show_error import show_error
from .vcs_upgrader import VcsUpgrader
class HgUpgrader(VcsUpgrader):
"""
Allows upgrading a local mercurial-repository-based package
"""
cli_name = 'hg'
def retrieve_binary(self):
| The Settings \u2013 Default entry can be used for reference,
but changes to that will be overwritten upon next upgrade.
''',
name
)
return False
return binary
def run(self):
"""
Updates the repository with remote changes
:return: False on error, or True on success
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary]
args.extend(self.update_command)
args.append('default')
self.execute(args, self.working_copy, meaningful_output=True)
return True
def incoming(self):
""":return: bool if remote revisions are available"""
cache_key = self.working_copy + '.incoming'
incoming = get_cache(cache_key)
if incoming is not None:
return incoming
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'in', '-q', 'default']
output = self.execute(args, self.working_copy, meaningful_output=True)
if output is False:
return False
incoming = len(output) > 0
set_cache(cache_key, incoming, self.cache_length)
return incoming
def latest_commit(self):
"""
:return:
The latest commit hash
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'id', '-i']
output = self.execute(args, self.working_copy)
if output is False:
return False
return output.strip()
| """
Returns the path to the hg executable
:return: The string path to the executable or False on error
"""
name = 'hg'
if os.name == 'nt':
name += '.exe'
binary = self.find_binary(name)
if not binary:
show_error(
u'''
Unable to find %s.
Please set the "hg_binary" setting by accessing the
Preferences > Package Settings > Package Control > Settings
\u2013 User menu entry.
| identifier_body |
hg_upgrader.py | import os
from ..cache import set_cache, get_cache
from ..show_error import show_error
from .vcs_upgrader import VcsUpgrader
class | (VcsUpgrader):
"""
Allows upgrading a local mercurial-repository-based package
"""
cli_name = 'hg'
def retrieve_binary(self):
"""
Returns the path to the hg executable
:return: The string path to the executable or False on error
"""
name = 'hg'
if os.name == 'nt':
name += '.exe'
binary = self.find_binary(name)
if not binary:
show_error(
u'''
Unable to find %s.
Please set the "hg_binary" setting by accessing the
Preferences > Package Settings > Package Control > Settings
\u2013 User menu entry.
The Settings \u2013 Default entry can be used for reference,
but changes to that will be overwritten upon next upgrade.
''',
name
)
return False
return binary
def run(self):
"""
Updates the repository with remote changes
:return: False on error, or True on success
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary]
args.extend(self.update_command)
args.append('default')
self.execute(args, self.working_copy, meaningful_output=True)
return True
def incoming(self):
""":return: bool if remote revisions are available"""
cache_key = self.working_copy + '.incoming'
incoming = get_cache(cache_key)
if incoming is not None:
return incoming
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'in', '-q', 'default']
output = self.execute(args, self.working_copy, meaningful_output=True)
if output is False:
return False
incoming = len(output) > 0
set_cache(cache_key, incoming, self.cache_length)
return incoming
def latest_commit(self):
"""
:return:
The latest commit hash
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'id', '-i']
output = self.execute(args, self.working_copy)
if output is False:
return False
return output.strip()
| HgUpgrader | identifier_name |
hg_upgrader.py | import os
from ..cache import set_cache, get_cache
from ..show_error import show_error
from .vcs_upgrader import VcsUpgrader
class HgUpgrader(VcsUpgrader):
"""
Allows upgrading a local mercurial-repository-based package
"""
cli_name = 'hg'
def retrieve_binary(self):
"""
Returns the path to the hg executable
:return: The string path to the executable or False on error
"""
name = 'hg'
if os.name == 'nt':
name += '.exe'
binary = self.find_binary(name)
if not binary:
show_error(
u'''
Unable to find %s.
Please set the "hg_binary" setting by accessing the
Preferences > Package Settings > Package Control > Settings
\u2013 User menu entry.
The Settings \u2013 Default entry can be used for reference,
but changes to that will be overwritten upon next upgrade.
''',
name |
def run(self):
"""
Updates the repository with remote changes
:return: False on error, or True on success
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary]
args.extend(self.update_command)
args.append('default')
self.execute(args, self.working_copy, meaningful_output=True)
return True
def incoming(self):
""":return: bool if remote revisions are available"""
cache_key = self.working_copy + '.incoming'
incoming = get_cache(cache_key)
if incoming is not None:
return incoming
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'in', '-q', 'default']
output = self.execute(args, self.working_copy, meaningful_output=True)
if output is False:
return False
incoming = len(output) > 0
set_cache(cache_key, incoming, self.cache_length)
return incoming
def latest_commit(self):
"""
:return:
The latest commit hash
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'id', '-i']
output = self.execute(args, self.working_copy)
if output is False:
return False
return output.strip() | )
return False
return binary | random_line_split |
hg_upgrader.py | import os
from ..cache import set_cache, get_cache
from ..show_error import show_error
from .vcs_upgrader import VcsUpgrader
class HgUpgrader(VcsUpgrader):
"""
Allows upgrading a local mercurial-repository-based package
"""
cli_name = 'hg'
def retrieve_binary(self):
"""
Returns the path to the hg executable
:return: The string path to the executable or False on error
"""
name = 'hg'
if os.name == 'nt':
name += '.exe'
binary = self.find_binary(name)
if not binary:
show_error(
u'''
Unable to find %s.
Please set the "hg_binary" setting by accessing the
Preferences > Package Settings > Package Control > Settings
\u2013 User menu entry.
The Settings \u2013 Default entry can be used for reference,
but changes to that will be overwritten upon next upgrade.
''',
name
)
return False
return binary
def run(self):
"""
Updates the repository with remote changes
:return: False on error, or True on success
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary]
args.extend(self.update_command)
args.append('default')
self.execute(args, self.working_copy, meaningful_output=True)
return True
def incoming(self):
""":return: bool if remote revisions are available"""
cache_key = self.working_copy + '.incoming'
incoming = get_cache(cache_key)
if incoming is not None:
return incoming
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'in', '-q', 'default']
output = self.execute(args, self.working_copy, meaningful_output=True)
if output is False:
return False
incoming = len(output) > 0
set_cache(cache_key, incoming, self.cache_length)
return incoming
def latest_commit(self):
"""
:return:
The latest commit hash
"""
binary = self.retrieve_binary()
if not binary:
return False
args = [binary, 'id', '-i']
output = self.execute(args, self.working_copy)
if output is False:
|
return output.strip()
| return False | conditional_block |
object_test.js | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
goog.module('$jscomp_object_test');
goog.setTestOnly();
const jsunit = goog.require('goog.testing.jsunit');
const testSuite = goog.require('goog.testing.testSuite');
const testing = goog.require('testing');
const assertDeepEquals = testing.assertDeepEquals;
testSuite({
testAssertDeepEquals() {
// Quick sanity check, since we don't unit test assertDeepEquals
assertDeepEquals({a: 4}, {a: 4});
assertThrowsJsUnitException(() => assertDeepEquals({}, {a: 4}));
assertThrowsJsUnitException(() => assertDeepEquals({a: 4}, {}));
},
testAssign_simple() {
const obj = {a: 2, z: 3};
assertEquals(obj, Object.assign(obj, {a: 4, b: 5}, null, {c: 6, b: 7}));
assertDeepEquals({a: 4, b: 7, c: 6, z: 3}, obj);
},
testAssign_skipsPrototypeProperties() {
if (!Object.create) return;
const proto = {a: 4, b: 5};
const from = Object.create(proto);
from.a = 6;
from.c = 7;
assertDeepEquals({a: 6, c: 7}, Object.assign({}, from));
assertDeepEquals({a: 6, b: 1, c: 7}, Object.assign({b: 1}, from));
},
testAssign_skipsNonEnumerableProperties() {
const from = {'b': 23};
try {
Object.defineProperty(from, 'a', {enumerable: false, value: 42});
} catch (err) {
return; // Object.defineProperty in IE8 test harness exists, always fails
}
assertDeepEquals({'b': 23}, Object.assign({}, from));
assertDeepEquals({'a': 1, 'b': 23}, Object.assign({'a': 1}, from));
},
| () {
assertTrue(Object.is(4, 4));
assertTrue(Object.is(0, 0));
assertTrue(Object.is('4', '4'));
assertTrue(Object.is('', ''));
assertTrue(Object.is(true, true));
assertTrue(Object.is(false, false));
assertTrue(Object.is(null, null));
assertTrue(Object.is(undefined, undefined));
assertTrue(Object.is(NaN, NaN));
const obj = {};
assertTrue(Object.is(obj, obj));
assertFalse(Object.is(0, -0));
assertFalse(Object.is({}, {}));
assertFalse(Object.is(4, '4'));
assertFalse(Object.is(null, void 0));
assertFalse(Object.is(1, true));
assertFalse(Object.is(0, false));
assertFalse(Object.is('', false));
}
});
| testIs | identifier_name |
object_test.js | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
goog.module('$jscomp_object_test');
goog.setTestOnly();
const jsunit = goog.require('goog.testing.jsunit');
const testSuite = goog.require('goog.testing.testSuite');
const testing = goog.require('testing');
const assertDeepEquals = testing.assertDeepEquals;
testSuite({
testAssertDeepEquals() {
// Quick sanity check, since we don't unit test assertDeepEquals
assertDeepEquals({a: 4}, {a: 4});
assertThrowsJsUnitException(() => assertDeepEquals({}, {a: 4}));
assertThrowsJsUnitException(() => assertDeepEquals({a: 4}, {}));
},
testAssign_simple() {
const obj = {a: 2, z: 3};
assertEquals(obj, Object.assign(obj, {a: 4, b: 5}, null, {c: 6, b: 7}));
assertDeepEquals({a: 4, b: 7, c: 6, z: 3}, obj);
},
testAssign_skipsPrototypeProperties() | ,
testAssign_skipsNonEnumerableProperties() {
const from = {'b': 23};
try {
Object.defineProperty(from, 'a', {enumerable: false, value: 42});
} catch (err) {
return; // Object.defineProperty in IE8 test harness exists, always fails
}
assertDeepEquals({'b': 23}, Object.assign({}, from));
assertDeepEquals({'a': 1, 'b': 23}, Object.assign({'a': 1}, from));
},
testIs() {
assertTrue(Object.is(4, 4));
assertTrue(Object.is(0, 0));
assertTrue(Object.is('4', '4'));
assertTrue(Object.is('', ''));
assertTrue(Object.is(true, true));
assertTrue(Object.is(false, false));
assertTrue(Object.is(null, null));
assertTrue(Object.is(undefined, undefined));
assertTrue(Object.is(NaN, NaN));
const obj = {};
assertTrue(Object.is(obj, obj));
assertFalse(Object.is(0, -0));
assertFalse(Object.is({}, {}));
assertFalse(Object.is(4, '4'));
assertFalse(Object.is(null, void 0));
assertFalse(Object.is(1, true));
assertFalse(Object.is(0, false));
assertFalse(Object.is('', false));
}
});
| {
if (!Object.create) return;
const proto = {a: 4, b: 5};
const from = Object.create(proto);
from.a = 6;
from.c = 7;
assertDeepEquals({a: 6, c: 7}, Object.assign({}, from));
assertDeepEquals({a: 6, b: 1, c: 7}, Object.assign({b: 1}, from));
} | identifier_body |
object_test.js | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
goog.module('$jscomp_object_test');
goog.setTestOnly();
const jsunit = goog.require('goog.testing.jsunit');
const testSuite = goog.require('goog.testing.testSuite');
const testing = goog.require('testing');
const assertDeepEquals = testing.assertDeepEquals;
testSuite({
testAssertDeepEquals() {
// Quick sanity check, since we don't unit test assertDeepEquals
assertDeepEquals({a: 4}, {a: 4});
assertThrowsJsUnitException(() => assertDeepEquals({}, {a: 4}));
assertThrowsJsUnitException(() => assertDeepEquals({a: 4}, {}));
},
testAssign_simple() {
const obj = {a: 2, z: 3};
assertEquals(obj, Object.assign(obj, {a: 4, b: 5}, null, {c: 6, b: 7}));
assertDeepEquals({a: 4, b: 7, c: 6, z: 3}, obj);
},
testAssign_skipsPrototypeProperties() {
if (!Object.create) return;
const proto = {a: 4, b: 5};
const from = Object.create(proto);
from.a = 6;
from.c = 7;
assertDeepEquals({a: 6, c: 7}, Object.assign({}, from));
assertDeepEquals({a: 6, b: 1, c: 7}, Object.assign({b: 1}, from));
},
testAssign_skipsNonEnumerableProperties() {
const from = {'b': 23};
try {
Object.defineProperty(from, 'a', {enumerable: false, value: 42});
} catch (err) {
return; // Object.defineProperty in IE8 test harness exists, always fails
}
assertDeepEquals({'b': 23}, Object.assign({}, from));
assertDeepEquals({'a': 1, 'b': 23}, Object.assign({'a': 1}, from));
},
testIs() {
assertTrue(Object.is(4, 4));
assertTrue(Object.is(0, 0));
assertTrue(Object.is('4', '4'));
assertTrue(Object.is('', ''));
assertTrue(Object.is(true, true));
assertTrue(Object.is(false, false));
assertTrue(Object.is(null, null));
assertTrue(Object.is(undefined, undefined));
assertTrue(Object.is(NaN, NaN));
const obj = {};
assertTrue(Object.is(obj, obj));
assertFalse(Object.is(0, -0));
assertFalse(Object.is({}, {}));
assertFalse(Object.is(4, '4'));
assertFalse(Object.is(null, void 0));
assertFalse(Object.is(1, true));
assertFalse(Object.is(0, false)); | assertFalse(Object.is('', false));
}
}); | random_line_split |
|
twitter-bot-detail.component.spec.ts | /* tslint:disable max-line-length */
import { ComponentFixture, TestBed, async } from '@angular/core/testing';
import { DatePipe } from '@angular/common';
import { ActivatedRoute } from '@angular/router';
import { Observable } from 'rxjs/Rx';
import { JhiDateUtils, JhiDataUtils, JhiEventManager } from 'ng-jhipster'; | import { TwitterBot } from '../../../../../../main/webapp/app/entities/twitter-bot/twitter-bot.model';
describe('Component Tests', () => {
describe('TwitterBot Management Detail Component', () => {
let comp: TwitterBotDetailComponent;
let fixture: ComponentFixture<TwitterBotDetailComponent>;
let service: TwitterBotService;
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [GamecraftgatewayTestModule],
declarations: [TwitterBotDetailComponent],
providers: [
JhiDateUtils,
JhiDataUtils,
DatePipe,
{
provide: ActivatedRoute,
useValue: new MockActivatedRoute({id: 123})
},
TwitterBotService,
JhiEventManager
]
}).overrideTemplate(TwitterBotDetailComponent, '')
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(TwitterBotDetailComponent);
comp = fixture.componentInstance;
service = fixture.debugElement.injector.get(TwitterBotService);
});
describe('OnInit', () => {
it('Should call load all on init', () => {
// GIVEN
spyOn(service, 'find').and.returnValue(Observable.of(new TwitterBot(10)));
// WHEN
comp.ngOnInit();
// THEN
expect(service.find).toHaveBeenCalledWith(123);
expect(comp.twitterBot).toEqual(jasmine.objectContaining({id: 10}));
});
});
});
}); | import { GamecraftgatewayTestModule } from '../../../test.module';
import { MockActivatedRoute } from '../../../helpers/mock-route.service';
import { TwitterBotDetailComponent } from '../../../../../../main/webapp/app/entities/twitter-bot/twitter-bot-detail.component';
import { TwitterBotService } from '../../../../../../main/webapp/app/entities/twitter-bot/twitter-bot.service'; | random_line_split |
ar-IL.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
function plural(n: number): number |
export default [
'ar-IL',
[['ص', 'م'], u, u],
[['ص', 'م'], u, ['صباحًا', 'مساءً']],
[
['ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س'],
[
'الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس',
'الجمعة', 'السبت'
],
u, ['أحد', 'إثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة', 'سبت']
],
u,
[
['ي', 'ف', 'م', 'أ', 'و', 'ن', 'ل', 'غ', 'س', 'ك', 'ب', 'د'],
[
'يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو',
'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'
],
u
],
u,
[['ق.م', 'م'], u, ['قبل الميلاد', 'ميلادي']],
0,
[5, 6],
['d\u200f/M\u200f/y', 'dd\u200f/MM\u200f/y', 'd MMMM y', 'EEEE، d MMMM y'],
['H:mm', 'H:mm:ss', 'H:mm:ss z', 'H:mm:ss zzzz'],
['{1} {0}', u, u, u],
[
'.', ',', ';', '\u200e%\u200e', '\u200e+', '\u200e-', 'E', '×', '‰', '∞',
'ليس رقمًا', ':'
],
['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'],
'₪',
'شيكل إسرائيلي جديد',
{
'AED': ['د.إ.\u200f'],
'ARS': [u, 'AR$'],
'AUD': ['AU$'],
'BBD': [u, 'BB$'],
'BHD': ['د.ب.\u200f'],
'BMD': [u, 'BM$'],
'BND': [u, 'BN$'],
'BSD': [u, 'BS$'],
'BZD': [u, 'BZ$'],
'CAD': ['CA$'],
'CLP': [u, 'CL$'],
'CNY': ['CN¥'],
'COP': [u, 'CO$'],
'CUP': [u, 'CU$'],
'DOP': [u, 'DO$'],
'DZD': ['د.ج.\u200f'],
'EGP': ['ج.م.\u200f', 'E£'],
'FJD': [u, 'FJ$'],
'GBP': ['UK£'],
'GYD': [u, 'GY$'],
'HKD': ['HK$'],
'IQD': ['د.ع.\u200f'],
'IRR': ['ر.إ.'],
'JMD': [u, 'JM$'],
'JOD': ['د.أ.\u200f'],
'JPY': ['JP¥'],
'KWD': ['د.ك.\u200f'],
'KYD': [u, 'KY$'],
'LBP': ['ل.ل.\u200f', 'L£'],
'LRD': [u, '$LR'],
'LYD': ['د.ل.\u200f'],
'MAD': ['د.م.\u200f'],
'MRU': ['أ.م.'],
'MXN': ['MX$'],
'NZD': ['NZ$'],
'OMR': ['ر.ع.\u200f'],
'QAR': ['ر.ق.\u200f'],
'SAR': ['ر.س.\u200f'],
'SBD': [u, 'SB$'],
'SDD': ['د.س.\u200f'],
'SDG': ['ج.س.'],
'SRD': [u, 'SR$'],
'SYP': ['ل.س.\u200f', '£'],
'THB': ['฿'],
'TND': ['د.ت.\u200f'],
'TTD': [u, 'TT$'],
'TWD': ['NT$'],
'USD': ['US$'],
'UYU': [u, 'UY$'],
'XXX': ['***'],
'YER': ['ر.ي.\u200f']
},
'rtl',
plural
];
| {
if (n === 0) return 0;
if (n === 1) return 1;
if (n === 2) return 2;
if (n % 100 === Math.floor(n % 100) && n % 100 >= 3 && n % 100 <= 10) return 3;
if (n % 100 === Math.floor(n % 100) && n % 100 >= 11 && n % 100 <= 99) return 4;
return 5;
} | identifier_body |
ar-IL.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
function | (n: number): number {
if (n === 0) return 0;
if (n === 1) return 1;
if (n === 2) return 2;
if (n % 100 === Math.floor(n % 100) && n % 100 >= 3 && n % 100 <= 10) return 3;
if (n % 100 === Math.floor(n % 100) && n % 100 >= 11 && n % 100 <= 99) return 4;
return 5;
}
export default [
'ar-IL',
[['ص', 'م'], u, u],
[['ص', 'م'], u, ['صباحًا', 'مساءً']],
[
['ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س'],
[
'الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس',
'الجمعة', 'السبت'
],
u, ['أحد', 'إثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة', 'سبت']
],
u,
[
['ي', 'ف', 'م', 'أ', 'و', 'ن', 'ل', 'غ', 'س', 'ك', 'ب', 'د'],
[
'يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو',
'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'
],
u
],
u,
[['ق.م', 'م'], u, ['قبل الميلاد', 'ميلادي']],
0,
[5, 6],
['d\u200f/M\u200f/y', 'dd\u200f/MM\u200f/y', 'd MMMM y', 'EEEE، d MMMM y'],
['H:mm', 'H:mm:ss', 'H:mm:ss z', 'H:mm:ss zzzz'],
['{1} {0}', u, u, u],
[
'.', ',', ';', '\u200e%\u200e', '\u200e+', '\u200e-', 'E', '×', '‰', '∞',
'ليس رقمًا', ':'
],
['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'],
'₪',
'شيكل إسرائيلي جديد',
{
'AED': ['د.إ.\u200f'],
'ARS': [u, 'AR$'],
'AUD': ['AU$'],
'BBD': [u, 'BB$'],
'BHD': ['د.ب.\u200f'],
'BMD': [u, 'BM$'],
'BND': [u, 'BN$'],
'BSD': [u, 'BS$'],
'BZD': [u, 'BZ$'],
'CAD': ['CA$'],
'CLP': [u, 'CL$'],
'CNY': ['CN¥'],
'COP': [u, 'CO$'],
'CUP': [u, 'CU$'],
'DOP': [u, 'DO$'],
'DZD': ['د.ج.\u200f'],
'EGP': ['ج.م.\u200f', 'E£'],
'FJD': [u, 'FJ$'],
'GBP': ['UK£'],
'GYD': [u, 'GY$'],
'HKD': ['HK$'],
'IQD': ['د.ع.\u200f'],
'IRR': ['ر.إ.'],
'JMD': [u, 'JM$'],
'JOD': ['د.أ.\u200f'],
'JPY': ['JP¥'],
'KWD': ['د.ك.\u200f'],
'KYD': [u, 'KY$'],
'LBP': ['ل.ل.\u200f', 'L£'],
'LRD': [u, '$LR'],
'LYD': ['د.ل.\u200f'],
'MAD': ['د.م.\u200f'],
'MRU': ['أ.م.'],
'MXN': ['MX$'],
'NZD': ['NZ$'],
'OMR': ['ر.ع.\u200f'],
'QAR': ['ر.ق.\u200f'],
'SAR': ['ر.س.\u200f'],
'SBD': [u, 'SB$'],
'SDD': ['د.س.\u200f'],
'SDG': ['ج.س.'],
'SRD': [u, 'SR$'],
'SYP': ['ل.س.\u200f', '£'],
'THB': ['฿'],
'TND': ['د.ت.\u200f'],
'TTD': [u, 'TT$'],
'TWD': ['NT$'],
'USD': ['US$'],
'UYU': [u, 'UY$'],
'XXX': ['***'],
'YER': ['ر.ي.\u200f']
},
'rtl',
plural
];
| plural | identifier_name |
ar-IL.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
function plural(n: number): number {
if (n === 0) return 0;
if (n === 1) return 1;
if (n === 2) return 2;
if (n % 100 === Math.floor(n % 100) && n % 100 >= 3 && n % 100 <= 10) return 3;
if (n % 100 === Math.floor(n % 100) && n % 100 >= 11 && n % 100 <= 99) return 4;
return 5;
}
export default [
'ar-IL',
[['ص', 'م'], u, u],
[['ص', 'م'], u, ['صباحًا', 'مساءً']],
[
['ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س'],
[
'الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس',
'الجمعة', 'السبت'
], | ],
u,
[
['ي', 'ف', 'م', 'أ', 'و', 'ن', 'ل', 'غ', 'س', 'ك', 'ب', 'د'],
[
'يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو',
'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'
],
u
],
u,
[['ق.م', 'م'], u, ['قبل الميلاد', 'ميلادي']],
0,
[5, 6],
['d\u200f/M\u200f/y', 'dd\u200f/MM\u200f/y', 'd MMMM y', 'EEEE، d MMMM y'],
['H:mm', 'H:mm:ss', 'H:mm:ss z', 'H:mm:ss zzzz'],
['{1} {0}', u, u, u],
[
'.', ',', ';', '\u200e%\u200e', '\u200e+', '\u200e-', 'E', '×', '‰', '∞',
'ليس رقمًا', ':'
],
['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'],
'₪',
'شيكل إسرائيلي جديد',
{
'AED': ['د.إ.\u200f'],
'ARS': [u, 'AR$'],
'AUD': ['AU$'],
'BBD': [u, 'BB$'],
'BHD': ['د.ب.\u200f'],
'BMD': [u, 'BM$'],
'BND': [u, 'BN$'],
'BSD': [u, 'BS$'],
'BZD': [u, 'BZ$'],
'CAD': ['CA$'],
'CLP': [u, 'CL$'],
'CNY': ['CN¥'],
'COP': [u, 'CO$'],
'CUP': [u, 'CU$'],
'DOP': [u, 'DO$'],
'DZD': ['د.ج.\u200f'],
'EGP': ['ج.م.\u200f', 'E£'],
'FJD': [u, 'FJ$'],
'GBP': ['UK£'],
'GYD': [u, 'GY$'],
'HKD': ['HK$'],
'IQD': ['د.ع.\u200f'],
'IRR': ['ر.إ.'],
'JMD': [u, 'JM$'],
'JOD': ['د.أ.\u200f'],
'JPY': ['JP¥'],
'KWD': ['د.ك.\u200f'],
'KYD': [u, 'KY$'],
'LBP': ['ل.ل.\u200f', 'L£'],
'LRD': [u, '$LR'],
'LYD': ['د.ل.\u200f'],
'MAD': ['د.م.\u200f'],
'MRU': ['أ.م.'],
'MXN': ['MX$'],
'NZD': ['NZ$'],
'OMR': ['ر.ع.\u200f'],
'QAR': ['ر.ق.\u200f'],
'SAR': ['ر.س.\u200f'],
'SBD': [u, 'SB$'],
'SDD': ['د.س.\u200f'],
'SDG': ['ج.س.'],
'SRD': [u, 'SR$'],
'SYP': ['ل.س.\u200f', '£'],
'THB': ['฿'],
'TND': ['د.ت.\u200f'],
'TTD': [u, 'TT$'],
'TWD': ['NT$'],
'USD': ['US$'],
'UYU': [u, 'UY$'],
'XXX': ['***'],
'YER': ['ر.ي.\u200f']
},
'rtl',
plural
]; | u, ['أحد', 'إثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة', 'سبت'] | random_line_split |
configurable_validator_signer.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{Error, PersistentSafetyStorage};
use diem_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey, Ed25519Signature},
hash::CryptoHash,
};
use diem_global_constants::CONSENSUS_KEY;
use diem_types::{account_address::AccountAddress, validator_signer::ValidatorSigner};
use serde::Serialize;
/// A ConfigurableValidatorSigner is a ValidatorSigner wrapper that offers either
/// a ValidatorSigner instance or a ValidatorHandle instance, depending on the
/// configuration chosen. This abstracts away the complexities of handling either
/// instance, while offering the same API as a ValidatorSigner.
pub enum ConfigurableValidatorSigner {
Signer(ValidatorSigner),
Handle(ValidatorHandle),
}
impl ConfigurableValidatorSigner {
/// Returns a new ValidatorSigner instance
pub fn new_signer(author: AccountAddress, consensus_key: Ed25519PrivateKey) -> Self {
let signer = ValidatorSigner::new(author, consensus_key);
ConfigurableValidatorSigner::Signer(signer)
}
/// Returns a new ValidatorHandle instance
pub fn new_handle(author: AccountAddress, key_version: Ed25519PublicKey) -> Self {
let handle = ValidatorHandle::new(author, key_version);
ConfigurableValidatorSigner::Handle(handle)
}
/// Returns the author associated with the signer configuration.
pub fn author(&self) -> AccountAddress {
match self {
ConfigurableValidatorSigner::Signer(signer) => signer.author(),
ConfigurableValidatorSigner::Handle(handle) => handle.author(),
}
}
/// Returns the public key associated with the signer configuration.
pub fn public_key(&self) -> Ed25519PublicKey {
match self {
ConfigurableValidatorSigner::Signer(signer) => signer.public_key(),
ConfigurableValidatorSigner::Handle(handle) => handle.key_version(),
}
}
/// Signs a given message using the signer configuration.
pub fn | <T: Serialize + CryptoHash>(
&self,
message: &T,
storage: &PersistentSafetyStorage,
) -> Result<Ed25519Signature, Error> {
match self {
ConfigurableValidatorSigner::Signer(signer) => Ok(signer.sign(message)),
ConfigurableValidatorSigner::Handle(handle) => handle.sign(message, storage),
}
}
}
/// A ValidatorHandle associates a validator with a consensus key version held in storage.
/// In contrast to a ValidatorSigner, ValidatorHandle does not hold the private
/// key directly but rather holds a reference to that private key which should be
/// accessed using the handle and the secure storage backend.
pub struct ValidatorHandle {
author: AccountAddress,
key_version: Ed25519PublicKey,
}
impl ValidatorHandle {
pub fn new(author: AccountAddress, key_version: Ed25519PublicKey) -> Self {
ValidatorHandle {
author,
key_version,
}
}
/// Returns the author associated with this handle.
pub fn author(&self) -> AccountAddress {
self.author
}
/// Returns the public key version associated with this handle.
pub fn key_version(&self) -> Ed25519PublicKey {
self.key_version.clone()
}
/// Signs a given message using this handle and a given secure storage backend.
pub fn sign<T: Serialize + CryptoHash>(
&self,
message: &T,
storage: &PersistentSafetyStorage,
) -> Result<Ed25519Signature, Error> {
storage.sign(CONSENSUS_KEY.into(), self.key_version(), message)
}
}
| sign | identifier_name |