file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
listener.py | from typing import Callable, Any
from ..model import MetaEvent, Event
from ..exceptions import PropertyStatechartError
__all__ = ['InternalEventListener', 'PropertyStatechartListener'] | """
def __init__(self, callable: Callable[[Event], Any]) -> None:
self._callable = callable
def __call__(self, event: MetaEvent) -> None:
if event.name == 'event sent':
self._callable(Event(event.event.name, **event.event.data))
class PropertyStatechartListener:
"""
Listener that propagates meta-events to given property statechart, executes
the property statechart, and checks it.
"""
def __init__(self, interpreter) -> None:
self._interpreter = interpreter
def __call__(self, event: MetaEvent) -> None:
self._interpreter.queue(event)
self._interpreter.execute()
if self._interpreter.final:
raise PropertyStatechartError(self._interpreter) |
class InternalEventListener:
"""
Listener that filters and propagates internal events as external events. | random_line_split |
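The listener.py rows in this table all reassemble to the same module: an `InternalEventListener` that re-emits internal `'event sent'` meta-events as external events, and a `PropertyStatechartListener` that mirrors meta-events into a property statechart and raises `PropertyStatechartError` once that statechart reaches a final configuration. Below is a minimal runnable sketch of the filtering behaviour, with stand-in `Event`/`MetaEvent` classes; the real ones live in the package's `..model` module, and the attribute-style access to `event.event` is an assumption about their design.

```python
from typing import Any, Callable

class Event:
    """Stand-in for the package's Event: a name plus keyword data."""
    def __init__(self, name: str, **data: Any) -> None:
        self.name, self.data = name, data

    def __getattr__(self, key: str) -> Any:
        # Assumption: data keys are readable as attributes (e.g. meta.event).
        try:
            return self.data[key]
        except KeyError as exc:
            raise AttributeError(key) from exc

class MetaEvent(Event):
    """Stand-in meta-event; 'event sent' carries the sent event as `.event`."""

class InternalEventListener:
    def __init__(self, callable: Callable[[Event], Any]) -> None:
        self._callable = callable

    def __call__(self, event: MetaEvent) -> None:
        if event.name == 'event sent':  # forward only re-emitted internal events
            self._callable(Event(event.event.name, **event.event.data))

received: list = []
listener = InternalEventListener(received.append)
listener(MetaEvent('event sent', event=Event('ping', answer=42)))
listener(MetaEvent('step completed'))  # filtered out by the name check
assert [(e.name, e.data) for e in received] == [('ping', {'answer': 42})]
```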
listener.py | from typing import Callable, Any
from ..model import MetaEvent, Event
from ..exceptions import PropertyStatechartError
__all__ = ['InternalEventListener', 'PropertyStatechartListener']
class InternalEventListener:
"""
Listener that filters and propagates internal events as external events.
"""
def __init__(self, callable: Callable[[Event], Any]) -> None:
self._callable = callable
def __call__(self, event: MetaEvent) -> None:
if event.name == 'event sent':
self._callable(Event(event.event.name, **event.event.data))
class PropertyStatechartListener:
"""
Listener that propagates meta-events to given property statechart, executes
the property statechart, and checks it.
"""
def __init__(self, interpreter) -> None:
self._interpreter = interpreter
def __call__(self, event: MetaEvent) -> None:
self._interpreter.queue(event)
self._interpreter.execute()
if self._interpreter.final:
| raise PropertyStatechartError(self._interpreter) | conditional_block |
|
listener.py | from typing import Callable, Any
from ..model import MetaEvent, Event
from ..exceptions import PropertyStatechartError
__all__ = ['InternalEventListener', 'PropertyStatechartListener']
class InternalEventListener:
"""
Listener that filters and propagates internal events as external events.
"""
def __init__(self, callable: Callable[[Event], Any]) -> None:
self._callable = callable
def __call__(self, event: MetaEvent) -> None:
if event.name == 'event sent':
self._callable(Event(event.event.name, **event.event.data))
class | :
"""
Listener that propagates meta-events to given property statechart, executes
the property statechart, and checks it.
"""
def __init__(self, interpreter) -> None:
self._interpreter = interpreter
def __call__(self, event: MetaEvent) -> None:
self._interpreter.queue(event)
self._interpreter.execute()
if self._interpreter.final:
raise PropertyStatechartError(self._interpreter)
| PropertyStatechartListener | identifier_name |
listener.py | from typing import Callable, Any
from ..model import MetaEvent, Event
from ..exceptions import PropertyStatechartError
__all__ = ['InternalEventListener', 'PropertyStatechartListener']
class InternalEventListener:
"""
Listener that filters and propagates internal events as external events.
"""
def __init__(self, callable: Callable[[Event], Any]) -> None:
|
def __call__(self, event: MetaEvent) -> None:
if event.name == 'event sent':
self._callable(Event(event.event.name, **event.event.data))
class PropertyStatechartListener:
"""
Listener that propagates meta-events to given property statechart, executes
the property statechart, and checks it.
"""
def __init__(self, interpreter) -> None:
self._interpreter = interpreter
def __call__(self, event: MetaEvent) -> None:
self._interpreter.queue(event)
self._interpreter.execute()
if self._interpreter.final:
raise PropertyStatechartError(self._interpreter)
| self._callable = callable | identifier_body |
complex.rs | (re: T, im: T) -> Complex<T> {
Complex { re: re, im: im }
}
/**
Returns the square of the norm (since `T` doesn't necessarily
have a sqrt function), i.e. `re^2 + im^2`.
*/
#[inline]
pub fn norm_sqr(&self) -> T {
self.re * self.re + self.im * self.im
}
/// Returns the complex conjugate. i.e. `re - i im`
#[inline]
pub fn conj(&self) -> Complex<T> {
Complex::new(self.re.clone(), -self.im)
}
/// Multiplies `self` by the scalar `t`.
#[inline]
pub fn scale(&self, t: T) -> Complex<T> {
Complex::new(self.re * t, self.im * t)
}
/// Divides `self` by the scalar `t`.
#[inline]
pub fn unscale(&self, t: T) -> Complex<T> {
Complex::new(self.re / t, self.im / t)
}
/// Returns `1/self`
#[inline]
pub fn inv(&self) -> Complex<T> {
let norm_sqr = self.norm_sqr();
Complex::new(self.re / norm_sqr,
-self.im / norm_sqr)
}
}
impl<T: Clone + FloatMath> Complex<T> {
/// Calculate |self|
#[inline]
pub fn norm(&self) -> T {
self.re.hypot(self.im)
}
}
impl<T: Clone + FloatMath> Complex<T> {
/// Calculate the principal Arg of self.
#[inline]
pub fn arg(&self) -> T {
self.im.atan2(self.re)
}
/// Convert to polar form (r, theta), such that `self = r * exp(i
/// * theta)`
#[inline]
pub fn to_polar(&self) -> (T, T) {
(self.norm(), self.arg())
}
/// Convert a polar representation into a complex number.
#[inline]
pub fn from_polar(r: &T, theta: &T) -> Complex<T> {
Complex::new(*r * theta.cos(), *r * theta.sin())
}
}
/* arithmetic */
// (a + i b) + (c + i d) == (a + c) + i (b + d)
impl<T: Clone + Num> Add<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn add(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re + other.re, self.im + other.im)
}
}
// (a + i b) - (c + i d) == (a - c) + i (b - d)
impl<T: Clone + Num> Sub<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn sub(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re - other.re, self.im - other.im)
}
}
// (a + i b) * (c + i d) == (a*c - b*d) + i (a*d + b*c)
impl<T: Clone + Num> Mul<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn mul(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re*other.re - self.im*other.im,
self.re*other.im + self.im*other.re)
}
}
// (a + i b) / (c + i d) == [(a + i b) * (c - i d)] / (c*c + d*d)
// == [(a*c + b*d) / (c*c + d*d)] + i [(b*c - a*d) / (c*c + d*d)]
impl<T: Clone + Num> Div<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn div(&self, other: &Complex<T>) -> Complex<T> {
let norm_sqr = other.norm_sqr();
Complex::new((self.re*other.re + self.im*other.im) / norm_sqr,
(self.im*other.re - self.re*other.im) / norm_sqr)
}
}
impl<T: Clone + Num> Neg<Complex<T>> for Complex<T> {
#[inline]
fn neg(&self) -> Complex<T> {
Complex::new(-self.re, -self.im)
}
}
/* constants */
impl<T: Clone + Num> Zero for Complex<T> {
#[inline]
fn zero() -> Complex<T> {
Complex::new(Zero::zero(), Zero::zero())
}
#[inline]
fn is_zero(&self) -> bool {
self.re.is_zero() && self.im.is_zero()
}
}
impl<T: Clone + Num> One for Complex<T> {
#[inline]
fn one() -> Complex<T> {
Complex::new(One::one(), Zero::zero())
}
}
/* string conversions */
impl<T: fmt::Show + Num + PartialOrd> fmt::Show for Complex<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.im < Zero::zero() {
write!(f, "{}-{}i", self.re, -self.im)
} else {
write!(f, "{}+{}i", self.re, self.im)
}
}
}
impl<T: ToStrRadix + Num + PartialOrd> ToStrRadix for Complex<T> {
fn to_str_radix(&self, radix: uint) -> String {
if self.im < Zero::zero() {
format!("{}-{}i",
self.re.to_str_radix(radix),
(-self.im).to_str_radix(radix))
} else {
format!("{}+{}i",
self.re.to_str_radix(radix),
self.im.to_str_radix(radix))
}
}
}
#[cfg(test)]
mod test {
#![allow(non_uppercase_statics)]
use super::{Complex64, Complex};
use std::num::{Zero,One,Float};
pub static _0_0i : Complex64 = Complex { re: 0.0, im: 0.0 };
pub static _1_0i : Complex64 = Complex { re: 1.0, im: 0.0 };
pub static _1_1i : Complex64 = Complex { re: 1.0, im: 1.0 };
pub static _0_1i : Complex64 = Complex { re: 0.0, im: 1.0 };
pub static _neg1_1i : Complex64 = Complex { re: -1.0, im: 1.0 };
pub static _05_05i : Complex64 = Complex { re: 0.5, im: 0.5 };
pub static all_consts : [Complex64, .. 5] = [_0_0i, _1_0i, _1_1i, _neg1_1i, _05_05i];
#[test]
fn test_consts() {
// check our constants are what Complex::new creates
fn test(c : Complex64, r : f64, i: f64) {
assert_eq!(c, Complex::new(r,i));
}
test(_0_0i, 0.0, 0.0);
test(_1_0i, 1.0, 0.0);
test(_1_1i, 1.0, 1.0);
test(_neg1_1i, -1.0, 1.0);
test(_05_05i, 0.5, 0.5);
assert_eq!(_0_0i, Zero::zero());
assert_eq!(_1_0i, One::one());
}
#[test]
#[ignore(cfg(target_arch = "x86"))]
// FIXME #7158: (maybe?) currently failing on x86.
fn test_norm() {
fn test(c: Complex64, ns: f64) {
assert_eq!(c.norm_sqr(), ns);
assert_eq!(c.norm(), ns.sqrt())
}
test(_0_0i, 0.0);
test(_1_0i, 1.0);
test(_1_1i, 2.0);
test(_neg1_1i, 2.0);
test(_05_05i, 0.5);
}
#[test]
fn test_scale_unscale() {
assert_eq!(_05_05i.scale(2.0), _1_1i);
assert_eq!(_1_1i.unscale(2.0), _05_05i);
for &c in all_consts.iter() {
assert_eq!(c.scale(2.0).unscale(2.0), c);
}
}
#[test]
fn test_conj() {
for &c in all_consts.iter() {
assert_eq!(c.conj(), Complex::new(c.re, -c.im));
| new | identifier_name |
|
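The `complex.rs` rows carry the arithmetic rules as one-line comments; the division rule in particular compresses a short derivation. Written out, it comes from multiplying numerator and denominator by the conjugate:

```latex
\frac{a + ib}{c + id}
  = \frac{(a + ib)(c - id)}{(c + id)(c - id)}
  = \frac{(ac + bd) + i\,(bc - ad)}{c^2 + d^2}
```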
complex.rs | d) == (a + c) + i (b + d)
impl<T: Clone + Num> Add<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn add(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re + other.re, self.im + other.im)
}
}
// (a + i b) - (c + i d) == (a - c) + i (b - d)
impl<T: Clone + Num> Sub<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn sub(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re - other.re, self.im - other.im)
}
}
// (a + i b) * (c + i d) == (a*c - b*d) + i (a*d + b*c)
impl<T: Clone + Num> Mul<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn mul(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re*other.re - self.im*other.im,
self.re*other.im + self.im*other.re)
}
}
// (a + i b) / (c + i d) == [(a + i b) * (c - i d)] / (c*c + d*d)
// == [(a*c + b*d) / (c*c + d*d)] + i [(b*c - a*d) / (c*c + d*d)]
impl<T: Clone + Num> Div<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn div(&self, other: &Complex<T>) -> Complex<T> {
let norm_sqr = other.norm_sqr();
Complex::new((self.re*other.re + self.im*other.im) / norm_sqr,
(self.im*other.re - self.re*other.im) / norm_sqr)
}
}
impl<T: Clone + Num> Neg<Complex<T>> for Complex<T> {
#[inline]
fn neg(&self) -> Complex<T> {
Complex::new(-self.re, -self.im)
}
}
/* constants */
impl<T: Clone + Num> Zero for Complex<T> {
#[inline]
fn zero() -> Complex<T> {
Complex::new(Zero::zero(), Zero::zero())
}
#[inline]
fn is_zero(&self) -> bool {
self.re.is_zero() && self.im.is_zero()
}
}
impl<T: Clone + Num> One for Complex<T> {
#[inline]
fn one() -> Complex<T> {
Complex::new(One::one(), Zero::zero())
}
}
/* string conversions */
impl<T: fmt::Show + Num + PartialOrd> fmt::Show for Complex<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.im < Zero::zero() {
write!(f, "{}-{}i", self.re, -self.im)
} else {
write!(f, "{}+{}i", self.re, self.im)
}
}
}
impl<T: ToStrRadix + Num + PartialOrd> ToStrRadix for Complex<T> {
fn to_str_radix(&self, radix: uint) -> String {
if self.im < Zero::zero() {
format!("{}-{}i",
self.re.to_str_radix(radix),
(-self.im).to_str_radix(radix))
} else {
format!("{}+{}i",
self.re.to_str_radix(radix),
self.im.to_str_radix(radix))
}
}
}
#[cfg(test)]
mod test {
#![allow(non_uppercase_statics)]
use super::{Complex64, Complex};
use std::num::{Zero,One,Float};
pub static _0_0i : Complex64 = Complex { re: 0.0, im: 0.0 };
pub static _1_0i : Complex64 = Complex { re: 1.0, im: 0.0 };
pub static _1_1i : Complex64 = Complex { re: 1.0, im: 1.0 };
pub static _0_1i : Complex64 = Complex { re: 0.0, im: 1.0 };
pub static _neg1_1i : Complex64 = Complex { re: -1.0, im: 1.0 };
pub static _05_05i : Complex64 = Complex { re: 0.5, im: 0.5 };
pub static all_consts : [Complex64, .. 5] = [_0_0i, _1_0i, _1_1i, _neg1_1i, _05_05i];
#[test]
fn test_consts() {
// check our constants are what Complex::new creates
fn test(c : Complex64, r : f64, i: f64) {
assert_eq!(c, Complex::new(r,i));
}
test(_0_0i, 0.0, 0.0);
test(_1_0i, 1.0, 0.0);
test(_1_1i, 1.0, 1.0);
test(_neg1_1i, -1.0, 1.0);
test(_05_05i, 0.5, 0.5);
assert_eq!(_0_0i, Zero::zero());
assert_eq!(_1_0i, One::one());
}
#[test]
#[ignore(cfg(target_arch = "x86"))]
// FIXME #7158: (maybe?) currently failing on x86.
fn test_norm() {
fn test(c: Complex64, ns: f64) {
assert_eq!(c.norm_sqr(), ns);
assert_eq!(c.norm(), ns.sqrt())
}
test(_0_0i, 0.0);
test(_1_0i, 1.0);
test(_1_1i, 2.0);
test(_neg1_1i, 2.0);
test(_05_05i, 0.5);
}
#[test]
fn test_scale_unscale() {
assert_eq!(_05_05i.scale(2.0), _1_1i);
assert_eq!(_1_1i.unscale(2.0), _05_05i);
for &c in all_consts.iter() {
assert_eq!(c.scale(2.0).unscale(2.0), c);
}
}
#[test]
fn test_conj() {
for &c in all_consts.iter() {
assert_eq!(c.conj(), Complex::new(c.re, -c.im));
assert_eq!(c.conj().conj(), c);
}
}
#[test]
fn test_inv() {
assert_eq!(_1_1i.inv(), _05_05i.conj());
assert_eq!(_1_0i.inv(), _1_0i);
}
#[test]
#[should_fail]
fn test_divide_by_zero_natural() {
let n = Complex::new(2i, 3i);
let d = Complex::new(0, 0);
let _x = n / d;
}
#[test]
#[should_fail]
#[ignore]
fn test_inv_zero() {
// FIXME #5736: should this really fail, or just NaN?
_0_0i.inv();
}
#[test]
fn test_arg() {
fn test(c: Complex64, arg: f64) {
assert!((c.arg() - arg).abs() < 1.0e-6)
}
test(_1_0i, 0.0);
test(_1_1i, 0.25 * Float::pi());
test(_neg1_1i, 0.75 * Float::pi());
test(_05_05i, 0.25 * Float::pi());
}
#[test]
fn test_polar_conv() {
fn test(c: Complex64) {
let (r, theta) = c.to_polar();
assert!((c - Complex::from_polar(&r, &theta)).norm() < 1e-6);
}
for &c in all_consts.iter() { test(c); }
}
mod arith {
use super::{_0_0i, _1_0i, _1_1i, _0_1i, _neg1_1i, _05_05i, all_consts};
use std::num::Zero;
#[test]
fn test_add() {
assert_eq!(_05_05i + _05_05i, _1_1i); | assert_eq!(_0_1i + _1_0i, _1_1i);
assert_eq!(_1_0i + _neg1_1i, _0_1i);
| random_line_split |
|
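`test_inv` in the row above checks the reciprocal through the conjugate identity. As a worked case for its first assertion:

```latex
\frac{1}{z} = \frac{\bar{z}}{|z|^2}, \qquad
\frac{1}{1 + i} = \frac{1 - i}{2} = 0.5 - 0.5i = \overline{0.5 + 0.5i}
```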
complex.rs | , t: T) -> Complex<T> {
Complex::new(self.re * t, self.im * t)
}
/// Divides `self` by the scalar `t`.
#[inline]
pub fn unscale(&self, t: T) -> Complex<T> {
Complex::new(self.re / t, self.im / t)
}
/// Returns `1/self`
#[inline]
pub fn inv(&self) -> Complex<T> {
let norm_sqr = self.norm_sqr();
Complex::new(self.re / norm_sqr,
-self.im / norm_sqr)
}
}
impl<T: Clone + FloatMath> Complex<T> {
/// Calculate |self|
#[inline]
pub fn norm(&self) -> T {
self.re.hypot(self.im)
}
}
impl<T: Clone + FloatMath> Complex<T> {
/// Calculate the principal Arg of self.
#[inline]
pub fn arg(&self) -> T {
self.im.atan2(self.re)
}
/// Convert to polar form (r, theta), such that `self = r * exp(i
/// * theta)`
#[inline]
pub fn to_polar(&self) -> (T, T) {
(self.norm(), self.arg())
}
/// Convert a polar representation into a complex number.
#[inline]
pub fn from_polar(r: &T, theta: &T) -> Complex<T> {
Complex::new(*r * theta.cos(), *r * theta.sin())
}
}
/* arithmetic */
// (a + i b) + (c + i d) == (a + c) + i (b + d)
impl<T: Clone + Num> Add<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn add(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re + other.re, self.im + other.im)
}
}
// (a + i b) - (c + i d) == (a - c) + i (b - d)
impl<T: Clone + Num> Sub<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn sub(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re - other.re, self.im - other.im)
}
}
// (a + i b) * (c + i d) == (a*c - b*d) + i (a*d + b*c)
impl<T: Clone + Num> Mul<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn mul(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re*other.re - self.im*other.im,
self.re*other.im + self.im*other.re)
}
}
// (a + i b) / (c + i d) == [(a + i b) * (c - i d)] / (c*c + d*d)
// == [(a*c + b*d) / (c*c + d*d)] + i [(b*c - a*d) / (c*c + d*d)]
impl<T: Clone + Num> Div<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn div(&self, other: &Complex<T>) -> Complex<T> {
let norm_sqr = other.norm_sqr();
Complex::new((self.re*other.re + self.im*other.im) / norm_sqr,
(self.im*other.re - self.re*other.im) / norm_sqr)
}
}
impl<T: Clone + Num> Neg<Complex<T>> for Complex<T> {
#[inline]
fn neg(&self) -> Complex<T> {
Complex::new(-self.re, -self.im)
}
}
/* constants */
impl<T: Clone + Num> Zero for Complex<T> {
#[inline]
fn zero() -> Complex<T> {
Complex::new(Zero::zero(), Zero::zero())
}
#[inline]
fn is_zero(&self) -> bool {
self.re.is_zero() && self.im.is_zero()
}
}
impl<T: Clone + Num> One for Complex<T> {
#[inline]
fn one() -> Complex<T> {
Complex::new(One::one(), Zero::zero())
}
}
/* string conversions */
impl<T: fmt::Show + Num + PartialOrd> fmt::Show for Complex<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.im < Zero::zero() {
write!(f, "{}-{}i", self.re, -self.im)
} else {
write!(f, "{}+{}i", self.re, self.im)
}
}
}
impl<T: ToStrRadix + Num + PartialOrd> ToStrRadix for Complex<T> {
fn to_str_radix(&self, radix: uint) -> String {
if self.im < Zero::zero() {
format!("{}-{}i",
self.re.to_str_radix(radix),
(-self.im).to_str_radix(radix))
} else |
}
}
#[cfg(test)]
mod test {
#![allow(non_uppercase_statics)]
use super::{Complex64, Complex};
use std::num::{Zero,One,Float};
pub static _0_0i : Complex64 = Complex { re: 0.0, im: 0.0 };
pub static _1_0i : Complex64 = Complex { re: 1.0, im: 0.0 };
pub static _1_1i : Complex64 = Complex { re: 1.0, im: 1.0 };
pub static _0_1i : Complex64 = Complex { re: 0.0, im: 1.0 };
pub static _neg1_1i : Complex64 = Complex { re: -1.0, im: 1.0 };
pub static _05_05i : Complex64 = Complex { re: 0.5, im: 0.5 };
pub static all_consts : [Complex64, .. 5] = [_0_0i, _1_0i, _1_1i, _neg1_1i, _05_05i];
#[test]
fn test_consts() {
// check our constants are what Complex::new creates
fn test(c : Complex64, r : f64, i: f64) {
assert_eq!(c, Complex::new(r,i));
}
test(_0_0i, 0.0, 0.0);
test(_1_0i, 1.0, 0.0);
test(_1_1i, 1.0, 1.0);
test(_neg1_1i, -1.0, 1.0);
test(_05_05i, 0.5, 0.5);
assert_eq!(_0_0i, Zero::zero());
assert_eq!(_1_0i, One::one());
}
#[test]
#[ignore(cfg(target_arch = "x86"))]
// FIXME #7158: (maybe?) currently failing on x86.
fn test_norm() {
fn test(c: Complex64, ns: f64) {
assert_eq!(c.norm_sqr(), ns);
assert_eq!(c.norm(), ns.sqrt())
}
test(_0_0i, 0.0);
test(_1_0i, 1.0);
test(_1_1i, 2.0);
test(_neg1_1i, 2.0);
test(_05_05i, 0.5);
}
#[test]
fn test_scale_unscale() {
assert_eq!(_05_05i.scale(2.0), _1_1i);
assert_eq!(_1_1i.unscale(2.0), _05_05i);
for &c in all_consts.iter() {
assert_eq!(c.scale(2.0).unscale(2.0), c);
}
}
#[test]
fn test_conj() {
for &c in all_consts.iter() {
assert_eq!(c.conj(), Complex::new(c.re, -c.im));
assert_eq!(c.conj().conj(), c);
}
}
#[test]
fn test_inv() {
assert_eq!(_1_1i.inv(), _05_05i.conj());
assert_eq!(_1_0i.inv(), _1_0i);
}
#[test]
#[should_fail]
fn test_divide_by_zero_natural() {
let n = Complex::new(2i, 3i);
let d = Complex::new(0, 0);
let _x = n / d;
}
#[test]
#[should_fail]
#[ignore]
fn test_inv_zero() {
// FIXME #5736: should this really fail, or just NaN?
| {
format!("{}+{}i",
self.re.to_str_radix(radix),
self.im.to_str_radix(radix))
} | conditional_block |
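`arg` in these rows is `atan2(im, re)`, the principal argument; the values asserted in `test_arg` follow directly:

```latex
\arg(1) = 0,\quad \arg(1 + i) = \tfrac{\pi}{4},\quad
\arg(-1 + i) = \tfrac{3\pi}{4},\quad \arg(0.5 + 0.5i) = \tfrac{\pi}{4}
```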
complex.rs | , t: T) -> Complex<T> {
Complex::new(self.re * t, self.im * t)
}
/// Divides `self` by the scalar `t`.
#[inline]
pub fn unscale(&self, t: T) -> Complex<T> {
Complex::new(self.re / t, self.im / t)
}
/// Returns `1/self`
#[inline]
pub fn inv(&self) -> Complex<T> {
let norm_sqr = self.norm_sqr();
Complex::new(self.re / norm_sqr,
-self.im / norm_sqr)
}
}
impl<T: Clone + FloatMath> Complex<T> {
/// Calculate |self|
#[inline]
pub fn norm(&self) -> T {
self.re.hypot(self.im)
}
}
impl<T: Clone + FloatMath> Complex<T> {
/// Calculate the principal Arg of self.
#[inline]
pub fn arg(&self) -> T {
self.im.atan2(self.re)
}
/// Convert to polar form (r, theta), such that `self = r * exp(i
/// * theta)`
#[inline]
pub fn to_polar(&self) -> (T, T) |
/// Convert a polar representation into a complex number.
#[inline]
pub fn from_polar(r: &T, theta: &T) -> Complex<T> {
Complex::new(*r * theta.cos(), *r * theta.sin())
}
}
/* arithmetic */
// (a + i b) + (c + i d) == (a + c) + i (b + d)
impl<T: Clone + Num> Add<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn add(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re + other.re, self.im + other.im)
}
}
// (a + i b) - (c + i d) == (a - c) + i (b - d)
impl<T: Clone + Num> Sub<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn sub(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re - other.re, self.im - other.im)
}
}
// (a + i b) * (c + i d) == (a*c - b*d) + i (a*d + b*c)
impl<T: Clone + Num> Mul<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn mul(&self, other: &Complex<T>) -> Complex<T> {
Complex::new(self.re*other.re - self.im*other.im,
self.re*other.im + self.im*other.re)
}
}
// (a + i b) / (c + i d) == [(a + i b) * (c - i d)] / (c*c + d*d)
// == [(a*c + b*d) / (c*c + d*d)] + i [(b*c - a*d) / (c*c + d*d)]
impl<T: Clone + Num> Div<Complex<T>, Complex<T>> for Complex<T> {
#[inline]
fn div(&self, other: &Complex<T>) -> Complex<T> {
let norm_sqr = other.norm_sqr();
Complex::new((self.re*other.re + self.im*other.im) / norm_sqr,
(self.im*other.re - self.re*other.im) / norm_sqr)
}
}
impl<T: Clone + Num> Neg<Complex<T>> for Complex<T> {
#[inline]
fn neg(&self) -> Complex<T> {
Complex::new(-self.re, -self.im)
}
}
/* constants */
impl<T: Clone + Num> Zero for Complex<T> {
#[inline]
fn zero() -> Complex<T> {
Complex::new(Zero::zero(), Zero::zero())
}
#[inline]
fn is_zero(&self) -> bool {
self.re.is_zero() && self.im.is_zero()
}
}
impl<T: Clone + Num> One for Complex<T> {
#[inline]
fn one() -> Complex<T> {
Complex::new(One::one(), Zero::zero())
}
}
/* string conversions */
impl<T: fmt::Show + Num + PartialOrd> fmt::Show for Complex<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.im < Zero::zero() {
write!(f, "{}-{}i", self.re, -self.im)
} else {
write!(f, "{}+{}i", self.re, self.im)
}
}
}
impl<T: ToStrRadix + Num + PartialOrd> ToStrRadix for Complex<T> {
fn to_str_radix(&self, radix: uint) -> String {
if self.im < Zero::zero() {
format!("{}-{}i",
self.re.to_str_radix(radix),
(-self.im).to_str_radix(radix))
} else {
format!("{}+{}i",
self.re.to_str_radix(radix),
self.im.to_str_radix(radix))
}
}
}
#[cfg(test)]
mod test {
#![allow(non_uppercase_statics)]
use super::{Complex64, Complex};
use std::num::{Zero,One,Float};
pub static _0_0i : Complex64 = Complex { re: 0.0, im: 0.0 };
pub static _1_0i : Complex64 = Complex { re: 1.0, im: 0.0 };
pub static _1_1i : Complex64 = Complex { re: 1.0, im: 1.0 };
pub static _0_1i : Complex64 = Complex { re: 0.0, im: 1.0 };
pub static _neg1_1i : Complex64 = Complex { re: -1.0, im: 1.0 };
pub static _05_05i : Complex64 = Complex { re: 0.5, im: 0.5 };
pub static all_consts : [Complex64, .. 5] = [_0_0i, _1_0i, _1_1i, _neg1_1i, _05_05i];
#[test]
fn test_consts() {
// check our constants are what Complex::new creates
fn test(c : Complex64, r : f64, i: f64) {
assert_eq!(c, Complex::new(r,i));
}
test(_0_0i, 0.0, 0.0);
test(_1_0i, 1.0, 0.0);
test(_1_1i, 1.0, 1.0);
test(_neg1_1i, -1.0, 1.0);
test(_05_05i, 0.5, 0.5);
assert_eq!(_0_0i, Zero::zero());
assert_eq!(_1_0i, One::one());
}
#[test]
#[ignore(cfg(target_arch = "x86"))]
// FIXME #7158: (maybe?) currently failing on x86.
fn test_norm() {
fn test(c: Complex64, ns: f64) {
assert_eq!(c.norm_sqr(), ns);
assert_eq!(c.norm(), ns.sqrt())
}
test(_0_0i, 0.0);
test(_1_0i, 1.0);
test(_1_1i, 2.0);
test(_neg1_1i, 2.0);
test(_05_05i, 0.5);
}
#[test]
fn test_scale_unscale() {
assert_eq!(_05_05i.scale(2.0), _1_1i);
assert_eq!(_1_1i.unscale(2.0), _05_05i);
for &c in all_consts.iter() {
assert_eq!(c.scale(2.0).unscale(2.0), c);
}
}
#[test]
fn test_conj() {
for &c in all_consts.iter() {
assert_eq!(c.conj(), Complex::new(c.re, -c.im));
assert_eq!(c.conj().conj(), c);
}
}
#[test]
fn test_inv() {
assert_eq!(_1_1i.inv(), _05_05i.conj());
assert_eq!(_1_0i.inv(), _1_0i);
}
#[test]
#[should_fail]
fn test_divide_by_zero_natural() {
let n = Complex::new(2i, 3i);
let d = Complex::new(0, 0);
let _x = n / d;
}
#[test]
#[should_fail]
#[ignore]
fn test_inv_zero() {
// FIXME #5736: should this really fail, or just NaN?
| {
(self.norm(), self.arg())
} | identifier_body |
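The masked `to_polar` body in this row pairs with `from_polar` through the usual polar identity, which is also what `test_polar_conv` round-trips:

```latex
z = r e^{i\theta} = r(\cos\theta + i\sin\theta), \qquad
r = \sqrt{\mathrm{re}^2 + \mathrm{im}^2}, \quad
\theta = \operatorname{atan2}(\mathrm{im}, \mathrm{re})
```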
getAttribute.js | describe('getAttribute', function () {
var
chai = require('chai'),
assert = chai.assert;
var
DomParser = require('../index.js'),
parser = new DomParser();
it('attr value with "="', function(){
var html =
'<div id="outer" data-a ttt = "asd\'">\n' +
' <a id="inner" href="/search?field=123"></a>\n' +
'</div>';
var
dom = parser.parseFromString(html),
outer = dom.getElementById('outer'),
inner;
inner = dom.getElementById('inner');
| assert.strictEqual(outer.getAttribute('not-exists'), null);
assert.equal(inner.getAttribute('href'), '/search?field=123');
});
}); | assert.equal(outer.attributes.length, 3);
assert.equal(outer.getAttribute('id'), 'outer');
assert.equal(outer.getAttribute('data-a'), '');
assert.equal(outer.getAttribute('ttt'), 'asd\''); | random_line_split |
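The test above exercises the awkward attribute cases: a bare attribute (`data-a`), spaces around `=`, and a quoted value containing a quote character. A toy sketch of why a dedicated attribute pattern is needed; this regex is illustrative only, not the parser from `../index.js`, and it deliberately ignores unquoted values and other real-HTML cases.

```python
import re

# Toy attribute matcher: a name, optionally followed by = and a quoted value.
ATTR_RE = re.compile(r'''([\w-]+)(?:\s*=\s*(?:"([^"]*)"|'([^']*)'))?''')

tag = '<div id="outer" data-a ttt = "asd\'">'
attrs = {m.group(1): m.group(2) or m.group(3) or ''
         for m in ATTR_RE.finditer(tag[4:])}  # skip the '<div' tag name

assert attrs == {'id': 'outer', 'data-a': '', 'ttt': "asd'"}
assert len(attrs) == 3  # matches outer.attributes.length in the test
```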
asteroids.js | //asteroid clone (core mechanics only)
//arrow keys to move + x to shoot
var bullets;
var asteroids;
var ship;
var shipImage, bulletImage, particleImage;
var MARGIN = 40;
function setup() {
createCanvas(800, 600);
bulletImage = loadImage('assets/asteroids_bullet.png');
shipImage = loadImage('assets/asteroids_ship0001.png');
particleImage = loadImage('assets/asteroids_particle.png');
ship = createSprite(width/2, height/2);
ship.maxSpeed = 6;
ship.friction = 0.98;
ship.setCollider('circle', 0, 0, 20);
ship.addImage('normal', shipImage);
ship.addAnimation('thrust', 'assets/asteroids_ship0002.png', 'assets/asteroids_ship0007.png');
asteroids = new Group();
bullets = new Group();
for(var i = 0; i<8; i++) {
var ang = random(360);
var px = width/2 + 1000 * cos(radians(ang));
var py = height/2+ 1000 * sin(radians(ang));
createAsteroid(3, px, py);
}
}
function draw() {
background(0);
fill(255);
textAlign(CENTER);
text('Controls: Arrow Keys + X', width/2, 20);
for(var i=0; i<allSprites.length; i++) {
var s = allSprites[i];
if(s.position.x<-MARGIN) s.position.x = width+MARGIN;
if(s.position.x>width+MARGIN) s.position.x = -MARGIN;
if(s.position.y<-MARGIN) s.position.y = height+MARGIN;
if(s.position.y>height+MARGIN) s.position.y = -MARGIN;
}
asteroids.overlap(bullets, asteroidHit);
ship.bounce(asteroids);
if(keyDown(LEFT_ARROW))
ship.rotation -= 4;
if(keyDown(RIGHT_ARROW))
ship.rotation += 4;
if(keyDown(UP_ARROW))
{
ship.addSpeed(0.2, ship.rotation);
ship.changeAnimation('thrust');
}
else
ship.changeAnimation('normal');
if(keyWentDown('x'))
{
var bullet = createSprite(ship.position.x, ship.position.y);
bullet.addImage(bulletImage);
bullet.setSpeed(10+ship.getSpeed(), ship.rotation);
bullet.life = 30;
bullets.add(bullet);
}
drawSprites();
}
function | (type, x, y) {
var a = createSprite(x, y);
var img = loadImage('assets/asteroid'+floor(random(0, 3))+'.png');
a.addImage(img);
a.setSpeed(2.5-(type/2), random(360));
a.rotationSpeed = 0.5;
//a.debug = true;
a.type = type;
if(type == 2)
a.scale = 0.6;
if(type == 1)
a.scale = 0.3;
a.mass = 2+a.scale;
a.setCollider('circle', 0, 0, 50);
asteroids.add(a);
return a;
}
function asteroidHit(asteroid, bullet) {
var newType = asteroid.type-1;
if(newType>0) {
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
}
for(var i=0; i<10; i++) {
var p = createSprite(bullet.position.x, bullet.position.y);
p.addImage(particleImage);
p.setSpeed(random(3, 5), random(360));
p.friction = 0.95;
p.life = 15;
}
bullet.remove();
asteroid.remove();
}
| createAsteroid | identifier_name |
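The wrap-around loop in `draw()` is the core toroidal-playfield trick: a sprite that drifts `MARGIN` pixels past one edge teleports to just outside the opposite edge. A standalone sketch of that rule, in plain Python rather than p5.js, with constants mirroring the sketch above:

```python
WIDTH, HEIGHT, MARGIN = 800, 600, 40

def wrap(x: float, y: float) -> tuple[float, float]:
    """Teleport a point that drifted MARGIN px past one edge to the other."""
    if x < -MARGIN:
        x = WIDTH + MARGIN
    elif x > WIDTH + MARGIN:
        x = -MARGIN
    if y < -MARGIN:
        y = HEIGHT + MARGIN
    elif y > HEIGHT + MARGIN:
        y = -MARGIN
    return x, y

assert wrap(-41, 300) == (840, 300)   # exits left   -> re-enters from the right
assert wrap(400, 641) == (400, -40)   # exits bottom -> re-enters from the top
```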
asteroids.js | //asteroid clone (core mechanics only)
//arrow keys to move + x to shoot
var bullets;
var asteroids;
var ship;
var shipImage, bulletImage, particleImage;
var MARGIN = 40;
function setup() {
createCanvas(800, 600);
bulletImage = loadImage('assets/asteroids_bullet.png');
shipImage = loadImage('assets/asteroids_ship0001.png');
particleImage = loadImage('assets/asteroids_particle.png');
ship = createSprite(width/2, height/2);
ship.maxSpeed = 6;
ship.friction = 0.98;
ship.setCollider('circle', 0, 0, 20);
ship.addImage('normal', shipImage);
ship.addAnimation('thrust', 'assets/asteroids_ship0002.png', 'assets/asteroids_ship0007.png');
asteroids = new Group();
bullets = new Group();
| createAsteroid(3, px, py);
}
}
function draw() {
background(0);
fill(255);
textAlign(CENTER);
text('Controls: Arrow Keys + X', width/2, 20);
for(var i=0; i<allSprites.length; i++) {
var s = allSprites[i];
if(s.position.x<-MARGIN) s.position.x = width+MARGIN;
if(s.position.x>width+MARGIN) s.position.x = -MARGIN;
if(s.position.y<-MARGIN) s.position.y = height+MARGIN;
if(s.position.y>height+MARGIN) s.position.y = -MARGIN;
}
asteroids.overlap(bullets, asteroidHit);
ship.bounce(asteroids);
if(keyDown(LEFT_ARROW))
ship.rotation -= 4;
if(keyDown(RIGHT_ARROW))
ship.rotation += 4;
if(keyDown(UP_ARROW))
{
ship.addSpeed(0.2, ship.rotation);
ship.changeAnimation('thrust');
}
else
ship.changeAnimation('normal');
if(keyWentDown('x'))
{
var bullet = createSprite(ship.position.x, ship.position.y);
bullet.addImage(bulletImage);
bullet.setSpeed(10+ship.getSpeed(), ship.rotation);
bullet.life = 30;
bullets.add(bullet);
}
drawSprites();
}
function createAsteroid(type, x, y) {
var a = createSprite(x, y);
var img = loadImage('assets/asteroid'+floor(random(0, 3))+'.png');
a.addImage(img);
a.setSpeed(2.5-(type/2), random(360));
a.rotationSpeed = 0.5;
//a.debug = true;
a.type = type;
if(type == 2)
a.scale = 0.6;
if(type == 1)
a.scale = 0.3;
a.mass = 2+a.scale;
a.setCollider('circle', 0, 0, 50);
asteroids.add(a);
return a;
}
function asteroidHit(asteroid, bullet) {
var newType = asteroid.type-1;
if(newType>0) {
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
}
for(var i=0; i<10; i++) {
var p = createSprite(bullet.position.x, bullet.position.y);
p.addImage(particleImage);
p.setSpeed(random(3, 5), random(360));
p.friction = 0.95;
p.life = 15;
}
bullet.remove();
asteroid.remove();
} | for(var i = 0; i<8; i++) {
var ang = random(360);
var px = width/2 + 1000 * cos(radians(ang));
var py = height/2+ 1000 * sin(radians(ang)); | random_line_split |
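The middle of this row is the spawn loop: each asteroid starts on a circle of radius 1000 around the screen centre, so it always flies in from off-screen:

```latex
(p_x, p_y) = \Bigl(\tfrac{w}{2} + R\cos\theta,\ \tfrac{h}{2} + R\sin\theta\Bigr),
\qquad R = 1000,\quad \theta \sim \mathrm{Uniform}[0, 360)
```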
asteroids.js | //asteroid clone (core mechanics only)
//arrow keys to move + x to shoot
var bullets;
var asteroids;
var ship;
var shipImage, bulletImage, particleImage;
var MARGIN = 40;
function setup() {
createCanvas(800, 600);
bulletImage = loadImage('assets/asteroids_bullet.png');
shipImage = loadImage('assets/asteroids_ship0001.png');
particleImage = loadImage('assets/asteroids_particle.png');
ship = createSprite(width/2, height/2);
ship.maxSpeed = 6;
ship.friction = 0.98;
ship.setCollider('circle', 0, 0, 20);
ship.addImage('normal', shipImage);
ship.addAnimation('thrust', 'assets/asteroids_ship0002.png', 'assets/asteroids_ship0007.png');
asteroids = new Group();
bullets = new Group();
for(var i = 0; i<8; i++) {
var ang = random(360);
var px = width/2 + 1000 * cos(radians(ang));
var py = height/2+ 1000 * sin(radians(ang));
createAsteroid(3, px, py);
}
}
function draw() | ship.rotation -= 4;
if(keyDown(RIGHT_ARROW))
ship.rotation += 4;
if(keyDown(UP_ARROW))
{
ship.addSpeed(0.2, ship.rotation);
ship.changeAnimation('thrust');
}
else
ship.changeAnimation('normal');
if(keyWentDown('x'))
{
var bullet = createSprite(ship.position.x, ship.position.y);
bullet.addImage(bulletImage);
bullet.setSpeed(10+ship.getSpeed(), ship.rotation);
bullet.life = 30;
bullets.add(bullet);
}
drawSprites();
}
function createAsteroid(type, x, y) {
var a = createSprite(x, y);
var img = loadImage('assets/asteroid'+floor(random(0, 3))+'.png');
a.addImage(img);
a.setSpeed(2.5-(type/2), random(360));
a.rotationSpeed = 0.5;
//a.debug = true;
a.type = type;
if(type == 2)
a.scale = 0.6;
if(type == 1)
a.scale = 0.3;
a.mass = 2+a.scale;
a.setCollider('circle', 0, 0, 50);
asteroids.add(a);
return a;
}
function asteroidHit(asteroid, bullet) {
var newType = asteroid.type-1;
if(newType>0) {
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
}
for(var i=0; i<10; i++) {
var p = createSprite(bullet.position.x, bullet.position.y);
p.addImage(particleImage);
p.setSpeed(random(3, 5), random(360));
p.friction = 0.95;
p.life = 15;
}
bullet.remove();
asteroid.remove();
}
| {
background(0);
fill(255);
textAlign(CENTER);
text('Controls: Arrow Keys + X', width/2, 20);
for(var i=0; i<allSprites.length; i++) {
var s = allSprites[i];
if(s.position.x<-MARGIN) s.position.x = width+MARGIN;
if(s.position.x>width+MARGIN) s.position.x = -MARGIN;
if(s.position.y<-MARGIN) s.position.y = height+MARGIN;
if(s.position.y>height+MARGIN) s.position.y = -MARGIN;
}
asteroids.overlap(bullets, asteroidHit);
ship.bounce(asteroids);
if(keyDown(LEFT_ARROW)) | identifier_body |
sap.ui.core.message.d.ts | declare namespace sap.ui.core.message {
class Message extends sap.ui.base.Object {
/**
*
* @param mParameters a map which contains the following parameter properties:
* {string} [mParameters.id] The message id: will be defaulted if no id is set
* {string} [mParameters.message] The message text
* {string} [mParameters.description] The message description
* {sap.ui.core.MessageType} [mParameters.type] The message type
* {string} [mParameters.code] The message code
* {sap.ui.core.message.Messageprocessor} [mParameters.processor]
* {string} [mParameters.target] The message target: The syntax MessageProcessor dependent. Read the documentation of the respective MessageProcessor.
* {boolean} [mParameters.persistent] Sets message persistent: If persistent is set true the message lifecycle controlled by Application
*/
constructor(mParameters?: {
id?: string;
message?: string;
description?: string;
type?: sap.ui.core.MessageType;
code?: string;
processor?: MessageProcessor;
target?: string;
persistent?: boolean;
});
}
class MessageManager extends sap.ui.base.EventProvider {
/**
* Add messages to MessageManager
*/
addMessages(vMessages: Message|Message[]);
/**
* Get the MessageModel
*/
getMessageModel();
/**
* Register MessageProcessor
*/
registerMessageProcessor(oProcessor: MessageProcessor);
/**
* Register ManagedObject: Validation and Parse errors are handled by the MessageManager for this object
*/
registerObject(oObject, bHandleValidation);
/**
* Remove all messages
*/
removeAllMessages();
/**
* Remove given Messages
*/
removeMessages(vMessages);
/**
* Deregister MessageProcessor
*/
unregisterMessageProcessor(oProcessor: MessageProcessor);
/**
* Unregister ManagedObject
*/
unregisterObject(oObject);
}
class ControlMessageProcessor extends MessageProcessor {
/**
* Check Messages and update controls with messages
*/
checkMessages();
/**
* Set Messages to check
*/
setMessages(vMessages);
}
class MessageProcessor extends sap.ui.base.EventProvider {
/**
* Attach event-handler fnFunction to the 'messageChange' event of this sap.ui.core.message.MessageProcessor.
*/
attachMessageChange(fnFunction, oListener?);
attachMessageChange(oData, fnFunction, oListener?);
/**
* Implement in inheriting classes
*/
checkMessage();
/**
* Destroys the MessageProcessor Instance
*/
destroy();
/**
* Detach event-handler fnFunction from the 'sap.ui.core.message.MessageProcessor' event of this sap.ui.core.message.MessageProcessor.
*/
detachMessageChange(fnFunction, oListener);
/**
* Fire event messageChange to attached listeners.
*/
fireMessageChange(mArguments?); | /**
* Returns the ID of the MessageProcessor instance
*/
getId();
/**
* Implement in inheriting classes
*/
setMessages(vMessages);
}
} | random_line_split |
|
sap.ui.core.message.d.ts | declare namespace sap.ui.core.message {
class Message extends sap.ui.base.Object {
/**
*
* @param mParameters a map which contains the following parameter properties:
* {string} [mParameters.id] The message id: will be defaulted if no id is set
* {string} [mParameters.message] The message text
* {string} [mParameters.description] The message description
* {sap.ui.core.MessageType} [mParameters.type] The message type
* {string} [mParameters.code] The message code
* {sap.ui.core.message.Messageprocessor} [mParameters.processor]
* {string} [mParameters.target] The message target: The syntax MessageProcessor dependent. Read the documentation of the respective MessageProcessor.
* {boolean} [mParameters.persistent] Sets message persistent: If persistent is set true the message lifecycle controlled by Application
*/
constructor(mParameters?: {
id?: string;
message?: string;
description?: string;
type?: sap.ui.core.MessageType;
code?: string;
processor?: MessageProcessor;
target?: string;
persistent?: boolean;
});
}
class | extends sap.ui.base.EventProvider {
/**
* Add messages to MessageManager
*/
addMessages(vMessages: Message|Message[]);
/**
* Get the MessageModel
*/
getMessageModel();
/**
* Register MessageProcessor
*/
registerMessageProcessor(oProcessor: MessageProcessor);
/**
* Register ManagedObject: Validation and Parse errors are handled by the MessageManager for this object
*/
registerObject(oObject, bHandleValidation);
/**
* Remove all messages
*/
removeAllMessages();
/**
* Remove given Messages
*/
removeMessages(vMessages);
/**
* Deregister MessageProcessor
*/
unregisterMessageProcessor(oProcessor: MessageProcessor);
/**
* Unregister ManagedObject
*/
unregisterObject(oObject);
}
class ControlMessageProcessor extends MessageProcessor {
/**
* Check Messages and update controls with messages
*/
checkMessages();
/**
* Set Messages to check
*/
setMessages(vMessages);
}
class MessageProcessor extends sap.ui.base.EventProvider {
/**
* Attach event-handler fnFunction to the 'messageChange' event of this sap.ui.core.message.MessageProcessor.
*/
attachMessageChange(fnFunction, oListener?);
attachMessageChange(oData, fnFunction, oListener?);
/**
* Implement in inheriting classes
*/
checkMessage();
/**
* Destroys the MessageProcessor Instance
*/
destroy();
/**
* Detach event-handler fnFunction from the 'sap.ui.core.message.MessageProcessor' event of this sap.ui.core.message.MessageProcessor.
*/
detachMessageChange(fnFunction, oListener);
/**
* Fire event messageChange to attached listeners.
*/
fireMessageChange(mArguments?);
/**
* Returns the ID of the MessageProcessor instance
*/
getId();
/**
* Implement in inheriting classes
*/
setMessages(vMessages);
}
}
| MessageManager | identifier_name |
device.d.ts | export interface Device {
/** Get the version of Cordova running on the device. */
cordova: string;
/**
* The device.model returns the name of the device's model or product. The value is set
* by the device manufacturer and may be different across versions of the same product.
*/
model: string;
/** Get the device's operating system name. */
platform: string;
/** Get the device's Universally Unique Identifier (UUID). */
uuid: string;
/** Get the operating system version. */
version: string;
/** Get the device's manufacturer. */
manufacturer: string;
/** Whether the device is running on a simulator. */
isVirtual: boolean;
/** Get the device hardware serial number. */
serial: string;
}
/**
* @name Device
* @description
* Access information about the underlying device and platform.
*
* @usage
* ```typescript
* import { Device } from 'ionic-native';
*
*
* console.log('Device UUID is: ' + Device.device.uuid);
* ```
*/
export declare class | {
/**
* Returns the whole device object.
*
* @returns {Object} The device object.
*/
static readonly device: Device;
}
| Device | identifier_name |
device.d.ts | export interface Device {
/** Get the version of Cordova running on the device. */
cordova: string;
/**
* The device.model returns the name of the device's model or product. The value is set
* by the device manufacturer and may be different across versions of the same product.
*/
model: string;
/** Get the device's operating system name. */
platform: string;
/** Get the device's Universally Unique Identifier (UUID). */
uuid: string;
/** Get the operating system version. */
version: string;
/** Get the device's manufacturer. */ | manufacturer: string;
/** Whether the device is running on a simulator. */
isVirtual: boolean;
/** Get the device hardware serial number. */
serial: string;
}
/**
* @name Device
* @description
* Access information about the underlying device and platform.
*
* @usage
* ```typescript
* import { Device } from 'ionic-native';
*
*
* console.log('Device UUID is: ' + Device.device.uuid);
* ```
*/
export declare class Device {
/**
* Returns the whole device object.
*
* @returns {Object} The device object.
*/
static readonly device: Device;
} | random_line_split |
|
quota_manager.rs | // Copyright Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Quota manager.
use super::ContractCallExt;
use std::collections::HashMap;
use std::str::FromStr;
use crate::contracts::tools::{decode as decode_tools, method as method_tools};
use crate::libexecutor::executor::Executor;
use crate::types::block_number::BlockTag;
use crate::types::reserved_addresses;
use cita_types::{traits::LowerHex, Address, H160};
use libproto::blockchain::AccountGasLimit as ProtoAccountQuotaLimit;
const QUOTAS: &[u8] = &*b"getQuotas()";
const ACCOUNTS: &[u8] = &*b"getAccounts()";
const BQL: &[u8] = &*b"getBQL()";
const DEFAULT_AQL: &[u8] = &*b"getDefaultAQL()";
// Quota limit of autoExec
const AUTO_EXEC_QL: &[u8] = &*b"getAutoExecQL()";
const BQL_VALUE: u64 = 1_073_741_824;
const AQL_VALUE: u64 = 268_435_456;
pub const AUTO_EXEC_QL_VALUE: u64 = 1_048_576;
lazy_static! {
static ref QUOTAS_HASH: Vec<u8> = method_tools::encode_to_vec(QUOTAS);
static ref ACCOUNTS_HASH: Vec<u8> = method_tools::encode_to_vec(ACCOUNTS);
static ref BQL_HASH: Vec<u8> = method_tools::encode_to_vec(BQL);
static ref DEFAULT_AQL_HASH: Vec<u8> = method_tools::encode_to_vec(DEFAULT_AQL);
static ref AUTO_EXEC_QL_HASH: Vec<u8> = method_tools::encode_to_vec(AUTO_EXEC_QL);
static ref CONTRACT_ADDRESS: H160 = H160::from_str(reserved_addresses::QUOTA_MANAGER).unwrap();
}
#[derive(PartialEq, Clone, Default, Debug, Serialize, Deserialize)]
pub struct AccountQuotaLimit {
pub common_quota_limit: u64,
pub specific_quota_limit: HashMap<Address, u64>,
}
impl AccountQuotaLimit {
pub fn new() -> Self {
AccountQuotaLimit {
common_quota_limit: 4_294_967_296,
specific_quota_limit: HashMap::new(),
}
}
pub fn set_common_quota_limit(&mut self, v: u64) {
self.common_quota_limit = v;
}
pub fn get_common_quota_limit(&self) -> u64 {
self.common_quota_limit
}
pub fn set_specific_quota_limit(&mut self, v: HashMap<Address, u64>) {
self.specific_quota_limit = v;
}
pub fn get_specific_quota_limit(&self) -> &HashMap<Address, u64> {
&self.specific_quota_limit
}
}
impl Into<ProtoAccountQuotaLimit> for AccountQuotaLimit {
fn into(self) -> ProtoAccountQuotaLimit {
let mut r = ProtoAccountQuotaLimit::new();
r.common_quota_limit = self.common_quota_limit;
let specific_quota_limit: HashMap<String, u64> = self
.get_specific_quota_limit()
.iter()
.map(|(k, v)| (k.lower_hex(), *v))
.collect();
r.set_specific_quota_limit(specific_quota_limit);
r
}
}
pub struct QuotaManager<'a> {
executor: &'a Executor,
}
impl<'a> QuotaManager<'a> {
pub fn new(executor: &'a Executor) -> Self {
QuotaManager { executor }
}
/// Special account quota limit
pub fn specific(&self, block_tag: BlockTag) -> HashMap<Address, u64> {
let users = self.users(block_tag).unwrap_or_else(Self::default_users);
let quota = self.quota(block_tag).unwrap_or_else(Self::default_quota);
let mut specific = HashMap::new();
for (k, v) in users.iter().zip(quota.iter()) {
specific.insert(*k, *v);
}
specific
}
/// Quota array
pub fn quota(&self, block_tag: BlockTag) -> Option<Vec<u64>> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*QUOTAS_HASH.as_slice(),
None,
block_tag,
)
.ok() | pub fn default_quota() -> Vec<u64> {
info!("Use default quota.");
Vec::new()
}
/// Account array
pub fn users(&self, block_tag: BlockTag) -> Option<Vec<Address>> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*ACCOUNTS_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_address_vec(&output))
}
pub fn default_users() -> Vec<Address> {
info!("Use default users.");
Vec::new()
}
/// Global quota limit
pub fn block_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(&*CONTRACT_ADDRESS, &*BQL_HASH.as_slice(), None, block_tag)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_block_quota_limit() -> u64 {
info!("Use default block quota limit.");
BQL_VALUE
}
/// Global account quota limit
pub fn account_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*DEFAULT_AQL_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_account_quota_limit() -> u64 {
info!("Use default account quota limit.");
AQL_VALUE
}
/// Auto exec quota limit
pub fn auto_exec_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*AUTO_EXEC_QL_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_auto_exec_quota_limit() -> u64 {
info!("Use default auto exec quota limit.");
AUTO_EXEC_QL_VALUE
}
}
#[cfg(test)]
mod tests {
extern crate cita_logger as logger;
use super::{QuotaManager, AQL_VALUE, AUTO_EXEC_QL_VALUE, BQL_VALUE};
use crate::tests::helpers::init_executor;
use crate::types::block_number::{BlockTag, Tag};
use cita_types::H160;
use std::str::FromStr;
#[test]
fn test_users() {
let executor = init_executor();
let quota_management = QuotaManager::new(&executor);
let users = quota_management.users(BlockTag::Tag(Tag::Pending)).unwrap();
assert_eq!(
users,
vec![H160::from_str("4b5ae4567ad5d9fb92bc9afd6a657e6fa13a2523").unwrap()]
);
}
#[test]
fn test_quota() {
let executor = init_executor();
let quota_management = QuotaManager::new(&executor);
// Test quota
let quota = quota_management.quota(BlockTag::Tag(Tag::Pending)).unwrap();
assert_eq!(quota, vec![BQL_VALUE]);
// Test block quota limit
let block_quota_limit = quota_management
.block_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(block_quota_limit, BQL_VALUE);
// Test account quota limit
let account_quota_limit = quota_management
.account_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(account_quota_limit, AQL_VALUE);
// Test auto exec quota limit
let auto_exec_quota_limit = quota_management
.auto_exec_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(auto_exec_quota_limit, AUTO_EXEC_QL_VALUE);
}
} | .and_then(|output| decode_tools::to_u64_vec(&output))
}
| random_line_split |
quota_manager.rs | // Copyright Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Quota manager.
use super::ContractCallExt;
use std::collections::HashMap;
use std::str::FromStr;
use crate::contracts::tools::{decode as decode_tools, method as method_tools};
use crate::libexecutor::executor::Executor;
use crate::types::block_number::BlockTag;
use crate::types::reserved_addresses;
use cita_types::{traits::LowerHex, Address, H160};
use libproto::blockchain::AccountGasLimit as ProtoAccountQuotaLimit;
const QUOTAS: &[u8] = &*b"getQuotas()";
const ACCOUNTS: &[u8] = &*b"getAccounts()";
const BQL: &[u8] = &*b"getBQL()";
const DEFAULT_AQL: &[u8] = &*b"getDefaultAQL()";
// Quota limit of autoExec
const AUTO_EXEC_QL: &[u8] = &*b"getAutoExecQL()";
const BQL_VALUE: u64 = 1_073_741_824;
const AQL_VALUE: u64 = 268_435_456;
pub const AUTO_EXEC_QL_VALUE: u64 = 1_048_576;
lazy_static! {
static ref QUOTAS_HASH: Vec<u8> = method_tools::encode_to_vec(QUOTAS);
static ref ACCOUNTS_HASH: Vec<u8> = method_tools::encode_to_vec(ACCOUNTS);
static ref BQL_HASH: Vec<u8> = method_tools::encode_to_vec(BQL);
static ref DEFAULT_AQL_HASH: Vec<u8> = method_tools::encode_to_vec(DEFAULT_AQL);
static ref AUTO_EXEC_QL_HASH: Vec<u8> = method_tools::encode_to_vec(AUTO_EXEC_QL);
static ref CONTRACT_ADDRESS: H160 = H160::from_str(reserved_addresses::QUOTA_MANAGER).unwrap();
}
#[derive(PartialEq, Clone, Default, Debug, Serialize, Deserialize)]
pub struct AccountQuotaLimit {
pub common_quota_limit: u64,
pub specific_quota_limit: HashMap<Address, u64>,
}
impl AccountQuotaLimit {
pub fn new() -> Self {
AccountQuotaLimit {
common_quota_limit: 4_294_967_296,
specific_quota_limit: HashMap::new(),
}
}
pub fn set_common_quota_limit(&mut self, v: u64) {
self.common_quota_limit = v;
}
pub fn get_common_quota_limit(&self) -> u64 {
self.common_quota_limit
}
pub fn set_specific_quota_limit(&mut self, v: HashMap<Address, u64>) {
self.specific_quota_limit = v;
}
pub fn get_specific_quota_limit(&self) -> &HashMap<Address, u64> {
&self.specific_quota_limit
}
}
impl Into<ProtoAccountQuotaLimit> for AccountQuotaLimit {
fn | (self) -> ProtoAccountQuotaLimit {
let mut r = ProtoAccountQuotaLimit::new();
r.common_quota_limit = self.common_quota_limit;
let specific_quota_limit: HashMap<String, u64> = self
.get_specific_quota_limit()
.iter()
.map(|(k, v)| (k.lower_hex(), *v))
.collect();
r.set_specific_quota_limit(specific_quota_limit);
r
}
}
pub struct QuotaManager<'a> {
executor: &'a Executor,
}
impl<'a> QuotaManager<'a> {
pub fn new(executor: &'a Executor) -> Self {
QuotaManager { executor }
}
/// Special account quota limit
pub fn specific(&self, block_tag: BlockTag) -> HashMap<Address, u64> {
let users = self.users(block_tag).unwrap_or_else(Self::default_users);
let quota = self.quota(block_tag).unwrap_or_else(Self::default_quota);
let mut specific = HashMap::new();
for (k, v) in users.iter().zip(quota.iter()) {
specific.insert(*k, *v);
}
specific
}
/// Quota array
pub fn quota(&self, block_tag: BlockTag) -> Option<Vec<u64>> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*QUOTAS_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64_vec(&output))
}
pub fn default_quota() -> Vec<u64> {
info!("Use default quota.");
Vec::new()
}
/// Account array
pub fn users(&self, block_tag: BlockTag) -> Option<Vec<Address>> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*ACCOUNTS_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_address_vec(&output))
}
pub fn default_users() -> Vec<Address> {
info!("Use default users.");
Vec::new()
}
/// Global quota limit
pub fn block_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(&*CONTRACT_ADDRESS, &*BQL_HASH.as_slice(), None, block_tag)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_block_quota_limit() -> u64 {
info!("Use default block quota limit.");
BQL_VALUE
}
/// Global account quota limit
pub fn account_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*DEFAULT_AQL_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_account_quota_limit() -> u64 {
info!("Use default account quota limit.");
AQL_VALUE
}
/// Auto exec quota limit
pub fn auto_exec_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*AUTO_EXEC_QL_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_auto_exec_quota_limit() -> u64 {
info!("Use default auto exec quota limit.");
AUTO_EXEC_QL_VALUE
}
}
#[cfg(test)]
mod tests {
extern crate cita_logger as logger;
use super::{QuotaManager, AQL_VALUE, AUTO_EXEC_QL_VALUE, BQL_VALUE};
use crate::tests::helpers::init_executor;
use crate::types::block_number::{BlockTag, Tag};
use cita_types::H160;
use std::str::FromStr;
#[test]
fn test_users() {
let executor = init_executor();
let quota_management = QuotaManager::new(&executor);
let users = quota_management.users(BlockTag::Tag(Tag::Pending)).unwrap();
assert_eq!(
users,
vec![H160::from_str("4b5ae4567ad5d9fb92bc9afd6a657e6fa13a2523").unwrap()]
);
}
#[test]
fn test_quota() {
let executor = init_executor();
let quota_management = QuotaManager::new(&executor);
// Test quota
let quota = quota_management.quota(BlockTag::Tag(Tag::Pending)).unwrap();
assert_eq!(quota, vec![BQL_VALUE]);
// Test block quota limit
let block_quota_limit = quota_management
.block_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(block_quota_limit, BQL_VALUE);
// Test account quota limit
let account_quota_limit = quota_management
.account_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(account_quota_limit, AQL_VALUE);
// Test auto exec quota limit
let auto_exec_quota_limit = quota_management
.auto_exec_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(auto_exec_quota_limit, AUTO_EXEC_QL_VALUE);
}
}
| into | identifier_name |
quota_manager.rs | // Copyright Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Quota manager.
use super::ContractCallExt;
use std::collections::HashMap;
use std::str::FromStr;
use crate::contracts::tools::{decode as decode_tools, method as method_tools};
use crate::libexecutor::executor::Executor;
use crate::types::block_number::BlockTag;
use crate::types::reserved_addresses;
use cita_types::{traits::LowerHex, Address, H160};
use libproto::blockchain::AccountGasLimit as ProtoAccountQuotaLimit;
const QUOTAS: &[u8] = &*b"getQuotas()";
const ACCOUNTS: &[u8] = &*b"getAccounts()";
const BQL: &[u8] = &*b"getBQL()";
const DEFAULT_AQL: &[u8] = &*b"getDefaultAQL()";
// Quota limit of autoExec
const AUTO_EXEC_QL: &[u8] = &*b"getAutoExecQL()";
const BQL_VALUE: u64 = 1_073_741_824;
const AQL_VALUE: u64 = 268_435_456;
pub const AUTO_EXEC_QL_VALUE: u64 = 1_048_576;
lazy_static! {
static ref QUOTAS_HASH: Vec<u8> = method_tools::encode_to_vec(QUOTAS);
static ref ACCOUNTS_HASH: Vec<u8> = method_tools::encode_to_vec(ACCOUNTS);
static ref BQL_HASH: Vec<u8> = method_tools::encode_to_vec(BQL);
static ref DEFAULT_AQL_HASH: Vec<u8> = method_tools::encode_to_vec(DEFAULT_AQL);
static ref AUTO_EXEC_QL_HASH: Vec<u8> = method_tools::encode_to_vec(AUTO_EXEC_QL);
static ref CONTRACT_ADDRESS: H160 = H160::from_str(reserved_addresses::QUOTA_MANAGER).unwrap();
}
#[derive(PartialEq, Clone, Default, Debug, Serialize, Deserialize)]
pub struct AccountQuotaLimit {
pub common_quota_limit: u64,
pub specific_quota_limit: HashMap<Address, u64>,
}
impl AccountQuotaLimit {
pub fn new() -> Self {
AccountQuotaLimit {
common_quota_limit: 4_294_967_296,
specific_quota_limit: HashMap::new(),
}
}
pub fn set_common_quota_limit(&mut self, v: u64) {
self.common_quota_limit = v;
}
pub fn get_common_quota_limit(&self) -> u64 {
self.common_quota_limit
}
pub fn set_specific_quota_limit(&mut self, v: HashMap<Address, u64>) {
self.specific_quota_limit = v;
}
pub fn get_specific_quota_limit(&self) -> &HashMap<Address, u64> {
&self.specific_quota_limit
}
}
impl Into<ProtoAccountQuotaLimit> for AccountQuotaLimit {
fn into(self) -> ProtoAccountQuotaLimit {
let mut r = ProtoAccountQuotaLimit::new();
r.common_quota_limit = self.common_quota_limit;
let specific_quota_limit: HashMap<String, u64> = self
.get_specific_quota_limit()
.iter()
.map(|(k, v)| (k.lower_hex(), *v))
.collect();
r.set_specific_quota_limit(specific_quota_limit);
r
}
}
pub struct QuotaManager<'a> {
executor: &'a Executor,
}
impl<'a> QuotaManager<'a> {
pub fn new(executor: &'a Executor) -> Self {
QuotaManager { executor }
}
/// Special account quota limit
pub fn specific(&self, block_tag: BlockTag) -> HashMap<Address, u64> |
/// Quota array
pub fn quota(&self, block_tag: BlockTag) -> Option<Vec<u64>> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*QUOTAS_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64_vec(&output))
}
pub fn default_quota() -> Vec<u64> {
info!("Use default quota.");
Vec::new()
}
/// Account array
pub fn users(&self, block_tag: BlockTag) -> Option<Vec<Address>> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*ACCOUNTS_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_address_vec(&output))
}
pub fn default_users() -> Vec<Address> {
info!("Use default users.");
Vec::new()
}
/// Global quota limit
pub fn block_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(&*CONTRACT_ADDRESS, &*BQL_HASH.as_slice(), None, block_tag)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_block_quota_limit() -> u64 {
info!("Use default block quota limit.");
BQL_VALUE
}
/// Global account quota limit
pub fn account_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*DEFAULT_AQL_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_account_quota_limit() -> u64 {
info!("Use default account quota limit.");
AQL_VALUE
}
/// Auto exec quota limit
pub fn auto_exec_quota_limit(&self, block_tag: BlockTag) -> Option<u64> {
self.executor
.call_method(
&*CONTRACT_ADDRESS,
&*AUTO_EXEC_QL_HASH.as_slice(),
None,
block_tag,
)
.ok()
.and_then(|output| decode_tools::to_u64(&output))
}
pub fn default_auto_exec_quota_limit() -> u64 {
info!("Use default auto exec quota limit.");
AUTO_EXEC_QL_VALUE
}
}
#[cfg(test)]
mod tests {
extern crate cita_logger as logger;
use super::{QuotaManager, AQL_VALUE, AUTO_EXEC_QL_VALUE, BQL_VALUE};
use crate::tests::helpers::init_executor;
use crate::types::block_number::{BlockTag, Tag};
use cita_types::H160;
use std::str::FromStr;
#[test]
fn test_users() {
let executor = init_executor();
let quota_management = QuotaManager::new(&executor);
let users = quota_management.users(BlockTag::Tag(Tag::Pending)).unwrap();
assert_eq!(
users,
vec![H160::from_str("4b5ae4567ad5d9fb92bc9afd6a657e6fa13a2523").unwrap()]
);
}
#[test]
fn test_quota() {
let executor = init_executor();
let quota_management = QuotaManager::new(&executor);
// Test quota
let quota = quota_management.quota(BlockTag::Tag(Tag::Pending)).unwrap();
assert_eq!(quota, vec![BQL_VALUE]);
// Test block quota limit
let block_quota_limit = quota_management
.block_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(block_quota_limit, BQL_VALUE);
// Test account quota limit
let account_quota_limit = quota_management
.account_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(account_quota_limit, AQL_VALUE);
// Test auto exec quota limit
let auto_exec_quota_limit = quota_management
.auto_exec_quota_limit(BlockTag::Tag(Tag::Pending))
.unwrap();
assert_eq!(auto_exec_quota_limit, AUTO_EXEC_QL_VALUE);
}
}
| {
let users = self.users(block_tag).unwrap_or_else(Self::default_users);
let quota = self.quota(block_tag).unwrap_or_else(Self::default_quota);
let mut specific = HashMap::new();
for (k, v) in users.iter().zip(quota.iter()) {
specific.insert(*k, *v);
}
specific
} | identifier_body |
crx.js | /* global require, process, Buffer, module */
'use strict';
var fs = require("fs");
var path = require("path");
var join = path.join;
var crypto = require("crypto");
var RSA = require('node-rsa');
var wrench = require("wrench");
var archiver = require("archiver");
var Promise = require('es6-promise').Promise;
var temp = require('temp');
function ChromeExtension(attrs) {
if ((this instanceof ChromeExtension) !== true) {
return new ChromeExtension(attrs);
}
/*
Defaults
*/
this.appId = null;
this.manifest = '';
this.loaded = false;
this.rootDirectory = '';
this.publicKey = null;
this.privateKey = null;
this.codebase = null;
/*
Copying attributes
*/
for (var name in attrs) {
this[name] = attrs[name];
}
temp.track();
this.path = temp.mkdirSync('crx');
}
ChromeExtension.prototype = {
/**
* Packs the content of the extension in a crx file.
*
* @param {Buffer=} contentsBuffer
* @returns {Promise}
* @example
*
* crx.pack().then(function(crxContent){
* // do something with the crxContent binary data
* });
*
*/
pack: function (contentsBuffer) {
if (!this.loaded) {
return this.load().then(this.pack.bind(this, contentsBuffer));
}
var selfie = this;
var packP = [
this.generatePublicKey(),
contentsBuffer || selfie.loadContents(),
this.writeFile("manifest.json", JSON.stringify(selfie.manifest))
];
return Promise.all(packP).then(function(outputs){
var publicKey = outputs[0];
var contents = outputs[1];
selfie.publicKey = publicKey;
var signature = selfie.generateSignature(contents);
return selfie.generatePackage(signature, publicKey, contents);
}, function (err) {
throw new Error(err.toString());
});
},
/**
* Loads extension manifest and copies its content to a workable path.
*
* @param {string=} path
* @returns {Promise}
*/
load: function (path) {
var selfie = this;
return new Promise(function(resolve, reject){
wrench.copyDirRecursive(path || selfie.rootDirectory, selfie.path, {forceDelete: true}, function (err) {
if (err) |
selfie.manifest = require(join(selfie.path, "manifest.json"));
selfie.loaded = true;
resolve(selfie);
});
});
},
/**
* Writes data into the extension workable directory.
*
* @param {string} path
* @param {*} data
* @returns {Promise}
*/
writeFile: function (path, data) {
var absPath = join(this.path, path);
return new Promise(function(resolve, reject){
fs.writeFile(absPath, data, function (err) {
if (err) {
return reject(err);
}
resolve();
});
});
},
/**
* Generates a public key.
*
* BC BREAK `this.publicKey` is not stored anymore (since 1.0.0)
* BC BREAK callback parameter has been removed in favor of the promise interface.
*
* @returns {Promise} Resolves to {Buffer} containing the public key
* @example
*
* crx.generatePublicKey().then(function(publicKey){
* // do something with publicKey
* });
*/
generatePublicKey: function () {
var privateKey = this.privateKey;
return new Promise(function(resolve, reject){
var key = new RSA(privateKey);
resolve(key.exportKey('pkcs8-public-der'));
});
},
/**
* Generates a SHA1 package signature.
*
* BC BREAK `this.signature` is not stored anymore (since 1.0.0)
*
* @param {Buffer} contents
* @returns {Buffer}
*/
generateSignature: function (contents) {
return new Buffer(
crypto
.createSign("sha1")
.update(contents)
.sign(this.privateKey),
"binary"
);
},
/**
* Archives the loaded extension content and resolves with the zip data as a Buffer.
* BC BREAK `this.contents` is not stored anymore (since 1.0.0)
*
* @returns {Promise}
*/
loadContents: function () {
var archive = archiver("zip");
var selfie = this;
return new Promise(function(resolve, reject){
var contents = new Buffer('');
var allFiles = [];
if (!selfie.loaded) {
throw new Error('crx.load needs to be called first in order to prepare the workspace.');
}
// the callback is called many times
// when 'files' is null, it means we accumulated everything
// hence this weird setup
wrench.readdirRecursive(selfie.path, function(err, files){
if (err){
return reject(err);
}
// stack unless 'files' is null
if (files){
allFiles = allFiles.concat(files);
return;
}
allFiles.forEach(function (file) {
var filePath = join(selfie.path, file);
var stat = fs.statSync(filePath);
if (stat.isFile() && file !== "key.pem") {
archive.append(fs.createReadStream(filePath), { name: file });
}
});
archive.finalize();
// Relates to the issue: "Event 'finished' no longer valid #18"
// https://github.com/jed/crx/issues/18
// TODO: Buffer concat could be a problem when building a big extension.
// So ideally only the 'finish' callback must be used.
archive.on('readable', function () {
var buf = archive.read();
if (buf) {
contents = Buffer.concat([contents, buf]);
}
});
archive.on('finish', function () {
resolve(contents);
});
archive.on("error", reject);
});
});
},
/**
* Generates and returns a signed package from extension content.
*
* BC BREAK `this.package` is not stored anymore (since 1.0.0)
*
* @param {Buffer} signature
* @param {Buffer} publicKey
* @param {Buffer} contents
* @returns {Buffer}
*/
generatePackage: function (signature, publicKey, contents) {
var keyLength = publicKey.length;
var sigLength = signature.length;
var zipLength = contents.length;
var length = 16 + keyLength + sigLength + zipLength;
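// CRX v2 layout written below: "Cr24" magic (4 bytes), format version (4 bytes, LE),
// public key length (4 bytes, LE), signature length (4 bytes, LE), followed by
// the DER-encoded public key, the signature, and the zip archive.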
var crx = new Buffer(length);
crx.write("Cr24" + new Array(13).join("\x00"), "binary");
crx[4] = 2;
crx.writeUInt32LE(keyLength, 8);
crx.writeUInt32LE(sigLength, 12);
publicKey.copy(crx, 16);
signature.copy(crx, 16 + keyLength);
contents.copy(crx, 16 + keyLength + sigLength);
return crx;
},
/**
* Generates an appId from the publicKey.
* Public key has to be set for this to work, otherwise an error is thrown.
*
* BC BREAK `this.appId` is not stored anymore (since 1.0.0)
* BC BREAK introduced `publicKey` parameter as it is not stored any more since 2.0.0
*
* @param {Buffer|string} [publicKey] the public key to use to generate the app ID
* @returns {string}
*/
generateAppId: function (publicKey) {
publicKey = publicKey || this.publicKey;
if (typeof publicKey !== 'string' && !(publicKey instanceof Buffer)) {
throw new Error('Public key is neither set, nor given');
}
return crypto
.createHash("sha256")
.update(publicKey)
.digest("hex")
.slice(0, 32)
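// Map each hex digit (0-f) to the letters a-p, the alphabet Chrome
// uses for extension app IDs.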
.replace(/./g, function (x) {
return (parseInt(x, 16) + 10).toString(26);
});
},
/**
* Generates an updateXML file from the extension content.
*
* BC BREAK `this.updateXML` is not stored anymore (since 1.0.0)
*
* @returns {Buffer}
*/
generateUpdateXML: function () {
if (!this.codebase) {
throw new Error("No URL provided for update.xml.");
}
return new Buffer(
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<gupdate xmlns='http://www.google.com/update2/response' protocol='2.0'>\n" +
" <app appid='" + (this.appId || this.generateAppId()) + "'>\n" +
" <updatecheck codebase='" + this.codebase + "' version='" + this.manifest.version + "' />\n" +
" </app>\n" +
"</gupdate>"
);
}
};
module.exports = ChromeExtension;
| {
return reject(err);
} | conditional_block |
crx.js | /* global require, process, Buffer, module */
'use strict';
var fs = require("fs");
var path = require("path");
var join = path.join;
var crypto = require("crypto");
var RSA = require('node-rsa');
var wrench = require("wrench");
var archiver = require("archiver");
var Promise = require('es6-promise').Promise;
var temp = require('temp');
function | (attrs) {
if ((this instanceof ChromeExtension) !== true) {
return new ChromeExtension(attrs);
}
/*
Defaults
*/
this.appId = null;
this.manifest = '';
this.loaded = false;
this.rootDirectory = '';
this.publicKey = null;
this.privateKey = null;
this.codebase = null;
/*
Copying attributes
*/
for (var name in attrs) {
this[name] = attrs[name];
}
temp.track();
this.path = temp.mkdirSync('crx');
}
ChromeExtension.prototype = {
/**
* Packs the content of the extension in a crx file.
*
* @param {Buffer=} contentsBuffer
* @returns {Promise}
* @example
*
* crx.pack().then(function(crxContent){
* // do something with the crxContent binary data
* });
*
*/
pack: function (contentsBuffer) {
if (!this.loaded) {
return this.load().then(this.pack.bind(this, contentsBuffer));
}
var selfie = this;
var packP = [
this.generatePublicKey(),
contentsBuffer || selfie.loadContents(),
this.writeFile("manifest.json", JSON.stringify(selfie.manifest))
];
return Promise.all(packP).then(function(outputs){
var publicKey = outputs[0];
var contents = outputs[1];
selfie.publicKey = publicKey;
var signature = selfie.generateSignature(contents);
return selfie.generatePackage(signature, publicKey, contents);
}, function (err) {
throw new Error(err.toString());
});
},
/**
* Loads extension manifest and copies its content to a workable path.
*
* @param {string=} path
* @returns {Promise}
*/
load: function (path) {
var selfie = this;
return new Promise(function(resolve, reject){
wrench.copyDirRecursive(path || selfie.rootDirectory, selfie.path, {forceDelete: true}, function (err) {
if (err) {
return reject(err);
}
selfie.manifest = require(join(selfie.path, "manifest.json"));
selfie.loaded = true;
resolve(selfie);
});
});
},
/**
* Writes data into the extension workable directory.
*
* @param {string} path
* @param {*} data
* @returns {Promise}
*/
writeFile: function (path, data) {
var absPath = join(this.path, path);
return new Promise(function(resolve, reject){
fs.writeFile(absPath, data, function (err) {
if (err) {
return reject(err);
}
resolve();
});
});
},
/**
* Generates a public key.
*
* BC BREAK `this.publicKey` is not stored anymore (since 1.0.0)
* BC BREAK callback parameter has been removed in favor of the promise interface.
*
* @returns {Promise} Resolves to {Buffer} containing the public key
* @example
*
* crx.generatePublicKey().then(function(publicKey){
* // do something with publicKey
* });
*/
generatePublicKey: function () {
var privateKey = this.privateKey;
return new Promise(function(resolve, reject){
var key = new RSA(privateKey);
resolve(key.exportKey('pkcs8-public-der'));
});
},
/**
* Generates a SHA1 package signature.
*
* BC BREAK `this.signature` is not stored anymore (since 1.0.0)
*
* @param {Buffer} contents
* @returns {Buffer}
*/
generateSignature: function (contents) {
return new Buffer(
crypto
.createSign("sha1")
.update(contents)
.sign(this.privateKey),
"binary"
);
},
/**
* Archives the loaded extension content and resolves with the zip data as a Buffer.
* BC BREAK `this.contents` is not stored anymore (since 1.0.0)
*
* @returns {Promise}
*/
loadContents: function () {
var archive = archiver("zip");
var selfie = this;
return new Promise(function(resolve, reject){
var contents = new Buffer('');
var allFiles = [];
if (!selfie.loaded) {
throw new Error('crx.load needs to be called first in order to prepare the workspace.');
}
// the callback is called many times
// when 'files' is null, it means we accumulated everything
// hence this weird setup
wrench.readdirRecursive(selfie.path, function(err, files){
if (err){
return reject(err);
}
// stack unless 'files' is null
if (files){
allFiles = allFiles.concat(files);
return;
}
allFiles.forEach(function (file) {
var filePath = join(selfie.path, file);
var stat = fs.statSync(filePath);
if (stat.isFile() && file !== "key.pem") {
archive.append(fs.createReadStream(filePath), { name: file });
}
});
archive.finalize();
// Relates to the issue: "Event 'finished' no longer valid #18"
// https://github.com/jed/crx/issues/18
// TODO: Buffer concat could be a problem when building a big extension.
// So ideally only the 'finish' callback must be used.
archive.on('readable', function () {
var buf = archive.read();
if (buf) {
contents = Buffer.concat([contents, buf]);
}
});
archive.on('finish', function () {
resolve(contents);
});
archive.on("error", reject);
});
});
},
/**
* Generates and returns a signed package from extension content.
*
* BC BREAK `this.package` is not stored anymore (since 1.0.0)
*
* @param {Buffer} signature
* @param {Buffer} publicKey
* @param {Buffer} contents
* @returns {Buffer}
*/
generatePackage: function (signature, publicKey, contents) {
var keyLength = publicKey.length;
var sigLength = signature.length;
var zipLength = contents.length;
var length = 16 + keyLength + sigLength + zipLength;
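// CRX v2 layout written below: "Cr24" magic (4 bytes), format version (4 bytes, LE),
// public key length (4 bytes, LE), signature length (4 bytes, LE), followed by
// the DER-encoded public key, the signature, and the zip archive.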
var crx = new Buffer(length);
crx.write("Cr24" + new Array(13).join("\x00"), "binary");
crx[4] = 2;
crx.writeUInt32LE(keyLength, 8);
crx.writeUInt32LE(sigLength, 12);
publicKey.copy(crx, 16);
signature.copy(crx, 16 + keyLength);
contents.copy(crx, 16 + keyLength + sigLength);
return crx;
},
/**
* Generates an appId from the publicKey.
* Public key has to be set for this to work, otherwise an error is thrown.
*
* BC BREAK `this.appId` is not stored anymore (since 1.0.0)
* BC BREAK introduced `publicKey` parameter as it is not stored any more since 2.0.0
*
* @param {Buffer|string} [publicKey] the public key to use to generate the app ID
* @returns {string}
*/
generateAppId: function (publicKey) {
publicKey = publicKey || this.publicKey;
if (typeof publicKey !== 'string' && !(publicKey instanceof Buffer)) {
throw new Error('Public key is neither set, nor given');
}
return crypto
.createHash("sha256")
.update(publicKey)
.digest("hex")
.slice(0, 32)
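// Map each hex digit (0-f) to the letters a-p, the alphabet Chrome
// uses for extension app IDs.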
.replace(/./g, function (x) {
return (parseInt(x, 16) + 10).toString(26);
});
},
/**
* Generates an updateXML file from the extension content.
*
* BC BREAK `this.updateXML` is not stored anymore (since 1.0.0)
*
* @returns {Buffer}
*/
generateUpdateXML: function () {
if (!this.codebase) {
throw new Error("No URL provided for update.xml.");
}
return new Buffer(
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<gupdate xmlns='http://www.google.com/update2/response' protocol='2.0'>\n" +
" <app appid='" + (this.appId || this.generateAppId()) + "'>\n" +
" <updatecheck codebase='" + this.codebase + "' version='" + this.manifest.version + "' />\n" +
" </app>\n" +
"</gupdate>"
);
}
};
module.exports = ChromeExtension;
| ChromeExtension | identifier_name |
crx.js | /* global require, process, Buffer, module */
'use strict';
var fs = require("fs");
var path = require("path");
var join = path.join;
var crypto = require("crypto");
var RSA = require('node-rsa');
var wrench = require("wrench");
var archiver = require("archiver");
var Promise = require('es6-promise').Promise;
var temp = require('temp');
function ChromeExtension(attrs) {
if ((this instanceof ChromeExtension) !== true) {
return new ChromeExtension(attrs);
}
/*
Defaults
*/
this.appId = null;
this.manifest = '';
this.loaded = false;
this.rootDirectory = '';
this.publicKey = null;
this.privateKey = null; |
this.codebase = null;
/*
Copying attributes
*/
for (var name in attrs) {
this[name] = attrs[name];
}
temp.track();
this.path = temp.mkdirSync('crx');
}
ChromeExtension.prototype = {
/**
* Packs the content of the extension in a crx file.
*
* @param {Buffer=} contentsBuffer
* @returns {Promise}
* @example
*
* crx.pack().then(function(crxContent){
* // do something with the crxContent binary data
* });
*
*/
pack: function (contentsBuffer) {
if (!this.loaded) {
return this.load().then(this.pack.bind(this, contentsBuffer));
}
var selfie = this;
var packP = [
this.generatePublicKey(),
contentsBuffer || selfie.loadContents(),
this.writeFile("manifest.json", JSON.stringify(selfie.manifest))
];
return Promise.all(packP).then(function(outputs){
var publicKey = outputs[0];
var contents = outputs[1];
selfie.publicKey = publicKey;
var signature = selfie.generateSignature(contents);
return selfie.generatePackage(signature, publicKey, contents);
}, function (err) {
throw new Error(err.toString());
});
},
/**
* Loads extension manifest and copies its content to a workable path.
*
* @param {string=} path
* @returns {Promise}
*/
load: function (path) {
var selfie = this;
return new Promise(function(resolve, reject){
wrench.copyDirRecursive(path || selfie.rootDirectory, selfie.path, {forceDelete: true}, function (err) {
if (err) {
return reject(err);
}
selfie.manifest = require(join(selfie.path, "manifest.json"));
selfie.loaded = true;
resolve(selfie);
});
});
},
/**
* Writes data into the extension workable directory.
*
* @param {string} path
* @param {*} data
* @returns {Promise}
*/
writeFile: function (path, data) {
var absPath = join(this.path, path);
return new Promise(function(resolve, reject){
fs.writeFile(absPath, data, function (err) {
if (err) {
return reject(err);
}
resolve();
});
});
},
/**
* Generates a public key.
*
* BC BREAK `this.publicKey` is not stored anymore (since 1.0.0)
* BC BREAK callback parameter has been removed in favor of the promise interface.
*
* @returns {Promise} Resolves to {Buffer} containing the public key
* @example
*
* crx.generatePublicKey().then(function(publicKey){
* // do something with publicKey
* });
*/
generatePublicKey: function () {
var privateKey = this.privateKey;
return new Promise(function(resolve, reject){
var key = new RSA(privateKey);
resolve(key.exportKey('pkcs8-public-der'));
});
},
/**
* Generates a SHA1 package signature.
*
* BC BREAK `this.signature` is not stored anymore (since 1.0.0)
*
* @param {Buffer} contents
* @returns {Buffer}
*/
generateSignature: function (contents) {
return new Buffer(
crypto
.createSign("sha1")
.update(contents)
.sign(this.privateKey),
"binary"
);
},
/**
* Archives the loaded extension content and resolves with the zip data as a Buffer.
* BC BREAK `this.contents` is not stored anymore (since 1.0.0)
*
* @returns {Promise}
*/
loadContents: function () {
var archive = archiver("zip");
var selfie = this;
return new Promise(function(resolve, reject){
var contents = new Buffer('');
var allFiles = [];
if (!selfie.loaded) {
throw new Error('crx.load needs to be called first in order to prepare the workspace.');
}
// the callback is called many times
// when 'files' is null, it means we accumulated everything
// hence this weird setup
wrench.readdirRecursive(selfie.path, function(err, files){
if (err){
return reject(err);
}
// stack unless 'files' is null
if (files){
allFiles = allFiles.concat(files);
return;
}
allFiles.forEach(function (file) {
var filePath = join(selfie.path, file);
var stat = fs.statSync(filePath);
if (stat.isFile() && file !== "key.pem") {
archive.append(fs.createReadStream(filePath), { name: file });
}
});
archive.finalize();
// Relates to the issue: "Event 'finished' no longer valid #18"
// https://github.com/jed/crx/issues/18
// TODO: Buffer concat could be a problem when building a big extension.
// So ideally only the 'finish' callback must be used.
archive.on('readable', function () {
var buf = archive.read();
if (buf) {
contents = Buffer.concat([contents, buf]);
}
});
archive.on('finish', function () {
resolve(contents);
});
archive.on("error", reject);
});
});
},
/**
* Generates and returns a signed package from extension content.
*
* BC BREAK `this.package` is not stored anymore (since 1.0.0)
*
* @param {Buffer} signature
* @param {Buffer} publicKey
* @param {Buffer} contents
* @returns {Buffer}
*/
generatePackage: function (signature, publicKey, contents) {
var keyLength = publicKey.length;
var sigLength = signature.length;
var zipLength = contents.length;
var length = 16 + keyLength + sigLength + zipLength;
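// CRX v2 layout written below: "Cr24" magic (4 bytes), format version (4 bytes, LE),
// public key length (4 bytes, LE), signature length (4 bytes, LE), followed by
// the DER-encoded public key, the signature, and the zip archive.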
var crx = new Buffer(length);
crx.write("Cr24" + new Array(13).join("\x00"), "binary");
crx[4] = 2;
crx.writeUInt32LE(keyLength, 8);
crx.writeUInt32LE(sigLength, 12);
publicKey.copy(crx, 16);
signature.copy(crx, 16 + keyLength);
contents.copy(crx, 16 + keyLength + sigLength);
return crx;
},
/**
* Generates an appId from the publicKey.
* Public key has to be set for this to work, otherwise an error is thrown.
*
* BC BREAK `this.appId` is not stored anymore (since 1.0.0)
* BC BREAK introduced `publicKey` parameter as it is not stored any more since 2.0.0
*
* @param {Buffer|string} [publicKey] the public key to use to generate the app ID
* @returns {string}
*/
generateAppId: function (publicKey) {
publicKey = publicKey || this.publicKey;
if (typeof publicKey !== 'string' && !(publicKey instanceof Buffer)) {
throw new Error('Public key is neither set, nor given');
}
return crypto
.createHash("sha256")
.update(publicKey)
.digest("hex")
.slice(0, 32)
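// Map each hex digit (0-f) to the letters a-p, the alphabet Chrome
// uses for extension app IDs.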
.replace(/./g, function (x) {
return (parseInt(x, 16) + 10).toString(26);
});
},
/**
* Generates an updateXML file from the extension content.
*
* BC BREAK `this.updateXML` is not stored anymore (since 1.0.0)
*
* @returns {Buffer}
*/
generateUpdateXML: function () {
if (!this.codebase) {
throw new Error("No URL provided for update.xml.");
}
return new Buffer(
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<gupdate xmlns='http://www.google.com/update2/response' protocol='2.0'>\n" +
" <app appid='" + (this.appId || this.generateAppId()) + "'>\n" +
" <updatecheck codebase='" + this.codebase + "' version='" + this.manifest.version + "' />\n" +
" </app>\n" +
"</gupdate>"
);
}
};
module.exports = ChromeExtension; | random_line_split |
|
crx.js | /* global require, process, Buffer, module */
'use strict';
var fs = require("fs");
var path = require("path");
var join = path.join;
var crypto = require("crypto");
var RSA = require('node-rsa');
var wrench = require("wrench");
var archiver = require("archiver");
var Promise = require('es6-promise').Promise;
var temp = require('temp');
function ChromeExtension(attrs) | this.codebase = null;
/*
Copying attributes
*/
for (var name in attrs) {
this[name] = attrs[name];
}
temp.track();
this.path = temp.mkdirSync('crx');
}
ChromeExtension.prototype = {
/**
* Packs the content of the extension in a crx file.
*
* @param {Buffer=} contentsBuffer
* @returns {Promise}
* @example
*
* crx.pack().then(function(crxContent){
* // do something with the crxContent binary data
* });
*
*/
pack: function (contentsBuffer) {
if (!this.loaded) {
return this.load().then(this.pack.bind(this, contentsBuffer));
}
var selfie = this;
var packP = [
this.generatePublicKey(),
contentsBuffer || selfie.loadContents(),
this.writeFile("manifest.json", JSON.stringify(selfie.manifest))
];
return Promise.all(packP).then(function(outputs){
var publicKey = outputs[0];
var contents = outputs[1];
selfie.publicKey = publicKey;
var signature = selfie.generateSignature(contents);
return selfie.generatePackage(signature, publicKey, contents);
}, function (err) {
throw new Error(err.toString());
});
},
/**
* Loads extension manifest and copies its content to a workable path.
*
* @param {string=} path
* @returns {Promise}
*/
load: function (path) {
var selfie = this;
return new Promise(function(resolve, reject){
wrench.copyDirRecursive(path || selfie.rootDirectory, selfie.path, {forceDelete: true}, function (err) {
if (err) {
return reject(err);
}
selfie.manifest = require(join(selfie.path, "manifest.json"));
selfie.loaded = true;
resolve(selfie);
});
});
},
/**
* Writes data into the extension workable directory.
*
* @param {string} path
* @param {*} data
* @returns {Promise}
*/
writeFile: function (path, data) {
var absPath = join(this.path, path);
return new Promise(function(resolve, reject){
fs.writeFile(absPath, data, function (err) {
if (err) {
return reject(err);
}
resolve();
});
});
},
/**
* Generates a public key.
*
* BC BREAK `this.publicKey` is not stored anymore (since 1.0.0)
* BC BREAK callback parameter has been removed in favor of the promise interface.
*
* @returns {Promise} Resolves to {Buffer} containing the public key
* @example
*
* crx.generatePublicKey().then(function(publicKey){
* // do something with publicKey
* });
*/
generatePublicKey: function () {
var privateKey = this.privateKey;
return new Promise(function(resolve, reject){
var key = new RSA(privateKey);
resolve(key.exportKey('pkcs8-public-der'));
});
},
/**
* Generates a SHA1 package signature.
*
* BC BREAK `this.signature` is not stored anymore (since 1.0.0)
*
* @param {Buffer} contents
* @returns {Buffer}
*/
generateSignature: function (contents) {
return new Buffer(
crypto
.createSign("sha1")
.update(contents)
.sign(this.privateKey),
"binary"
);
},
/**
* Archives the loaded extension content and resolves with the zip data as a Buffer.
* BC BREAK `this.contents` is not stored anymore (since 1.0.0)
*
* @returns {Promise}
*/
loadContents: function () {
var archive = archiver("zip");
var selfie = this;
return new Promise(function(resolve, reject){
var contents = new Buffer('');
var allFiles = [];
if (!selfie.loaded) {
throw new Error('crx.load needs to be called first in order to prepare the workspace.');
}
// the callback is called many times
// when 'files' is null, it means we accumulated everything
// hence this weird setup
wrench.readdirRecursive(selfie.path, function(err, files){
if (err){
return reject(err);
}
// stack unless 'files' is null
if (files){
allFiles = allFiles.concat(files);
return;
}
allFiles.forEach(function (file) {
var filePath = join(selfie.path, file);
var stat = fs.statSync(filePath);
if (stat.isFile() && file !== "key.pem") {
archive.append(fs.createReadStream(filePath), { name: file });
}
});
archive.finalize();
// Relates to the issue: "Event 'finished' no longer valid #18"
// https://github.com/jed/crx/issues/18
// TODO: Buffer concat could be a problem when building a big extension.
// So ideally only the 'finish' callback must be used.
archive.on('readable', function () {
var buf = archive.read();
if (buf) {
contents = Buffer.concat([contents, buf]);
}
});
archive.on('finish', function () {
resolve(contents);
});
archive.on("error", reject);
});
});
},
/**
* Generates and returns a signed package from extension content.
*
* BC BREAK `this.package` is not stored anymore (since 1.0.0)
*
* @param {Buffer} signature
* @param {Buffer} publicKey
* @param {Buffer} contents
* @returns {Buffer}
*/
generatePackage: function (signature, publicKey, contents) {
var keyLength = publicKey.length;
var sigLength = signature.length;
var zipLength = contents.length;
var length = 16 + keyLength + sigLength + zipLength;
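// CRX v2 layout written below: "Cr24" magic (4 bytes), format version (4 bytes, LE),
// public key length (4 bytes, LE), signature length (4 bytes, LE), followed by
// the DER-encoded public key, the signature, and the zip archive.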
var crx = new Buffer(length);
crx.write("Cr24" + new Array(13).join("\x00"), "binary");
crx[4] = 2;
crx.writeUInt32LE(keyLength, 8);
crx.writeUInt32LE(sigLength, 12);
publicKey.copy(crx, 16);
signature.copy(crx, 16 + keyLength);
contents.copy(crx, 16 + keyLength + sigLength);
return crx;
},
/**
* Generates an appId from the publicKey.
* Public key has to be set for this to work, otherwise an error is thrown.
*
* BC BREAK `this.appId` is not stored anymore (since 1.0.0)
* BC BREAK introduced `publicKey` parameter as it is not stored any more since 2.0.0
*
* @param {Buffer|string} [publicKey] the public key to use to generate the app ID
* @returns {string}
*/
generateAppId: function (publicKey) {
publicKey = publicKey || this.publicKey;
if (typeof publicKey !== 'string' && !(publicKey instanceof Buffer)) {
throw new Error('Public key is neither set, nor given');
}
return crypto
.createHash("sha256")
.update(publicKey)
.digest("hex")
.slice(0, 32)
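// Map each hex digit (0-f) to the letters a-p, the alphabet Chrome
// uses for extension app IDs.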
.replace(/./g, function (x) {
return (parseInt(x, 16) + 10).toString(26);
});
},
/**
* Generates an updateXML file from the extension content.
*
* BC BREAK `this.updateXML` is not stored anymore (since 1.0.0)
*
* @returns {Buffer}
*/
generateUpdateXML: function () {
if (!this.codebase) {
throw new Error("No URL provided for update.xml.");
}
return new Buffer(
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<gupdate xmlns='http://www.google.com/update2/response' protocol='2.0'>\n" +
" <app appid='" + (this.appId || this.generateAppId()) + "'>\n" +
" <updatecheck codebase='" + this.codebase + "' version='" + this.manifest.version + "' />\n" +
" </app>\n" +
"</gupdate>"
);
}
};
module.exports = ChromeExtension;
| {
if ((this instanceof ChromeExtension) !== true) {
return new ChromeExtension(attrs);
}
/*
Defaults
*/
this.appId = null;
this.manifest = '';
this.loaded = false;
this.rootDirectory = '';
this.publicKey = null;
this.privateKey = null;
| identifier_body |
create-test-list.py | #!/usr/bin/env python
"""
Rules
for *.py files
* if the changed file is __init__.py, and there is a side-band test/ dir, then test the entire test/functional directory
the reason for this is that the init files usually organize collections,
and those can affect many different APIs if they break
* if the filename is test_*.py then include it
* if the filename is *.py, then check to see if it has an associated test_FILENAME file
and if so, include it in the test
* summarize all of the above so that a test_FILENAME that lies under a directory selected by the first bullet
is not tested twice
for non-*.py files
* if the file is in a test/functional directory, test the whole directory
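Example rule applications (hypothetical paths):
* f5/bigip/tm/ltm/pool.py -> f5/bigip/tm/ltm/test/functional/test_pool.py (if present)
* f5/bigip/tm/ltm/__init__.py -> f5/bigip/tm/ltm/test/functional/ (whole directory)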
"""
import subprocess
import os
import shutil
import argparse
def cleanup_tox_directory():
if os.path.exists('.tox'):
shutil.rmtree('.tox')
def examine_python_rules(line):
fname, fext = os.path.splitext(line)
filename = os.path.basename(line)
dirname = os.path.dirname(line)
test_filename = 'test_' + filename
functional_test_file = '{0}/test/functional/{1}'.format(dirname, test_filename)
functional_test_dir = '{0}/test/functional/'.format(dirname)
if filename == '__init__.py' and os.path.exists(functional_test_dir):
return functional_test_dir
elif filename.startswith('test_') and filename.endswith('.py'):
return line
elif fext == '.py' and os.path.exists(functional_test_file):
return functional_test_file
elif 'test/functional' in line and filename == '__init__.py':
print(" * Skipping {0} because it is not a test file".format(line))
elif filename == '__init__.py' and not os.path.exists(functional_test_dir):
print(" * {0} does not have a side-band test directory!".format(line))
else:
print(" * {0} did not match any rules!".format(line))
def examine_non_python_rules(line):
if 'test/functional' in line:
return os.path.dirname(line)
def determine_files_to_test(product, commit):
results = []
build_all = [
'setup.py', 'f5/bigip/contexts.py', 'f5/bigip/mixins.py',
'f5/bigip/resource.py', 'f5sdk_plugins/fixtures.py',
'f5/bigip/__init__.py'
]
output_file = "pytest.{0}.jenkins.txt".format(product)
p1 = subprocess.Popen(
['git', '--no-pager', 'diff', '--name-only', 'origin/development', commit],
stdout=subprocess.PIPE,
)
p2 = subprocess.Popen(
['egrep', '-v', '(^requirements\.|^setup.py)'],
stdin=p1.stdout,
stdout=subprocess.PIPE,
)
p3 = subprocess.Popen(
['egrep', '(^f5\/{0}\/)'.format(product)],
stdin=p2.stdout,
stdout=subprocess.PIPE,
)
out, err = p3.communicate()
out = out.splitlines()
out = filter(None, out)
if not out:
return
for line in out:
fname, fext = os.path.splitext(line)
if not os.path.exists(line):
print("{0} was not found. Maybe this is a rename?".format(line))
continue
if line in build_all:
cleanup_tox_directory()
results.append('f5/{0}'.format(product))
elif fext == '.py':
result = examine_python_rules(line)
if result:
results.append(result)
else:
result = examine_non_python_rules(line)
if result:
results.append(result)
if results:
results = set(results)
results = compress_testable_files(results)
fh = open(output_file, 'w')
fh.writelines("%s\n" % l for l in results)
fh.close()
def | (files):
lines = sorted(files)
for idx, item in enumerate(lines):
file, ext = os.path.splitext(item)
if not ext and not file.endswith('/'):
item += '/'
tmp = [x for x in lines if item in x and item != x]
for _ in tmp:
lines.remove(_)
return lines
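# Illustrative behavior (hypothetical input): a directory entry swallows its
# children, e.g. ['f5/bigip/test/functional/', 'f5/bigip/test/functional/test_x.py']
# collapses to ['f5/bigip/test/functional/'].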
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c','--commit', help='Git commit to check', required=True)
args = parser.parse_args()
for product in ['iworkflow', 'bigip', 'bigiq']:
determine_files_to_test(product, args.commit)
| compress_testable_files | identifier_name |
create-test-list.py | #!/usr/bin/env python
"""
Rules
for *.py files
* if the changed file is __init__.py, and there is a side-band test/ dir, then test the entire test/functional directory
the reason for this is that the init files usually organize collections,
and those can affect many different APIs if they break
* if the filename is test_*.py then include it
* if the filename is *.py, then check to see if it has an associated test_FILENAME file
and if so, include it in the test
* summarize all of the above so that a test_FILENAME that lies under a directory selected by the first bullet
is not tested twice
for non-*.py files
* if the file is in a test/functional directory, test the whole directory
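Example rule applications (hypothetical paths):
* f5/bigip/tm/ltm/pool.py -> f5/bigip/tm/ltm/test/functional/test_pool.py (if present)
* f5/bigip/tm/ltm/__init__.py -> f5/bigip/tm/ltm/test/functional/ (whole directory)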
"""
import subprocess
import os
import shutil
import argparse
def cleanup_tox_directory():
if os.path.exists('.tox'):
shutil.rmtree('.tox')
def examine_python_rules(line):
fname, fext = os.path.splitext(line)
filename = os.path.basename(line)
dirname = os.path.dirname(line)
test_filename = 'test_' + filename
functional_test_file = '{0}/test/functional/{1}'.format(dirname, test_filename)
functional_test_dir = '{0}/test/functional/'.format(dirname)
if filename == '__init__.py' and os.path.exists(functional_test_dir):
return functional_test_dir
elif filename.startswith('test_') and filename.endswith('.py'):
return line
elif fext == '.py' and os.path.exists(functional_test_file):
return functional_test_file
elif 'test/functional' in line and filename == '__init__.py':
print(" * Skipping {0} because it is not a test file".format(line))
elif filename == '__init__.py' and not os.path.exists(functional_test_dir):
print(" * {0} does not have a side-band test directory!".format(line))
else:
print(" * {0} did not match any rules!".format(line))
def examine_non_python_rules(line):
if 'test/functional' in line:
return os.path.dirname(line)
def determine_files_to_test(product, commit):
results = []
build_all = [
'setup.py', 'f5/bigip/contexts.py', 'f5/bigip/mixins.py',
'f5/bigip/resource.py', 'f5sdk_plugins/fixtures.py',
'f5/bigip/__init__.py'
]
output_file = "pytest.{0}.jenkins.txt".format(product)
p1 = subprocess.Popen(
['git', '--no-pager', 'diff', '--name-only', 'origin/development', commit],
stdout=subprocess.PIPE,
)
p2 = subprocess.Popen(
['egrep', '-v', '(^requirements\.|^setup.py)'],
stdin=p1.stdout,
stdout=subprocess.PIPE,
)
p3 = subprocess.Popen(
['egrep', '(^f5\/{0}\/)'.format(product)],
stdin=p2.stdout,
stdout=subprocess.PIPE,
)
out, err = p3.communicate()
out = out.splitlines()
out = filter(None, out)
if not out:
return
for line in out:
fname, fext = os.path.splitext(line)
if not os.path.exists(line):
print("{0} was not found. Maybe this is a rename?".format(line))
continue
if line in build_all:
cleanup_tox_directory()
results.append('f5/{0}'.format(product))
elif fext == '.py':
result = examine_python_rules(line)
if result:
results.append(result)
else:
result = examine_non_python_rules(line)
if result:
results.append(result)
if results:
results = set(results)
results = compress_testable_files(results)
fh = open(output_file, 'w')
fh.writelines("%s\n" % l for l in results)
fh.close()
def compress_testable_files(files):
lines = sorted(files)
for idx, item in enumerate(lines):
file, ext = os.path.splitext(item)
if not ext and not file.endswith('/'):
item += '/'
tmp = [x for x in lines if item in x and item != x]
for _ in tmp:
lines.remove(_)
return lines
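# Illustrative behavior (hypothetical input): a directory entry swallows its
# children, e.g. ['f5/bigip/test/functional/', 'f5/bigip/test/functional/test_x.py']
# collapses to ['f5/bigip/test/functional/'].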
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c','--commit', help='Git commit to check', required=True)
args = parser.parse_args()
| for product in ['iworkflow', 'bigip', 'bigiq']:
determine_files_to_test(product, args.commit) | random_line_split |
|
create-test-list.py | #!/usr/bin/env python
"""
Rules
for *.py files
* if the changed file is __init__.py, and there is a side-band test/ dir, then test the entire test/functional directory
the reason for this is that the init files usually organize collections,
and those can affect many different APIs if they break
* if the filename is test_*.py then include it
* if the filename is *.py, then check to see if it has an associated test_FILENAME file
and if so, include it in the test
* summarize all of the above so that a test_FILENAME that lies under a directory selected by the first bullet
is not tested twice
for non-*.py files
* if the file is in a test/functional directory, test the whole directory
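Example rule applications (hypothetical paths):
* f5/bigip/tm/ltm/pool.py -> f5/bigip/tm/ltm/test/functional/test_pool.py (if present)
* f5/bigip/tm/ltm/__init__.py -> f5/bigip/tm/ltm/test/functional/ (whole directory)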
"""
import subprocess
import os
import shutil
import argparse
def cleanup_tox_directory():
if os.path.exists('.tox'):
shutil.rmtree('.tox')
def examine_python_rules(line):
fname, fext = os.path.splitext(line)
filename = os.path.basename(line)
dirname = os.path.dirname(line)
test_filename = 'test_' + filename
functional_test_file = '{0}/test/functional/{1}'.format(dirname, test_filename)
functional_test_dir = '{0}/test/functional/'.format(dirname)
if filename == '__init__.py' and os.path.exists(functional_test_dir):
return functional_test_dir
elif filename.startswith('test_') and filename.endswith('.py'):
return line
elif fext == '.py' and os.path.exists(functional_test_file):
return functional_test_file
elif 'test/functional' in line and filename == '__init__.py':
print(" * Skipping {0} because it is not a test file".format(line))
elif filename == '__init__.py' and not os.path.exists(functional_test_dir):
print(" * {0} does not have a side-band test directory!".format(line))
else:
print(" * {0} did not match any rules!".format(line))
def examine_non_python_rules(line):
if 'test/functional' in line:
return os.path.dirname(line)
def determine_files_to_test(product, commit):
results = []
build_all = [
'setup.py', 'f5/bigip/contexts.py', 'f5/bigip/mixins.py',
'f5/bigip/resource.py', 'f5sdk_plugins/fixtures.py',
'f5/bigip/__init__.py'
]
output_file = "pytest.{0}.jenkins.txt".format(product)
p1 = subprocess.Popen(
['git', '--no-pager', 'diff', '--name-only', 'origin/development', commit],
stdout=subprocess.PIPE,
)
p2 = subprocess.Popen(
['egrep', '-v', '(^requirements\.|^setup.py)'],
stdin=p1.stdout,
stdout=subprocess.PIPE,
)
p3 = subprocess.Popen(
['egrep', '(^f5\/{0}\/)'.format(product)],
stdin=p2.stdout,
stdout=subprocess.PIPE,
)
out, err = p3.communicate()
out = out.splitlines()
out = filter(None, out)
if not out:
return
for line in out:
fname, fext = os.path.splitext(line)
if not os.path.exists(line):
print("{0} was not found. Maybe this is a rename?".format(line))
continue
if line in build_all:
cleanup_tox_directory()
results.append('f5/{0}'.format(product))
elif fext == '.py':
result = examine_python_rules(line)
if result:
results.append(result)
else:
result = examine_non_python_rules(line)
if result:
results.append(result)
if results:
results = set(results)
results = compress_testable_files(results)
fh = open(output_file, 'w')
fh.writelines("%s\n" % l for l in results)
fh.close()
def compress_testable_files(files):
|
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c','--commit', help='Git commit to check', required=True)
args = parser.parse_args()
for product in ['iworkflow', 'bigip', 'bigiq']:
determine_files_to_test(product, args.commit)
| lines = sorted(files)
for idx, item in enumerate(lines):
file, ext = os.path.splitext(item)
if not ext and not file.endswith('/'):
item += '/'
tmp = [x for x in lines if item in x and item != x]
for _ in tmp:
lines.remove(_)
return lines | identifier_body |
create-test-list.py | #!/usr/bin/env python
"""
Rules
for *.py files
* if the changed file is __init__.py, and there is a side-band test/ dir, then test the entire test/functional directory
the reason for this is that the init files usually organize collections,
and those can affect many different APIs if they break
* if the filename is test_*.py then include it
* if the filename is *.py, then check to see if it has an associated test_FILENAME file
and if so, include it in the test
* summarize all of the above so that a test_FILENAME that lies under a directory selected by the first bullet
is not tested twice
for non-*.py files
* if the file is in a test/functional directory, test the whole directory
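Example rule applications (hypothetical paths):
* f5/bigip/tm/ltm/pool.py -> f5/bigip/tm/ltm/test/functional/test_pool.py (if present)
* f5/bigip/tm/ltm/__init__.py -> f5/bigip/tm/ltm/test/functional/ (whole directory)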
"""
import subprocess
import os
import shutil
import argparse
def cleanup_tox_directory():
if os.path.exists('.tox'):
shutil.rmtree('.tox')
def examine_python_rules(line):
fname, fext = os.path.splitext(line)
filename = os.path.basename(line)
dirname = os.path.dirname(line)
test_filename = 'test_' + filename
functional_test_file = '{0}/test/functional/{1}'.format(dirname, test_filename)
functional_test_dir = '{0}/test/functional/'.format(dirname)
if filename == '__init__.py' and os.path.exists(functional_test_dir):
return functional_test_dir
elif filename.startswith('test_') and filename.endswith('.py'):
return line
elif fext == '.py' and os.path.exists(functional_test_file):
return functional_test_file
elif 'test/functional' in line and filename == '__init__.py':
print(" * Skipping {0} because it is not a test file".format(line))
elif filename == '__init__.py' and not os.path.exists(functional_test_dir):
print(" * {0} does not have a side-band test directory!".format(line))
else:
print(" * {0} did not match any rules!".format(line))
def examine_non_python_rules(line):
if 'test/functional' in line:
return os.path.dirname(line)
def determine_files_to_test(product, commit):
results = []
build_all = [
'setup.py', 'f5/bigip/contexts.py', 'f5/bigip/mixins.py',
'f5/bigip/resource.py', 'f5sdk_plugins/fixtures.py',
'f5/bigip/__init__.py'
]
output_file = "pytest.{0}.jenkins.txt".format(product)
p1 = subprocess.Popen(
['git', '--no-pager', 'diff', '--name-only', 'origin/development', commit],
stdout=subprocess.PIPE,
)
p2 = subprocess.Popen(
['egrep', '-v', '(^requirements\.|^setup.py)'],
stdin=p1.stdout,
stdout=subprocess.PIPE,
)
p3 = subprocess.Popen(
['egrep', '(^f5\/{0}\/)'.format(product)],
stdin=p2.stdout,
stdout=subprocess.PIPE,
)
out, err = p3.communicate()
out = out.splitlines()
out = filter(None, out)
if not out:
return
for line in out:
fname, fext = os.path.splitext(line)
if not os.path.exists(line):
print("{0} was not found. Maybe this is a rename?".format(line))
continue
if line in build_all:
cleanup_tox_directory()
results.append('f5/{0}'.format(product))
elif fext == '.py':
result = examine_python_rules(line)
if result:
results.append(result)
else:
result = examine_non_python_rules(line)
if result:
|
if results:
results = set(results)
results = compress_testable_files(results)
fh = open(output_file, 'w')
fh.writelines("%s\n" % l for l in results)
fh.close()
def compress_testable_files(files):
lines = sorted(files)
for idx, item in enumerate(lines):
file, ext = os.path.splitext(item)
if not ext and not file.endswith('/'):
item += '/'
tmp = [x for x in lines if item in x and item != x]
for _ in tmp:
lines.remove(_)
return lines
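# Illustrative behavior (hypothetical input): a directory entry swallows its
# children, e.g. ['f5/bigip/test/functional/', 'f5/bigip/test/functional/test_x.py']
# collapses to ['f5/bigip/test/functional/'].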
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c','--commit', help='Git commit to check', required=True)
args = parser.parse_args()
for product in ['iworkflow', 'bigip', 'bigiq']:
determine_files_to_test(product, args.commit)
| results.append(result) | conditional_block |
traceback.rs | /// m (pattern length).
/// Returns the expected size of the vector storing the calculated blocks given this
/// information. The vector will then be initialized with the given number of 'empty'
/// State<T, D> objects and supplied to the other methods as a slice.
fn init(&mut self, n: usize, m: D) -> usize;
/// Fill the column at `pos` with states initialized with the maximum distance
/// (`State::max()`).
fn set_max_state(&self, pos: usize, states: &mut [State<T, D>]);
/// This method copies over all blocks (or the one block) from a traceback column
/// into the mutable `states` slice at the given column position.
fn add_state(&self, source: &Self::TracebackColumn, pos: usize, states: &mut [State<T, D>]);
/// Initiates a `TracebackHandler` object to assist with a traceback, 'starting'
/// at the given end position.
fn init_traceback(&self, m: D, pos: usize, states: &'a [State<T, D>])
-> Self::TracebackHandler;
}
/// Objects implementing this trait should store states and have methods
/// necessary for obtaining a single traceback path. This makes it possible to use the
/// same traceback code for the simple and the block-based Myers pattern
/// matching approaches. It is designed to be as general as possible
/// to allow different implementations.
///
/// Implementors of `TracebackHandler` keep two `State<T, D>` instances,
/// which store the information from two horizontally adjacent traceback
/// columns, encoded in the PV / MV bit vectors. The columns are accessible
/// using the methods `block()` (current / right column) and `left_block()`
/// (left column). Moving horizontally to the next position can be achieved
/// using `move_to_left()`.
///
/// Implementors also track the vertical cursor positions within the current
/// traceback columns (two separate cursors for left and right column).
/// `block()` and `left_block()` will always return the block that currently
/// contains the cursors.
/// `pos_bitvec()` returns a bit vector with a single activated bit at the current
/// vertical position within the *right (current)* column.
/// Moving to the next vertical position is achieved by `move_up()` and
/// `move_up_left()`. With the block based implementation, this may involve
/// switching to a new block.
pub(super) trait TracebackHandler<'a, T, D>
where
T: BitVec + 'a,
D: DistType,
{
/// Returns a reference to the current (right) block.
fn block(&self) -> &State<T, D>;
/// Returns a mutable reference to the current (right) block.
fn block_mut(&mut self) -> &mut State<T, D>;
/// Returns a reference to the left block.
fn left_block(&self) -> &State<T, D>;
/// Returns a mutable reference to the left block.
fn left_block_mut(&mut self) -> &mut State<T, D>;
/// Bit vector representing the position in the traceback. Only the bit
/// at the current position should be on.
/// For a search pattern of length 4, the initial bit vector would be
/// `0b1000`. A call to `move_up()` will shift the vector, so another
/// call to `pos_bitvec()` results in `0b100`.
/// The bit vector has a width of `T`, meaning that it can store
/// the same number of positions as the PV and MV vectors. In the
/// case of the block based algorithm, the vector only stores the
/// position within the current block.
fn pos_bitvec(&self) -> T;
/// Move up cursor by one position in traceback matrix.
///
/// # Arguments
///
/// * adjust_dist: If true, the distance score of the block is adjusted
/// based on the current cursor position before moving it up.
/// *Note concerning the block based Myers algorithm:*
/// The active bit in the bit vector returned by `pos_bitvec()`
/// is expected to jump back to the maximum (lowest) position
/// when reaching the uppermost position (like `rotate_right()` does).
fn move_up(&mut self, adjust_dist: bool);
/// Move up left cursor by one position in traceback matrix.
///
/// # Arguments
///
/// * adjust_dist: If true, the distance score of the block is adjusted
/// based on the current cursor position before moving it up.
/// However, the current cursor position of the **right** block is used,
/// **not** that of the left block. This is an important oddity, which
/// only makes sense because of the design of the traceback algorithm.
fn move_up_left(&mut self, adjust_dist: bool);
/// Shift the view by one traceback column / block to the left. The
/// block that was on the left position previously moves to the right /
/// current block without changes. The cursor positions have to be
/// adjusted independently if necessary using `move_up(false)` /
/// `move_up_left(false)`.
/// `move_to_left()` adjusts the distance score of the new left block to
/// be correct for the left vertical cursor position. It is therefore
/// important that the cursor is moved *before* calling `move_to_left()`.
fn move_to_left(&mut self);
/// Rather specialized method that allows for simpler code in `Traceback::_traceback_at()`.
/// Checks if the position below the left cursor has a smaller distance, and if so,
/// moves the cursor to this block and returns `true`.
///
/// The problem is that the current implementation always keeps the left cursor in the
/// diagonal position for performance reasons. In this case, checking the actual left
/// distance score can be complicated with the block-based algorithm since the left cursor
/// may be at the lower block boundary. If so, the function thus has to check the topmost
/// position of the lower block and keep this block if the distance is better (lower).
fn move_left_down_if_better(&mut self) -> bool;
/// Returns a slice containing all blocks of the current traceback column
/// from top to bottom. Used for debugging only.
fn column_slice(&self) -> &[State<T, D>];
/// Returns true if topmost position in the traceback matrix has been reached,
/// meaning that the traceback is complete.
/// Technically, this means that `move_up()` has been called enough times that
/// the uppermost block was reached and `pos_bitvec()` no longer contains
/// any set bit, since shifting has removed it from the vector.
fn finished(&self) -> bool;
/// For debugging only
fn print_state(&self) {
println!(
"--- TB dist ({:?} <-> {:?})",
self.left_block().dist,
self.block().dist
);
println!(
"{:064b} m\n{:064b} + ({:?}) (left) d={:?}\n{:064b} - ({:?})\n \
{:064b} + ({:?}) (current) d={:?}\n{:064b} - ({:?})\n",
self.pos_bitvec(),
self.left_block().pv,
self.left_block().pv,
self.left_block().dist,
self.left_block().mv,
self.left_block().mv,
self.block().pv,
self.block().pv,
self.block().dist,
self.block().mv,
self.block().mv
);
}
}
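// Illustrative use of the handler API (a sketch only; the actual decision logic
// lives in `Traceback::_traceback_at()`): starting from the end position, a
// traceback repeatedly compares `block()` / `left_block()` distances to choose a
// diagonal (`move_up_left` + `move_to_left`), vertical (`move_up`) or horizontal
// (`move_to_left`) step, until `finished()` reports that the top row was reached.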
pub(super) struct Traceback<'a, T, D, H>
where
T: BitVec + 'a,
D: DistType,
H: StatesHandler<'a, T, D>,
{
m: D,
positions: iter::Cycle<Range<usize>>,
handler: H,
pos: usize,
_t: PhantomData<&'a T>,
}
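// `positions` cycles through the column slots, so the column storage acts as a
// ring buffer: once all slots have been written, new columns overwrite the oldest.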
impl<'a, T, D, H> Traceback<'a, T, D, H>
where
T: BitVec,
D: DistType,
H: StatesHandler<'a, T, D>,
{
#[inline]
pub fn new(
states: &mut Vec<State<T, D>>,
initial_state: &H::TracebackColumn,
num_cols: usize,
m: D,
mut handler: H,
) -> Self {
// Correct traceback needs two additional columns at the left of the matrix (see below).
// Therefore reserving additional space.
let num_cols = num_cols + 2;
let n_states = handler.init(num_cols, m);
let mut tb = Traceback {
m,
positions: (0..num_cols).cycle(),
handler,
pos: 0,
_t: PhantomData,
};
// extend or truncate states vector
let curr_len = states.len();
if n_states > curr_len {
states.reserve(n_states);
states.extend((0..n_states - curr_len).map(|_| State::default()));
} else {
states.truncate(n_states);
states.shrink_to_fit();
}
| /// Prepare for a new search given n (maximum expected number of traceback columns) and | random_line_split |
|
traceback.rs | -> Self::TracebackHandler;
}
/// Objects implementing this trait should store states and have methods
/// necessary for obtaining a single traceback path. This makes it possible to use the
/// same traceback code for the simple and the block-based Myers pattern
/// matching approaches. It is designed to be as general as possible
/// to allow different implementations.
///
/// Implementors of `TracebackHandler` keep two `State<T, D>` instances,
/// which store the information from two horizontally adjacent traceback
/// columns, encoded in the PV / MV bit vectors. The columns are accessible
/// using the methods `block()` (current / right column) and `left_block()`
/// (left column). Moving horizontally to the next position can be achieved
/// using `move_to_left()`.
///
/// Implementors also track the vertical cursor positions within the current
/// traceback columns (two separate cursors for left and right column).
/// `block()` and `left_block()` will always return the block that currently
/// contains the cursors.
/// `pos_bitvec()` returns a bit vector with a single activated bit at the current
/// vertical position within the *right (current)* column.
/// Moving to the next vertical position is achieved by `move_up()` and
/// `move_up_left()`. With the block based implementation, this may involve
/// switching to a new block.
pub(super) trait TracebackHandler<'a, T, D>
where
T: BitVec + 'a,
D: DistType,
{
/// Returns a reference to the current (right) block.
fn block(&self) -> &State<T, D>;
/// Returns a mutable reference to the current (right) block.
fn block_mut(&mut self) -> &mut State<T, D>;
/// Returns a reference to the left block.
fn left_block(&self) -> &State<T, D>;
/// Returns a mutable reference to the left block.
fn left_block_mut(&mut self) -> &mut State<T, D>;
/// Bit vector representing the position in the traceback. Only the bit
/// at the current position should be on.
/// For a search pattern of length 4, the initial bit vector would be
/// `0b1000`. A call to `move_up()` will shift the vector, so another
/// call to `pos_bitvec()` results in `0b100`.
/// The bit vector has a width of `T`, meaning that it can store
/// the same number of positions as the PV and MV vectors. In the
/// case of the block based algorithm, the vector only stores the
/// position within the current block.
fn pos_bitvec(&self) -> T;
/// Move up cursor by one position in traceback matrix.
///
/// # Arguments
///
/// * adjust_dist: If true, the distance score of the block is adjusted
/// based on the current cursor position before moving it up.
/// *Note concerning the block based Myers algorithm:*
/// The the active bit in bit vector returned by `pos_bitvec()`
/// is expected to jump back to the maximum (lowest) position
/// when reaching the uppermost position (like `rotate_right()` does).
fn move_up(&mut self, adjust_dist: bool);
/// Move up left cursor by one position in traceback matrix.
///
/// # Arguments
///
/// * adjust_dist: If true, the distance score of the block is adjusted
/// based on the current cursor position before moving it up.
/// However, the current cursor position of the **right** block is used,
/// **not** that of the left block. This is an important oddity, which
/// only makes sense because of the design of the traceback algorithm.
fn move_up_left(&mut self, adjust_dist: bool);
/// Shift the view by one traceback column / block to the left. The
/// block that was on the left position previously moves to the right /
/// current block without changes. The cursor positions have to be
/// adjusted independently if necessary using `move_up(false)` /
/// `move_up_left(false)`.
/// `move_to_left()` adjusts the distance score of the new left block to
/// be correct for the left vertical cursor position. It is therefore
/// important that the cursor is moved *before* calling `move_to_left()`.
fn move_to_left(&mut self);
/// Rather specialized method that allows for simpler code in `Traceback::_traceback_at()`.
/// Checks if the position below the left cursor has a smaller distance, and if so,
/// moves the cursor to this block and returns `true`.
///
/// The problem is that the current implementation always keeps the left cursor in the
/// diagonal position for performance reasons. In this case, checking the actual left
/// distance score can be complicated with the block-based algorithm since the left cursor
/// may be at the lower block boundary. If so, the function thus has to check the topmost
/// position of the lower block and keep this block if the distance is better (lower).
fn move_left_down_if_better(&mut self) -> bool;
/// Returns a slice containing all blocks of the current traceback column
/// from top to bottom. Used for debugging only.
fn column_slice(&self) -> &[State<T, D>];
/// Returns true if the topmost position in the traceback matrix has been
/// reached, meaning that the traceback is complete.
/// Technically this means that `move_up()` was called enough times that
/// the uppermost block was reached and `pos_bitvec()` no longer contains
/// a set bit, since shifting has removed it from the vector.
fn finished(&self) -> bool;
/// For debugging only
fn print_state(&self) {
println!(
"--- TB dist ({:?} <-> {:?})",
self.left_block().dist,
self.block().dist
);
println!(
"{:064b} m\n{:064b} + ({:?}) (left) d={:?}\n{:064b} - ({:?})\n \
{:064b} + ({:?}) (current) d={:?}\n{:064b} - ({:?})\n",
self.pos_bitvec(),
self.left_block().pv,
self.left_block().pv,
self.left_block().dist,
self.left_block().mv,
self.left_block().mv,
self.block().pv,
self.block().pv,
self.block().dist,
self.block().mv,
self.block().mv
);
}
}
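// A minimal sketch (assumptions: `T = u64`, pattern length m = 4, not part
// of the trait itself) of the cursor bit vector documented above: `0b1000`
// marks the bottom row, each `move_up()` shifts it right, and an empty
// vector is exactly the `finished()` condition.
#[cfg(test)]
mod pos_bitvec_sketch {
    #[test]
    fn cursor_bit_moves_up_by_shifting_right() {
        let mut pos: u64 = 1 << 3; // 0b1000: bottom position for m = 4
        pos >>= 1; // one move up
        assert_eq!(pos, 0b100);
        pos >>= 3; // past the top: the bit is shifted out entirely
        assert_eq!(pos, 0); // traceback is finished
    }
}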
pub(super) struct Traceback<'a, T, D, H>
where
T: BitVec + 'a,
D: DistType,
H: StatesHandler<'a, T, D>,
{
m: D,
positions: iter::Cycle<Range<usize>>,
handler: H,
pos: usize,
_t: PhantomData<&'a T>,
}
impl<'a, T, D, H> Traceback<'a, T, D, H>
where
T: BitVec,
D: DistType,
H: StatesHandler<'a, T, D>,
{
#[inline]
pub fn | (
states: &mut Vec<State<T, D>>,
initial_state: &H::TracebackColumn,
num_cols: usize,
m: D,
mut handler: H,
) -> Self {
// Correct traceback needs two additional columns at the left of the matrix (see below).
// Therefore reserving additional space.
let num_cols = num_cols + 2;
let n_states = handler.init(num_cols, m);
let mut tb = Traceback {
m,
positions: (0..num_cols).cycle(),
handler,
pos: 0,
_t: PhantomData,
};
// extend or truncate states vector
let curr_len = states.len();
if n_states > curr_len {
states.reserve(n_states);
states.extend((0..n_states - curr_len).map(|_| State::default()));
} else {
states.truncate(n_states);
states.shrink_to_fit();
}
// important if using unsafe in add_state(), and also for correct functioning of traceback
debug_assert!(states.len() == n_states);
// first column is used to ensure a correct path if the text (target)
// is shorter than the pattern (query)
tb.pos = tb.positions.next().unwrap();
tb.handler.set_max_state(tb.pos, states);
// initial state
tb.add_state(initial_state, states);
tb
}
#[inline]
pub fn add_state(&mut self, column: &H::TracebackColumn, states: &mut [State<T, D>]) {
self.pos = self.positions.next().unwrap();
self.handler.add_state(column, self.pos, states);
}
/// Returns the length of the current match, optionally adding the
/// alignment path to `ops`
#[inline]
pub fn traceback(
&self,
ops: Option<&mut Vec<AlignmentOperation>>,
states: &'a [State<T, D>],
) -> (D, D) {
self._traceback_at(self.pos, ops, states)
}
/// Returns the length of a match with a given end position, optionally adding the
/// alignment path to `ops`
/// only | new | identifier_name |
traceback.rs | returns `true`.
///
/// The problem is that the current implementation always keeps the left cursor in the
/// diagonal position for performance reasons. In this case, checking the actual left
/// distance score can be complicated with the block-based algorithm since the left cursor
/// may be at the lower block boundary. If so, the function thus has to check the topmost
/// position of the lower block and keep this block if the distance is better (lower).
fn move_left_down_if_better(&mut self) -> bool;
/// Returns a slice containing all blocks of the current traceback column
/// from top to bottom. Used for debugging only.
fn column_slice(&self) -> &[State<T, D>];
/// Returns true if the topmost position in the traceback matrix has been
/// reached, meaning that the traceback is complete.
/// Technically this means that `move_up()` was called enough times that
/// the uppermost block was reached and `pos_bitvec()` no longer contains
/// a set bit, since shifting has removed it from the vector.
fn finished(&self) -> bool;
/// For debugging only
fn print_state(&self) {
println!(
"--- TB dist ({:?} <-> {:?})",
self.left_block().dist,
self.block().dist
);
println!(
"{:064b} m\n{:064b} + ({:?}) (left) d={:?}\n{:064b} - ({:?})\n \
{:064b} + ({:?}) (current) d={:?}\n{:064b} - ({:?})\n",
self.pos_bitvec(),
self.left_block().pv,
self.left_block().pv,
self.left_block().dist,
self.left_block().mv,
self.left_block().mv,
self.block().pv,
self.block().pv,
self.block().dist,
self.block().mv,
self.block().mv
);
}
}
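// Sketch of the distance test that later drives the traceback's diagonal
// (substitution) case in `_traceback_at()` (assumption: plain `u32`
// distances instead of the generic `D: DistType`): a substitution is taken
// exactly when the left diagonal cell's distance plus one equals the
// current cell's distance.
#[cfg(test)]
mod subst_test_sketch {
    #[test]
    fn substitution_recognized_by_distance_delta_of_one() {
        let left_diagonal_dist: u32 = 2;
        let current_dist: u32 = 3;
        // Mirrors `h.left_block().dist.wrapping_add(&D::one()) == h.block().dist`.
        assert_eq!(left_diagonal_dist.wrapping_add(1), current_dist);
    }
}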
pub(super) struct Traceback<'a, T, D, H>
where
T: BitVec + 'a,
D: DistType,
H: StatesHandler<'a, T, D>,
{
m: D,
positions: iter::Cycle<Range<usize>>,
handler: H,
pos: usize,
_t: PhantomData<&'a T>,
}
impl<'a, T, D, H> Traceback<'a, T, D, H>
where
T: BitVec,
D: DistType,
H: StatesHandler<'a, T, D>,
{
#[inline]
pub fn new(
states: &mut Vec<State<T, D>>,
initial_state: &H::TracebackColumn,
num_cols: usize,
m: D,
mut handler: H,
) -> Self {
// Correct traceback needs two additional columns at the left of the matrix (see below).
// Therefore reserving additional space.
let num_cols = num_cols + 2;
let n_states = handler.init(num_cols, m);
let mut tb = Traceback {
m,
positions: (0..num_cols).cycle(),
handler,
pos: 0,
_t: PhantomData,
};
// extend or truncate states vector
let curr_len = states.len();
if n_states > curr_len {
states.reserve(n_states);
states.extend((0..n_states - curr_len).map(|_| State::default()));
} else {
states.truncate(n_states);
states.shrink_to_fit();
}
// important if using unsafe in add_state(), and also for correct functioning of traceback
debug_assert!(states.len() == n_states);
// first column is used to ensure a correct path if the text (target)
// is shorter than the pattern (query)
tb.pos = tb.positions.next().unwrap();
tb.handler.set_max_state(tb.pos, states);
// initial state
tb.add_state(initial_state, states);
tb
}
#[inline]
pub fn add_state(&mut self, column: &H::TracebackColumn, states: &mut [State<T, D>]) {
self.pos = self.positions.next().unwrap();
self.handler.add_state(column, self.pos, states);
}
/// Returns the length of the current match, optionally adding the
/// alignment path to `ops`
#[inline]
pub fn traceback(
&self,
ops: Option<&mut Vec<AlignmentOperation>>,
states: &'a [State<T, D>],
) -> (D, D) {
self._traceback_at(self.pos, ops, states)
}
/// Returns the length of a match with a given end position, optionally adding the
/// alignment path to `ops`
/// only to be called if the `states` vec contains all states of the text
#[inline]
pub fn traceback_at(
&self,
pos: usize,
ops: Option<&mut Vec<AlignmentOperation>>,
states: &'a [State<T, D>],
) -> Option<(D, D)> {
let pos = pos + 2; // in order to be comparable since self.pos starts at 2, not 0
if pos <= self.pos {
return Some(self._traceback_at(pos, ops, states));
}
None
}
/// returns a tuple of alignment length and hit distance, optionally adding the alignment path
/// to `ops`
#[inline]
fn _traceback_at(
&self,
pos: usize,
mut ops: Option<&mut Vec<AlignmentOperation>>,
state_slice: &'a [State<T, D>],
) -> (D, D) {
use self::AlignmentOperation::*;
// Generic object that holds the necessary data and methods
let mut h = self.handler.init_traceback(self.m, pos, state_slice);
// self.print_tb_matrix(pos, state_slice);
let ops = &mut ops;
// horizontal column offset from starting point in traceback matrix (bottom right)
let mut h_offset = D::zero();
// distance of the match (will be returned)
let dist = h.block().dist;
// The cursor of the left state always points at the diagonal position in the traceback matrix.
// This allows checking for a substitution by a simple comparison.
h.move_up_left(true);
// Loop for finding the traceback path
// If there are several possible solutions, substitutions are preferred over InDels
// (Subst > Ins > Del)
while !h.finished() {
let op;
// This loop is used to allow skipping `move_left()` using break (kind of similar
// to 'goto'). This was done to avoid having to inline move_left() three times,
// which would use more space.
#[allow(clippy::never_loop)]
loop {
// h.print_state();
if h.left_block().dist.wrapping_add(&D::one()) == h.block().dist {
// Diagonal (substitution)
// Since the left cursor is always in the upper diagonal position,
// a simple comparison of distances is enough to determine substitutions.
h.move_up(false);
h.move_up_left(false);
op = Subst;
} else if h.block().pv & h.pos_bitvec() != T::zero() {
// Up
h.move_up(true);
h.move_up_left(true);
op = Ins;
break;
} else if h.move_left_down_if_better() {
// Left
op = Del;
} else {
// Diagonal (match)
h.move_up(false);
h.move_up_left(false);
op = Match;
}
// Moving one position to the left, adjusting h_offset
h_offset += D::one();
h.move_to_left();
break;
}
// println!("{:?}", op);
if let Some(o) = ops.as_mut() {
o.push(op);
}
}
(h_offset, dist)
}
// Useful for debugging
#[allow(dead_code)]
fn print_tb_matrix(&self, pos: usize, state_slice: &'a [State<T, D>]) {
let mut h = self.handler.init_traceback(self.m, pos, state_slice);
let m = self.m.to_usize().unwrap();
let mut out = vec![];
for _ in 0..state_slice.len() {
let mut col_out = vec![];
let mut empty = true;
for (i, state) in h.column_slice().iter().enumerate().rev() {
if !(state.is_new() || state.is_max()) {
empty = false;
}
let w = word_size::<T>();
let end = (i + 1) * w;
let n = if end <= m { w } else { m % w };
state.write_dist_column(n, &mut col_out);
}
out.push(col_out);
h.move_to_left();
if empty {
break;
}
}
for j in (0..m).rev() {
print!("{:>4}: ", m - j + 1);
for col in out.iter().rev() {
if let Some(d) = col.get(j) | {
if *d >= (D::max_value() >> 1) {
// missing value
print!(" ");
} else {
print!("{:>4?}", d);
}
} | conditional_block |
|
traceback.rs | -> Self::TracebackHandler;
}
/// Objects implementing this trait should store states and have methods
/// necessary for obtaining a single traceback path. This allows using the
/// same traceback code for the simple and the block-based Myers pattern
/// matching approaches. It is designed to be as general as possible
/// to allow different implementations.
///
/// Implementors of `TracebackHandler` keep two `State<T, D>` instances,
/// which store the information from two horizontally adjacent traceback
/// columns, encoded in the PV / MV bit vectors. The columns are accessible
/// using the methods `block()` (current / right column) and `left_block()`
/// (left column). Moving horizontally to the next position can be achieved
/// using `move_left()`.
///
/// Implementors also track the vertical cursor positions within the current
/// traceback columns (two separate cursors for left and right column).
/// `block()` and `left_block()` will always return the block that currently
/// contains the respective cursor.
/// `pos_bitvec()` returns a bit vector with a single activated bit at the current
/// vertical position within the *right (current)* column.
/// Moving to the next vertical position is achieved by `move_up()` and
/// `move_up_left()`. With the block based implementation, this may involve
/// switching to a new block.
pub(super) trait TracebackHandler<'a, T, D>
where
T: BitVec + 'a,
D: DistType,
{
/// Returns a reference to the current (right) block.
fn block(&self) -> &State<T, D>;
/// Returns a mutable reference to the current (right) block.
fn block_mut(&mut self) -> &mut State<T, D>;
/// Returns a reference to the left block.
fn left_block(&self) -> &State<T, D>;
/// Returns a mutable reference to the left block.
fn left_block_mut(&mut self) -> &mut State<T, D>;
/// Bit vector representing the position in the traceback. Only the bit
/// at the current position should be on.
/// For a search pattern of length 4, the initial bit vector would be
/// `0b1000`. A call to `move_up()` will shift the vector, so another
/// call to `pos_bitvec()` results in `0b100`.
/// The bit vector has a width of `T`, meaning that it can store
/// the same number of positions as the PV and MV vectors. In the
/// case of the block based algorithm, the vector only stores the
/// position within the current block.
fn pos_bitvec(&self) -> T;
/// Move up cursor by one position in traceback matrix.
///
/// # Arguments
///
/// * adjust_dist: If true, the distance score of the block is adjusted
/// based on the current cursor position before moving it up.
/// *Note concerning the block based Myers algorithm:*
/// The active bit in the bit vector returned by `pos_bitvec()`
/// is expected to jump back to the maximum (lowest) position
/// when reaching the uppermost position (like `rotate_right()` does).
fn move_up(&mut self, adjust_dist: bool);
/// Move up left cursor by one position in traceback matrix.
///
/// # Arguments
///
/// * adjust_dist: If true, the distance score of the block is adjusted
/// based on the current cursor position before moving it up.
/// However, the current cursor position of the **right** block is used,
/// **not** the one of the left block. This is an important oddity, which
/// only makes sense because of the design of the traceback algorithm.
fn move_up_left(&mut self, adjust_dist: bool);
/// Shift the view by one traceback column / block to the left. The
/// block that was on the left position previously moves to the right /
/// current block without changes. The cursor positions have to be
/// adjusted independently if necessary using `move_up(false)` /
/// `move_up_left(false)`.
/// `move_left()` adjusts distance score of the new left block to
/// be correct for the left vertical cursor position. It is therefore
/// important that the cursor is moved *before* calling `move_left()`.
fn move_to_left(&mut self);
/// Rather specialized method that allows simpler code in `Traceback::_traceback_at()`.
/// Checks if the position below the left cursor has a smaller distance, and if so,
/// moves the cursor to this block and returns `true`.
///
/// The problem is that the current implementation always keeps the left cursor in the
/// diagonal position for performance reasons. In this case, checking the actual left
/// distance score can be complicated with the block-based algorithm since the left cursor
/// may be at the lower block boundary. If so, the function thus has to check the topmost
/// position of the lower block and keep this block if the distance is better (lower).
fn move_left_down_if_better(&mut self) -> bool;
/// Returns a slice containing all blocks of the current traceback column
/// from top to bottom. Used for debugging only.
fn column_slice(&self) -> &[State<T, D>];
/// Returns true if the topmost position in the traceback matrix has been
/// reached, meaning that the traceback is complete.
/// Technically this means that `move_up()` was called enough times that
/// the uppermost block was reached and `pos_bitvec()` no longer contains
/// a set bit, since shifting has removed it from the vector.
fn finished(&self) -> bool;
/// For debugging only
fn print_state(&self) | );
}
}
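// Sketch (assumed `u64` vectors) of the "move up" test used during the
// traceback loop: if the positive-vertical-delta (PV) bit at the cursor
// position is set, stepping up within the column is an insertion.
#[cfg(test)]
mod pv_bit_sketch {
    #[test]
    fn set_pv_bit_at_cursor_selects_insertion() {
        let pv: u64 = 0b0110; // +1 vertical deltas at rows 1 and 2
        let pos: u64 = 0b0100; // cursor bit at row 2
        // Mirrors `h.block().pv & h.pos_bitvec() != T::zero()`.
        assert_ne!(pv & pos, 0);
    }
}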
pub(super) struct Traceback<'a, T, D, H>
where
T: BitVec + 'a,
D: DistType,
H: StatesHandler<'a, T, D>,
{
m: D,
positions: iter::Cycle<Range<usize>>,
handler: H,
pos: usize,
_t: PhantomData<&'a T>,
}
impl<'a, T, D, H> Traceback<'a, T, D, H>
where
T: BitVec,
D: DistType,
H: StatesHandler<'a, T, D>,
{
#[inline]
pub fn new(
states: &mut Vec<State<T, D>>,
initial_state: &H::TracebackColumn,
num_cols: usize,
m: D,
mut handler: H,
) -> Self {
// Correct traceback needs two additional columns at the left of the matrix (see below).
// Therefore reserving additional space.
let num_cols = num_cols + 2;
let n_states = handler.init(num_cols, m);
let mut tb = Traceback {
m,
positions: (0..num_cols).cycle(),
handler,
pos: 0,
_t: PhantomData,
};
// extend or truncate states vector
let curr_len = states.len();
if n_states > curr_len {
states.reserve(n_states);
states.extend((0..n_states - curr_len).map(|_| State::default()));
} else {
states.truncate(n_states);
states.shrink_to_fit();
}
// important if using unsafe in add_state(), and also for correct functioning of traceback
debug_assert!(states.len() == n_states);
// first column is used to ensure a correct path if the text (target)
// is shorter than the pattern (query)
tb.pos = tb.positions.next().unwrap();
tb.handler.set_max_state(tb.pos, states);
// initial state
tb.add_state(initial_state, states);
tb
}
#[inline]
pub fn add_state(&mut self, column: &H::TracebackColumn, states: &mut [State<T, D>]) {
self.pos = self.positions.next().unwrap();
self.handler.add_state(column, self.pos, states);
}
/// Returns the length of the current match, optionally adding the
/// alignment path to `ops`
#[inline]
pub fn traceback(
&self,
ops: Option<&mut Vec<AlignmentOperation>>,
states: &'a [State<T, D>],
) -> (D, D) {
self._traceback_at(self.pos, ops, states)
}
/// Returns the length of a match with a given end position, optionally adding the
/// alignment path to `ops`
/// only to | {
println!(
"--- TB dist ({:?} <-> {:?})",
self.left_block().dist,
self.block().dist
);
println!(
"{:064b} m\n{:064b} + ({:?}) (left) d={:?}\n{:064b} - ({:?})\n \
{:064b} + ({:?}) (current) d={:?}\n{:064b} - ({:?})\n",
self.pos_bitvec(),
self.left_block().pv,
self.left_block().pv,
self.left_block().dist,
self.left_block().mv,
self.left_block().mv,
self.block().pv,
self.block().pv,
self.block().dist,
self.block().mv,
self.block().mv | identifier_body |
copy-entrypoints.ts | import * as fs from "fs";
import * as path from "path";
import { examples } from "./examples";
copyEntryPoints();
function assertHasProp<T extends string>(
value: unknown,
prop: T
): asserts value is Record<T, unknown> {
if (!value || typeof value !== "object" || !(prop in value)) {
throw new Error(
`Expected value to be an object containing property "${prop}", but it did not.`
);
}
}
function getProp<T extends string[]>(value: unknown, ...propPath: T): unknown {
let current = value;
for (const prop of propPath) {
assertHasProp(current, prop);
current = current[prop];
}
return current;
}
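// Usage sketch (hypothetical object, not read from disk): getProp() walks a
// property path, and assertHasProp() narrows `unknown` at every step, so a
// missing key throws instead of silently yielding undefined.
const sampleConfig: unknown = { projects: { demo: { root: "src/demo" } } };
const sampleRoot = getProp(sampleConfig, "projects", "demo", "root"); // "src/demo"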
function readOutputPath(angularConfiguration: unknown): string {
const outputPath = getProp(
angularConfiguration,
"projects",
"demo",
"architect",
"build",
"options",
"outputPath"
);
if (typeof outputPath !== "string") {
throw new Error(
`Expected outputPath to be of type string but got this type instead: ${typeof outputPath}`
);
} |
function copyEntryPoints() {
const angularOptions: unknown = JSON.parse(
fs.readFileSync(path.join(__dirname, "../../../angular.json"), {
encoding: "utf-8",
})
);
const angularDistDirSetting = readOutputPath(angularOptions);
const angularDistDir = path.join(
__dirname,
"../../..",
angularDistDirSetting
);
const indexHtmlFile = path.join(angularDistDir, "index.html");
examples.forEach((e) => {
fs.copyFileSync(indexHtmlFile, path.join(angularDistDir, `${e.name}.html`));
});
} |
return outputPath;
} | random_line_split |
copy-entrypoints.ts | import * as fs from "fs";
import * as path from "path";
import { examples } from "./examples";
copyEntryPoints();
function assertHasProp<T extends string>(
value: unknown,
prop: T
): asserts value is Record<T, unknown> {
if (!value || typeof value !== "object" || !(prop in value)) |
}
function getProp<T extends string[]>(value: unknown, ...propPath: T): unknown {
let current = value;
for (const prop of propPath) {
assertHasProp(current, prop);
current = current[prop];
}
return current;
}
function readOutputPath(angularConfiguration: unknown): string {
const outputPath = getProp(
angularConfiguration,
"projects",
"demo",
"architect",
"build",
"options",
"outputPath"
);
if (typeof outputPath !== "string") {
throw new Error(
`Expected outputPath to be of type string but got this type instead: ${typeof outputPath}`
);
}
return outputPath;
}
function copyEntryPoints() {
const angularOptions: unknown = JSON.parse(
fs.readFileSync(path.join(__dirname, "../../../angular.json"), {
encoding: "utf-8",
})
);
const angularDistDirSetting = readOutputPath(angularOptions);
const angularDistDir = path.join(
__dirname,
"../../..",
angularDistDirSetting
);
const indexHtmlFile = path.join(angularDistDir, "index.html");
examples.forEach((e) => {
fs.copyFileSync(indexHtmlFile, path.join(angularDistDir, `${e.name}.html`));
});
}
| {
throw new Error(
`Expected value to be an object containing property "${prop}", but it did not.`
);
} | conditional_block |
copy-entrypoints.ts | import * as fs from "fs";
import * as path from "path";
import { examples } from "./examples";
copyEntryPoints();
function assertHasProp<T extends string>(
value: unknown,
prop: T
): asserts value is Record<T, unknown> {
if (!value || typeof value !== "object" || !(prop in value)) {
throw new Error(
`Expected value to be an object containing property "${prop}", but it did not.`
);
}
}
function | <T extends string[]>(value: unknown, ...propPath: T): unknown {
let current = value;
for (const prop of propPath) {
assertHasProp(current, prop);
current = current[prop];
}
return current;
}
function readOutputPath(angularConfiguration: unknown): string {
const outputPath = getProp(
angularConfiguration,
"projects",
"demo",
"architect",
"build",
"options",
"outputPath"
);
if (typeof outputPath !== "string") {
throw new Error(
`Expected outputPath to be of type string but got this type instead: ${typeof outputPath}`
);
}
return outputPath;
}
function copyEntryPoints() {
const angularOptions: unknown = JSON.parse(
fs.readFileSync(path.join(__dirname, "../../../angular.json"), {
encoding: "utf-8",
})
);
const angularDistDirSetting = readOutputPath(angularOptions);
const angularDistDir = path.join(
__dirname,
"../../..",
angularDistDirSetting
);
const indexHtmlFile = path.join(angularDistDir, "index.html");
examples.forEach((e) => {
fs.copyFileSync(indexHtmlFile, path.join(angularDistDir, `${e.name}.html`));
});
}
| getProp | identifier_name |
copy-entrypoints.ts | import * as fs from "fs";
import * as path from "path";
import { examples } from "./examples";
copyEntryPoints();
function assertHasProp<T extends string>(
value: unknown,
prop: T
): asserts value is Record<T, unknown> {
if (!value || typeof value !== "object" || !(prop in value)) {
throw new Error(
`Expected value to be an object containing property "${prop}", but it did not.`
);
}
}
function getProp<T extends string[]>(value: unknown, ...propPath: T): unknown {
let current = value;
for (const prop of propPath) {
assertHasProp(current, prop);
current = current[prop];
}
return current;
}
function readOutputPath(angularConfiguration: unknown): string {
const outputPath = getProp(
angularConfiguration,
"projects",
"demo",
"architect",
"build",
"options",
"outputPath"
);
if (typeof outputPath !== "string") {
throw new Error(
`Expected outputPath to be of type string but got this type instead: ${typeof outputPath}`
);
}
return outputPath;
}
function copyEntryPoints() | {
const angularOptions: unknown = JSON.parse(
fs.readFileSync(path.join(__dirname, "../../../angular.json"), {
encoding: "utf-8",
})
);
const angularDistDirSetting = readOutputPath(angularOptions);
const angularDistDir = path.join(
__dirname,
"../../..",
angularDistDirSetting
);
const indexHtmlFile = path.join(angularDistDir, "index.html");
examples.forEach((e) => {
fs.copyFileSync(indexHtmlFile, path.join(angularDistDir, `${e.name}.html`));
});
} | identifier_body |
|
observe.py | from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
import weakref
import functools
# Decorator to target specific messages.
def targets(target_messages, no_first=False):
if isinstance(target_messages, str):
target_messages = [target_messages]
def wrapper(f):
@functools.wraps(f)
def _(self, *args, **kwargs):
message = args[0]
if message in target_messages:
if no_first and kwargs["i"] == 0:
|
f(self, *args, **kwargs)
return _
return wrapper
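# Usage sketch (hypothetical watcher; Observer and Observable are defined
# just below, so the demo defers all calls into a function): `targets` lets
# an update() handler react only to the listed messages, and no_first=True
# additionally drops the call where i == 0.
def _targets_demo():
    class TickWatcher(Observer):
        @targets("tick", no_first=True)
        def update(self, *args, **kwargs):
            print("tick at i=%d" % kwargs["i"])

    subject = Observable()
    watcher = TickWatcher()
    subject.register(watcher)
    subject.update_observers("tick", i=0)  # suppressed: first call
    subject.update_observers("tick", i=1)  # printed
    subject.update_observers("tock", i=1)  # ignored: not a target message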
class Observer(object):
__metaclass__ = ABCMeta
@abstractmethod
def update(self, *args, **kwargs):
pass
class Observable(object):
def __init__(self):
self.observers = weakref.WeakSet()
def register(self, observer):
self.observers.add(observer)
def unregister(self, observer):
self.observers.discard(observer)
def unregister_all(self):
self.observers.clear()
def update_observers(self, *args, **kwargs):
for observer in self.observers:
observer.update(*args, **kwargs)
def __getstate__(self):
state = self.__dict__.copy()
# Do not try to pickle observers.
del state["observers"]
return state
| return | conditional_block |
observe.py | from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
import weakref
import functools
# Decorator to target specific messages.
def targets(target_messages, no_first=False):
if isinstance(target_messages, str):
target_messages = [target_messages]
def wrapper(f):
@functools.wraps(f)
def _(self, *args, **kwargs):
message = args[0]
if message in target_messages:
if no_first and kwargs["i"] == 0:
return
f(self, *args, **kwargs)
return _
return wrapper | @abstractmethod
def update(self, *args, **kwargs):
pass
class Observable(object):
def __init__(self):
self.observers = weakref.WeakSet()
def register(self, observer):
self.observers.add(observer)
def unregister(self, observer):
self.observers.discard(observer)
def unregister_all(self):
self.observers.clear()
def update_observers(self, *args, **kwargs):
for observer in self.observers:
observer.update(*args, **kwargs)
def __getstate__(self):
state = self.__dict__.copy()
# Do not try to pickle observers.
del state["observers"]
return state |
class Observer(object):
__metaclass__ = ABCMeta
| random_line_split |
observe.py | from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
import weakref
import functools
# Decorator to target specific messages.
def targets(target_messages, no_first=False):
if isinstance(target_messages, str):
target_messages = [target_messages]
def wrapper(f):
@functools.wraps(f)
def _(self, *args, **kwargs):
message = args[0]
if message in target_messages:
if no_first and kwargs["i"] == 0:
return
f(self, *args, **kwargs)
return _
return wrapper
class Observer(object):
__metaclass__ = ABCMeta
@abstractmethod
def update(self, *args, **kwargs):
pass
class Observable(object):
def __init__(self):
self.observers = weakref.WeakSet()
def register(self, observer):
self.observers.add(observer)
def unregister(self, observer):
self.observers.discard(observer)
def unregister_all(self):
|
def update_observers(self, *args, **kwargs):
for observer in self.observers:
observer.update(*args, **kwargs)
def __getstate__(self):
state = self.__dict__.copy()
# Do not try to pickle observers.
del state["observers"]
return state
| self.observers.clear() | identifier_body |
observe.py | from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
import weakref
import functools
# Decorator to target specific messages.
def targets(target_messages, no_first=False):
if isinstance(target_messages, str):
target_messages = [target_messages]
def wrapper(f):
@functools.wraps(f)
def _(self, *args, **kwargs):
message = args[0]
if message in target_messages:
if no_first and kwargs["i"] == 0:
return
f(self, *args, **kwargs)
return _
return wrapper
class Observer(object):
__metaclass__ = ABCMeta
@abstractmethod
def update(self, *args, **kwargs):
pass
class Observable(object):
def __init__(self):
self.observers = weakref.WeakSet()
def register(self, observer):
self.observers.add(observer)
def unregister(self, observer):
self.observers.discard(observer)
def unregister_all(self):
self.observers.clear()
def update_observers(self, *args, **kwargs):
for observer in self.observers:
observer.update(*args, **kwargs)
def | (self):
state = self.__dict__.copy()
# Do not try to pickle observers.
del state["observers"]
return state
| __getstate__ | identifier_name |
wrapper.rs | //! This module holds the Wrapper newtype; used to write
//! instances of typeclasses that we don't define for types we don't
//! own
use frunk::monoid::*;
use frunk::semigroup::*;
use quickcheck::*;
/// The Wrapper NewType. Used for writing implementations of traits
/// that we don't own, for types we don't own.
///
/// Avoids the orphan typeclass instances problem in Haskell.
#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Hash)]
pub struct Wrapper<A>(A);
impl<A: Arbitrary + Ord + Clone> Arbitrary for Wrapper<Max<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Max(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary + Ord + Clone> Arbitrary for Wrapper<Min<A>> {
fn | <G: Gen>(g: &mut G) -> Self {
Wrapper(Min(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<All<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(All(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<Any<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Any(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<Product<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Product(Arbitrary::arbitrary(g)))
}
}
impl<A: Semigroup> Semigroup for Wrapper<A> {
fn combine(&self, other: &Self) -> Self {
Wrapper(self.0.combine(&other.0))
}
}
impl<A: Monoid> Monoid for Wrapper<A> {
fn empty() -> Self {
Wrapper(<A as Monoid>::empty())
}
}
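// Usage sketch (assumed to compile against frunk's derives for `Max`):
// combining two wrapped Max values keeps the larger one, which is the whole
// point of writing the Semigroup impl on the newtype.
#[cfg(test)]
mod wrapper_semigroup_sketch {
    use super::*;
    #[test]
    fn max_combine_keeps_larger() {
        let a = Wrapper(Max(3));
        let b = Wrapper(Max(7));
        assert_eq!(a.combine(&b), Wrapper(Max(7)));
    }
}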
| arbitrary | identifier_name |
wrapper.rs | //! This module holds the Wrapper newtype; used to write
//! instances of typeclasses that we don't define for types we don't
//! own
use frunk::monoid::*;
use frunk::semigroup::*;
use quickcheck::*;
/// The Wrapper NewType. Used for writing implementations of traits
/// that we don't own, for types we don't own.
///
/// Avoids the orphan typeclass instances problem in Haskell.
#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Hash)]
pub struct Wrapper<A>(A);
impl<A: Arbitrary + Ord + Clone> Arbitrary for Wrapper<Max<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Max(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary + Ord + Clone> Arbitrary for Wrapper<Min<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Min(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<All<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(All(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<Any<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Any(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<Product<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Product(Arbitrary::arbitrary(g)))
}
}
impl<A: Semigroup> Semigroup for Wrapper<A> {
fn combine(&self, other: &Self) -> Self {
Wrapper(self.0.combine(&other.0))
}
}
impl<A: Monoid> Monoid for Wrapper<A> {
fn empty() -> Self |
}
| {
Wrapper(<A as Monoid>::empty())
} | identifier_body |
wrapper.rs | //! This module holds the Wrapper newtype; used to write
//! instances of typeclasses that we don't define for types we don't
//! own
use frunk::monoid::*;
use frunk::semigroup::*;
use quickcheck::*;
/// The Wrapper NewType. Used for writing implementations of traits
/// that we don't own, for types we don't own.
///
/// Avoids the orphan typeclass instances problem in Haskell.
#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Hash)]
pub struct Wrapper<A>(A);
impl<A: Arbitrary + Ord + Clone> Arbitrary for Wrapper<Max<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Max(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary + Ord + Clone> Arbitrary for Wrapper<Min<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Min(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<All<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(All(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<Any<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Any(Arbitrary::arbitrary(g)))
}
}
impl<A: Arbitrary> Arbitrary for Wrapper<Product<A>> {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
Wrapper(Product(Arbitrary::arbitrary(g)))
}
}
impl<A: Semigroup> Semigroup for Wrapper<A> { |
impl<A: Monoid> Monoid for Wrapper<A> {
fn empty() -> Self {
Wrapper(<A as Monoid>::empty())
}
} | fn combine(&self, other: &Self) -> Self {
Wrapper(self.0.combine(&other.0))
}
} | random_line_split |
manage_languages_page.js | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview 'settings-manage-languages-page' is a sub-page for enabling
* and disabling languages.
*
* @group Chrome Settings Elements
* @element settings-manage-languages-page
*/
Polymer({
is: 'settings-manage-languages-page',
properties: {
/**
* Preferences state.
*/
prefs: {
type: Object,
notify: true,
},
/**
* @type {!LanguagesModel|undefined}
*/
languages: {
type: Object,
notify: true,
},
/**
* @private {!Array<!{code: string, displayName: string,
* nativeDisplayName: string, enabled: boolean}>|
* undefined}
*/
availableLanguages_: Array,
},
/** @private {!LanguageHelper} */
languageHelper_: LanguageHelperImpl.getInstance(),
observers: [
'enabledLanguagesChanged_(languages.enabledLanguages.*)',
],
/**
* Handler for removing a language.
* @param {!{model: !{item: !LanguageInfo}}} e
* @private
*/
onRemoveLanguageTap_: function(e) {
this.languageHelper_.disableLanguage(e.model.item.language.code);
},
/**
* Handler for adding a language.
* @param {!{model: {item: !chrome.languageSettingsPrivate.Language}}} e
* @private
*/
onAddLanguageTap_: function(e) {
this.languageHelper_.enableLanguage(e.model.item.code);
},
/**
* True if a language is not the current or prospective UI language.
* @param {string} languageCode
* @param {string} prospectiveUILanguageCode
* @return {boolean}
* @private
*/
canRemoveLanguage_: function(languageCode, prospectiveUILanguageCode) {
if (languageCode == navigator.language ||
languageCode == prospectiveUILanguageCode) |
assert(this.languages.enabledLanguages.length > 1);
return true;
},
/**
* Updates the available languages that are bound to the iron-list.
* @private
*/
enabledLanguagesChanged_: function() {
if (!this.availableLanguages_) {
var availableLanguages = [];
for (var i = 0; i < this.languages.supportedLanguages.length; i++) {
var language = this.languages.supportedLanguages[i];
availableLanguages.push({
code: language.code,
displayName: language.displayName,
nativeDisplayName: language.nativeDisplayName,
enabled: this.languageHelper_.isLanguageEnabled(language.code),
});
}
// Set the Polymer property after building the full array.
this.availableLanguages_ = availableLanguages;
} else {
// Update the available languages in place.
for (var i = 0; i < this.availableLanguages_.length; i++) {
this.set('availableLanguages_.' + i + '.enabled',
this.languageHelper_.isLanguageEnabled(
this.availableLanguages_[i].code));
}
}
},
});
| {
return false;
} | conditional_block |
manage_languages_page.js | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview 'settings-manage-languages-page' is a sub-page for enabling
* and disabling languages.
*
* @group Chrome Settings Elements
* @element settings-manage-languages-page
*/
Polymer({
is: 'settings-manage-languages-page',
properties: {
/**
* Preferences state.
*/
prefs: {
type: Object,
notify: true,
},
/**
* @type {!LanguagesModel|undefined}
*/
languages: {
type: Object,
notify: true,
},
/**
* @private {!Array<!{code: string, displayName: string,
* nativeDisplayName: string, enabled: boolean}>|
* undefined}
*/
availableLanguages_: Array,
},
/** @private {!LanguageHelper} */
languageHelper_: LanguageHelperImpl.getInstance(),
observers: [
'enabledLanguagesChanged_(languages.enabledLanguages.*)',
],
/**
* Handler for removing a language.
* @param {!{model: !{item: !LanguageInfo}}} e
* @private
*/
onRemoveLanguageTap_: function(e) {
this.languageHelper_.disableLanguage(e.model.item.language.code);
},
/**
* Handler for adding a language.
* @param {!{model: {item: !chrome.languageSettingsPrivate.Language}}} e
* @private
*/
onAddLanguageTap_: function(e) {
this.languageHelper_.enableLanguage(e.model.item.code);
},
/**
* True if a language is not the current or prospective UI language.
* @param {string} languageCode
* @param {string} prospectiveUILanguageCode
* @return {boolean}
* @private
*/
canRemoveLanguage_: function(languageCode, prospectiveUILanguageCode) {
if (languageCode == navigator.language ||
languageCode == prospectiveUILanguageCode) {
return false;
}
assert(this.languages.enabledLanguages.length > 1);
return true;
},
/**
* Updates the available languages that are bound to the iron-list.
* @private
*/
enabledLanguagesChanged_: function() {
if (!this.availableLanguages_) {
var availableLanguages = [];
for (var i = 0; i < this.languages.supportedLanguages.length; i++) {
var language = this.languages.supportedLanguages[i];
availableLanguages.push({
code: language.code,
displayName: language.displayName,
nativeDisplayName: language.nativeDisplayName,
enabled: this.languageHelper_.isLanguageEnabled(language.code),
});
}
// Set the Polymer property after building the full array.
this.availableLanguages_ = availableLanguages;
} else { | for (var i = 0; i < this.availableLanguages_.length; i++) {
this.set('availableLanguages_.' + i + '.enabled',
this.languageHelper_.isLanguageEnabled(
this.availableLanguages_[i].code));
}
}
},
}); | // Update the available languages in place. | random_line_split |
QtVariant.py | import sys
import os
default_variant = 'PySide'
env_api = os.environ.get('QT_API', 'pyqt')
if '--pyside' in sys.argv:
variant = 'PySide'
elif '--pyqt4' in sys.argv:
variant = 'PyQt4'
elif env_api == 'pyside':
variant = 'PySide'
elif env_api == 'pyqt':
variant = 'PyQt4'
else:
variant = default_variant
if variant == 'PySide':
from PySide import QtGui, QtCore
# This will be passed on to new versions of matplotlib
os.environ['QT_API'] = 'pyside'
def QtLoadUI(uifile):
from PySide import QtUiTools
loader = QtUiTools.QUiLoader()
uif = QtCore.QFile(uifile)
uif.open(QtCore.QFile.ReadOnly)
result = loader.load(uif)
uif.close()
return result
elif variant == 'PyQt4':
import sip
api2_classes = [
'QData', 'QDateTime', 'QString', 'QTextStream',
'QTime', 'QUrl', 'QVariant',
]
for cl in api2_classes:
sip.setapi(cl, 2)
from PyQt4 import QtGui, QtCore
QtCore.Signal = QtCore.pyqtSignal
QtCore.QString = str
os.environ['QT_API'] = 'pyqt'
def QtLoadUI(uifile):
from PyQt4 import uic
return uic.loadUi(uifile) | else:
raise ImportError("Python Variant not specified")
__all__ = [QtGui, QtCore, QtLoadUI, variant] | random_line_split |
|
QtVariant.py | import sys
import os
default_variant = 'PySide'
env_api = os.environ.get('QT_API', 'pyqt')
if '--pyside' in sys.argv:
variant = 'PySide'
elif '--pyqt4' in sys.argv:
variant = 'PyQt4'
elif env_api == 'pyside':
variant = 'PySide'
elif env_api == 'pyqt':
variant = 'PyQt4'
else:
variant = default_variant
if variant == 'PySide':
from PySide import QtGui, QtCore
# This will be passed on to new versions of matplotlib
os.environ['QT_API'] = 'pyside'
def QtLoadUI(uifile):
from PySide import QtUiTools
loader = QtUiTools.QUiLoader()
uif = QtCore.QFile(uifile)
uif.open(QtCore.QFile.ReadOnly)
result = loader.load(uif)
uif.close()
return result
elif variant == 'PyQt4':
import sip
api2_classes = [
'QData', 'QDateTime', 'QString', 'QTextStream',
'QTime', 'QUrl', 'QVariant',
]
for cl in api2_classes:
sip.setapi(cl, 2)
from PyQt4 import QtGui, QtCore
QtCore.Signal = QtCore.pyqtSignal
QtCore.QString = str
os.environ['QT_API'] = 'pyqt'
def QtLoadUI(uifile):
|
else:
raise ImportError("Python Variant not specified")
__all__ = [QtGui, QtCore, QtLoadUI, variant] | from PyQt4 import uic
return uic.loadUi(uifile) | identifier_body |
QtVariant.py | import sys
import os
default_variant = 'PySide'
env_api = os.environ.get('QT_API', 'pyqt')
if '--pyside' in sys.argv:
variant = 'PySide'
elif '--pyqt4' in sys.argv:
variant = 'PyQt4'
elif env_api == 'pyside':
variant = 'PySide'
elif env_api == 'pyqt':
variant = 'PyQt4'
else:
variant = default_variant
if variant == 'PySide':
from PySide import QtGui, QtCore
# This will be passed on to new versions of matplotlib
os.environ['QT_API'] = 'pyside'
def | (uifile):
from PySide import QtUiTools
loader = QtUiTools.QUiLoader()
uif = QtCore.QFile(uifile)
uif.open(QtCore.QFile.ReadOnly)
result = loader.load(uif)
uif.close()
return result
elif variant == 'PyQt4':
import sip
api2_classes = [
'QData', 'QDateTime', 'QString', 'QTextStream',
'QTime', 'QUrl', 'QVariant',
]
for cl in api2_classes:
sip.setapi(cl, 2)
from PyQt4 import QtGui, QtCore
QtCore.Signal = QtCore.pyqtSignal
QtCore.QString = str
os.environ['QT_API'] = 'pyqt'
def QtLoadUI(uifile):
from PyQt4 import uic
return uic.loadUi(uifile)
else:
raise ImportError("Python Variant not specified")
__all__ = [QtGui, QtCore, QtLoadUI, variant] | QtLoadUI | identifier_name |
QtVariant.py | import sys
import os
default_variant = 'PySide'
env_api = os.environ.get('QT_API', 'pyqt')
if '--pyside' in sys.argv:
variant = 'PySide'
elif '--pyqt4' in sys.argv:
variant = 'PyQt4'
elif env_api == 'pyside':
variant = 'PySide'
elif env_api == 'pyqt':
variant = 'PyQt4'
else:
|
if variant == 'PySide':
from PySide import QtGui, QtCore
# This will be passed on to new versions of matplotlib
os.environ['QT_API'] = 'pyside'
def QtLoadUI(uifile):
from PySide import QtUiTools
loader = QtUiTools.QUiLoader()
uif = QtCore.QFile(uifile)
uif.open(QtCore.QFile.ReadOnly)
result = loader.load(uif)
uif.close()
return result
elif variant == 'PyQt4':
import sip
api2_classes = [
'QData', 'QDateTime', 'QString', 'QTextStream',
'QTime', 'QUrl', 'QVariant',
]
for cl in api2_classes:
sip.setapi(cl, 2)
from PyQt4 import QtGui, QtCore
QtCore.Signal = QtCore.pyqtSignal
QtCore.QString = str
os.environ['QT_API'] = 'pyqt'
def QtLoadUI(uifile):
from PyQt4 import uic
return uic.loadUi(uifile)
else:
raise ImportError("Python Variant not specified")
__all__ = [QtGui, QtCore, QtLoadUI, variant] | variant = default_variant | conditional_block |
indexes.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import hashlib
import os
from elasticsearch import Elasticsearch, TransportError
from elasticsearch.helpers import bulk_index
from warehouse.utils import AttributeDict
class Index(object):
| hashlib.md5(os.urandom(16)).hexdigest()[:8],
])
# Create this index
self.es.indices.create(index, {
"mappings": {
doc_type._type: doc_type.get_mapping()
for doc_type in self.types.values()
},
})
# Index everything into the new index
for doc_type in self.types.values():
doc_type.index_all(index=index)
# Update the alias unless we've been told not to
if alias:
self.update_alias(self._index, index, keep_old=keep_old)
def update_alias(self, alias, index, keep_old=False):
# Get the old index from ElasticSearch
try:
old_index = self.es.indices.get_alias(self._index).keys()[0]
except TransportError as exc:
if not exc.status_code == 404:
raise
old_index = None
# Remove the alias to the old index if it exists
if old_index is not None:
actions = [{"remove": {"index": old_index, "alias": alias}}]
else:
actions = []
# Add the alias to the new index
actions += [{"add": {"index": index, "alias": alias}}]
# Update To the New Index
self.es.indices.update_aliases({"actions": actions})
# Delete the old index if it exists and unless we're keeping it
if not keep_old and old_index is not None:
self.es.indices.delete(old_index)
class BaseMapping(object):
SEARCH_LIMIT = 25
def __init__(self, index):
self.index = index
def get_mapping(self):
raise NotImplementedError
def get_indexable(self):
raise NotImplementedError
def extract_id(self, item):
raise NotImplementedError
def extract_document(self, item):
raise NotImplementedError
def index_all(self, index=None):
# Determine which index we are indexing into
_index = index if index is not None else self.index._index
# Bulk Index our documents
bulk_index(
self.index.es,
[
{
"_index": _index,
"_type": self._type,
"_id": self.extract_id(item),
"_source": self.extract_document(item),
}
for item in self.get_indexable()
],
)
def search(self, query):
raise NotImplementedError
| _index = "warehouse"
def __init__(self, models, config):
self.models = models
self.config = config
self.es = Elasticsearch(
hosts=self.config.hosts,
**self.config.get("client_options", {})
)
self.types = AttributeDict()
def register(self, type_):
obj = type_(self)
self.types[obj._type] = obj
def reindex(self, index=None, alias=True, keep_old=False):
# Generate an Index Name for Warehouse
index = "".join([
index if index is not None else self._index, | identifier_body |
indexes.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import hashlib
import os
from elasticsearch import Elasticsearch, TransportError
from elasticsearch.helpers import bulk_index
from warehouse.utils import AttributeDict
class Index(object):
_index = "warehouse"
def __init__(self, models, config):
self.models = models
self.config = config
self.es = Elasticsearch(
hosts=self.config.hosts,
**self.config.get("client_options", {})
)
self.types = AttributeDict()
def register(self, type_):
obj = type_(self)
self.types[obj._type] = obj
def reindex(self, index=None, alias=True, keep_old=False):
# Generate an Index Name for Warehouse
index = "".join([
index if index is not None else self._index,
hashlib.md5(os.urandom(16)).hexdigest()[:8],
])
# Create this index
self.es.indices.create(index, {
"mappings": {
doc_type._type: doc_type.get_mapping()
for doc_type in self.types.values()
},
})
# Index everything into the new index
for doc_type in self.types.values():
doc_type.index_all(index=index)
# Update the alias unless we've been told not to
if alias:
|
def update_alias(self, alias, index, keep_old=False):
# Get the old index from ElasticSearch
try:
old_index = self.es.indices.get_alias(self._index).keys()[0]
except TransportError as exc:
if not exc.status_code == 404:
raise
old_index = None
# Remove the alias to the old index if it exists
if old_index is not None:
actions = [{"remove": {"index": old_index, "alias": alias}}]
else:
actions = []
# Add the alias to the new index
actions += [{"add": {"index": index, "alias": alias}}]
# Update To the New Index
self.es.indices.update_aliases({"actions": actions})
# Delete the old index if it exists and unless we're keeping it
if not keep_old and old_index is not None:
self.es.indices.delete(old_index)
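# Module-level sketch of the action list built above (hypothetical index
# names, not used by Warehouse itself): a single update_aliases() call swaps
# the alias atomically, so readers of "warehouse" never see a gap.
def _alias_swap_actions(old_index, new_index, alias="warehouse"):
    actions = []
    if old_index is not None:
        actions.append({"remove": {"index": old_index, "alias": alias}})
    actions.append({"add": {"index": new_index, "alias": alias}})
    return actions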
class BaseMapping(object):
SEARCH_LIMIT = 25
def __init__(self, index):
self.index = index
def get_mapping(self):
raise NotImplementedError
def get_indexable(self):
raise NotImplementedError
def extract_id(self, item):
raise NotImplementedError
def extract_document(self, item):
raise NotImplementedError
def index_all(self, index=None):
# Determine which index we are indexing into
_index = index if index is not None else self.index._index
# Bulk Index our documents
bulk_index(
self.index.es,
[
{
"_index": _index,
"_type": self._type,
"_id": self.extract_id(item),
"_source": self.extract_document(item),
}
for item in self.get_indexable()
],
)
def search(self, query):
raise NotImplementedError
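# Minimal sketch of a concrete mapping (hypothetical "project" documents,
# not part of Warehouse): the four methods BaseMapping leaves abstract are
# all a subclass needs before Index.reindex() can build and alias an index.
class ProjectMapping(BaseMapping):
    _type = "project"

    def get_mapping(self):
        return {"properties": {"name": {"type": "string"}}}

    def get_indexable(self):
        return [{"id": 1, "name": "warehouse"}]

    def extract_id(self, item):
        return item["id"]

    def extract_document(self, item):
        return {"name": item["name"]}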
| self.update_alias(self._index, index, keep_old=keep_old) | conditional_block |
indexes.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import hashlib
import os
from elasticsearch import Elasticsearch, TransportError
from elasticsearch.helpers import bulk_index
from warehouse.utils import AttributeDict
class Index(object):
_index = "warehouse"
def __init__(self, models, config):
self.models = models
self.config = config
self.es = Elasticsearch(
hosts=self.config.hosts,
**self.config.get("client_options", {})
)
self.types = AttributeDict()
def register(self, type_):
obj = type_(self)
self.types[obj._type] = obj
def reindex(self, index=None, alias=True, keep_old=False):
# Generate an Index Name for Warehouse
index = "".join([
index if index is not None else self._index,
hashlib.md5(os.urandom(16)).hexdigest()[:8],
])
# Create this index
self.es.indices.create(index, {
"mappings": {
doc_type._type: doc_type.get_mapping()
for doc_type in self.types.values()
},
})
# Index everything into the new index
for doc_type in self.types.values():
doc_type.index_all(index=index)
# Update the alias unless we've been told not to
if alias:
self.update_alias(self._index, index, keep_old=keep_old)
def update_alias(self, alias, index, keep_old=False):
# Get the old index from ElasticSearch
try:
old_index = self.es.indices.get_alias(self._index).keys()[0]
except TransportError as exc:
if not exc.status_code == 404:
raise
old_index = None
# Remove the alias to the old index if it exists
if old_index is not None:
actions = [{"remove": {"index": old_index, "alias": alias}}]
else:
actions = []
# Add the alias to the new index
actions += [{"add": {"index": index, "alias": alias}}]
# Update To the New Index
self.es.indices.update_aliases({"actions": actions})
# Delete the old index if it exists and unless we're keeping it
if not keep_old and old_index is not None:
self.es.indices.delete(old_index)
class BaseMapping(object):
SEARCH_LIMIT = 25
def __init__(self, index):
self.index = index
def get_mapping(self):
raise NotImplementedError
def get_indexable(self):
raise NotImplementedError
def extract_id(self, item):
raise NotImplementedError
def extract_document(self, item):
raise NotImplementedError
def index_all(self, index=None):
# Determine which index we are indexing into
_index = index if index is not None else self.index._index
# Bulk Index our documents
bulk_index(
self.index.es,
[
{
"_index": _index,
"_type": self._type,
"_id": self.extract_id(item),
"_source": self.extract_document(item),
}
for item in self.get_indexable()
], | )
def search(self, query):
raise NotImplementedError | random_line_split |
|
indexes.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import hashlib
import os
from elasticsearch import Elasticsearch, TransportError
from elasticsearch.helpers import bulk_index
from warehouse.utils import AttributeDict
class Index(object):
_index = "warehouse"
def __init__(self, models, config):
self.models = models
self.config = config
self.es = Elasticsearch(
hosts=self.config.hosts,
**self.config.get("client_options", {})
)
self.types = AttributeDict()
def register(self, type_):
obj = type_(self)
self.types[obj._type] = obj
def reindex(self, index=None, alias=True, keep_old=False):
# Generate an Index Name for Warehouse
index = "".join([
index if index is not None else self._index,
hashlib.md5(os.urandom(16)).hexdigest()[:8],
])
# Create this index
self.es.indices.create(index, {
"mappings": {
doc_type._type: doc_type.get_mapping()
for doc_type in self.types.values()
},
})
# Index everything into the new index
for doc_type in self.types.values():
doc_type.index_all(index=index)
# Update the alias unless we've been told not to
if alias:
self.update_alias(self._index, index, keep_old=keep_old)
def update_alias(self, alias, index, keep_old=False):
# Get the old index from ElasticSearch
try:
old_index = self.es.indices.get_alias(self._index).keys()[0]
except TransportError as exc:
if not exc.status_code == 404:
raise
old_index = None
# Remove the alias to the old index if it exists
if old_index is not None:
actions = [{"remove": {"index": old_index, "alias": alias}}]
else:
actions = []
# Add the alias to the new index
actions += [{"add": {"index": index, "alias": alias}}]
# Update To the New Index
self.es.indices.update_aliases({"actions": actions})
# Delete the old index if it exists and unless we're keeping it
if not keep_old and old_index is not None:
self.es.indices.delete(old_index)
class BaseMapping(object):
SEARCH_LIMIT = 25
def __init__(self, index):
self.index = index
def get_mapping(self):
raise NotImplementedError
def get_indexable(self):
raise NotImplementedError
def extract_id(self, item):
raise NotImplementedError
def extract_document(self, item):
raise NotImplementedError
def index_all(self, index=None):
# Determine which index we are indexing into
_index = index if index is not None else self.index._index
# Bulk Index our documents
bulk_index(
self.index.es,
[
{
"_index": _index,
"_type": self._type,
"_id": self.extract_id(item),
"_source": self.extract_document(item),
}
for item in self.get_indexable()
],
)
def | (self, query):
raise NotImplementedError
| search | identifier_name |
__init__.py | # Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
from logging.handlers import RotatingFileHandler
from os import path
import logging
import logging.config
import logging.handlers
import os
import sys
class LogDir(object):
'''
Locates the log dir for plugin logs.
'''
@staticmethod
def find():
return LogDir()._find_log_dir()
def _test(self, a, b):
if a == b:
return 'folder'
elif a == b + '.sublime-package':
return 'sublime-package'
def _find_path(self, start, package):
while True:
result = self._test(os.path.basename(start), package)
if result == 'folder':
if os.path.exists(path.join(path.dirname(start), 'User')):
return path.join(path.dirname(start), '.logs')
elif result == 'sublime-package':
parent = path.dirname(start)
if path.exists(path.join(path.dirname(parent), 'Packages')):
return path.join(path.dirname(parent), 'Packages', '.logs')
if path.dirname(start) == start:
return
start = path.dirname(start)
def _find_log_dir(self):
package = __name__.split('.')[0]
if package == '__main__':
return
start = path.dirname(__file__)
logs_path = self._find_path(start, package)
if not logs_path:
return
if not path.exists(logs_path):
os.mkdir(logs_path)
return logs_path
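# Sketch of how _test() classifies a directory entry (example names only;
# this helper is illustrative and not used by the plugin itself).
def _test_demo():
    d = LogDir()
    assert d._test("MyPlugin", "MyPlugin") == "folder"
    assert d._test("MyPlugin.sublime-package", "MyPlugin") == "sublime-package"
    assert d._test("Other", "MyPlugin") is None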
class NullPluginLogger(object):
'''
Suppresses log records.
'''
def __init__(self, name):
pass
def debug(self, message, *args, **kwargs):
pass
def info(self, message, *args, **kwargs):
pass
def warn(self, message, *args, **kwargs):
pass
def warning(self, message, *args, **kwargs):
pass
def error(self, message, *args, **kwargs):
pass
def critical(self, message, *args, **kwargs):
pass
class PluginLogger(object):
'''
Logs events.
'''
log_dir = LogDir.find()
def __init__(self, name):
self.logger = logging.getLogger(name)
# Only attach handlers to the top-level logger in the hierarchy.
if '.' in name:
return
default_level = logging.ERROR
user_level = self._get_log_level_from_file()
self.logger.setLevel(user_level if user_level is not None else default_level)
f = logging.Formatter('%(asctime)s %(levelname)-5s %(name)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.WARNING)
consoleHandler.setFormatter(f)
self.logger.addHandler(consoleHandler)
| file_name = self._file_name()
if file_name:
# FIXME: RotatingFileHandler does not rollover ever.
if os.path.exists(file_name):
try:
os.unlink(file_name)
except:
pass
fileHandler = RotatingFileHandler(file_name, maxBytes=1<<20)
fileHandler.setFormatter(f)
self.logger.addHandler(fileHandler)
else:
print("cannot find log file path: %s" % file_name)
def warn_about_logging_level(self):
if self.logger.level <= logging.DEBUG:
self.warning("log level set to DEBUG; check or delete %s", self._get_path_to_log())
def _get_path_to_log(self):
package = __name__.split('.')[0]
p = os.path.join(self.log_dir, package)
return p
def _get_log_level_from_file(self):
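# The level name is read from a plain-text file named after the package (e.g. a file containing "DEBUG").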
p = self._get_path_to_log()
if os.path.exists(p):
with open(p, 'rt') as f:
text = f.read().strip().upper()
return getattr(logging, text, None)
def _file_name(self):
p = __name__.split('.')[0]
return os.path.join(self.log_dir, '{}.log'.format(p))
def debug(self, message, *args, **kwargs):
self.logger.debug(message, *args, **kwargs)
def info(self, message, *args, **kwargs):
self.logger.info(message, *args, **kwargs)
def warn(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def error(self, message, *args, **kwargs):
self.logger.error(message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
self.logger.critical(message, *args, **kwargs) | random_line_split |
|
__init__.py | # Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
from logging.handlers import RotatingFileHandler
from os import path
import logging
import logging.config
import logging.handlers
import os
import sys
class LogDir(object):
'''
Locates the log dir for plugin logs.
'''
@staticmethod
def find():
return LogDir()._find_log_dir()
def _test(self, a, b):
if a == b:
return 'folder'
elif a == b + '.sublime-package':
return 'sublime-package'
def _find_path(self, start, package):
while True:
result = self._test(os.path.basename(start), package)
if result == 'folder':
i |
elif result == 'sublime-package':
parent = path.dirname(start)
if path.exists(path.join(path.dirname(parent), 'Packages')):
return path.join(path.dirname(parent), 'Packages', '.logs')
if path.dirname(start) == start:
return
start = path.dirname(start)
def _find_log_dir(self):
package = __name__.split('.')[0]
if package == '__main__':
return
start = path.dirname(__file__)
logs_path = self._find_path(start, package)
if not logs_path:
return
if not path.exists(logs_path):
os.mkdir(logs_path)
return logs_path
class NullPluginLogger(object):
'''
Suppresses log records.
'''
def __init__(self, name):
pass
def debug(self, message, *args, **kwargs):
pass
def info(self, message, *args, **kwargs):
pass
def warn(self, message, *args, **kwargs):
pass
def warning(self, message, *args, **kwargs):
pass
def error(self, message, *args, **kwargs):
pass
def critical(self, message, *args, **kwargs):
pass
class PluginLogger(object):
'''
Logs events.
'''
log_dir = LogDir.find()
def __init__(self, name):
self.logger = logging.getLogger(name)
# Only attach handlers to the top-level logger in the hierarchy.
if '.' in name:
return
default_level = logging.ERROR
user_level = self._get_log_level_from_file()
self.logger.setLevel(user_level if user_level is not None else default_level)
f = logging.Formatter('%(asctime)s %(levelname)-5s %(name)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.WARNING)
consoleHandler.setFormatter(f)
self.logger.addHandler(consoleHandler)
file_name = self._file_name()
if file_name:
# FIXME: RotatingFileHandler does not rollover ever.
if os.path.exists(file_name):
try:
os.unlink(file_name)
except:
pass
fileHandler = RotatingFileHandler(file_name, maxBytes=1<<20)
fileHandler.setFormatter(f)
self.logger.addHandler(fileHandler)
else:
print("cannot find log file path: %s" % file_name)
def warn_about_logging_level(self):
if self.logger.level <= logging.DEBUG:
self.warning("log level set to DEBUG; check or delete %s", self._get_path_to_log())
def _get_path_to_log(self):
package = __name__.split('.')[0]
p = os.path.join(self.log_dir, package)
return p
def _get_log_level_from_file(self):
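# The level name is read from a plain-text file named after the package (e.g. a file containing "DEBUG").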
p = self._get_path_to_log()
if os.path.exists(p):
with open(p, 'rt') as f:
text = f.read().strip().upper()
return getattr(logging, text, None)
def _file_name(self):
p = __name__.split('.')[0]
return os.path.join(self.log_dir, '{}.log'.format(p))
def debug(self, message, *args, **kwargs):
self.logger.debug(message, *args, **kwargs)
def info(self, message, *args, **kwargs):
self.logger.info(message, *args, **kwargs)
def warn(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def error(self, message, *args, **kwargs):
self.logger.error(message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
self.logger.critical(message, *args, **kwargs)
| f os.path.exists(path.join(path.dirname(start), 'User')):
return path.join(path.dirname(start), '.logs')
| conditional_block |
__init__.py | # Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
from logging.handlers import RotatingFileHandler
from os import path
import logging
import logging.config
import logging.handlers
import os
import sys
class LogDir(object):
'''
Locates the log dir for plugin logs.
'''
@staticmethod
def find():
return LogDir()._find_log_dir()
def _test(self, a, b):
if a == b:
return 'folder'
elif a == b + '.sublime-package':
return 'sublime-package'
def _find_path(self, start, package):
while True:
result = self._test(os.path.basename(start), package)
if result == 'folder':
if os.path.exists(path.join(path.dirname(start), 'User')):
return path.join(path.dirname(start), '.logs')
elif result == 'sublime-package':
parent = path.dirname(start)
if path.exists(path.join(path.dirname(parent), 'Packages')):
return path.join(path.dirname(parent), 'Packages', '.logs')
if path.dirname(start) == start:
return
start = path.dirname(start)
def _find_log_dir(self):
package = __name__.split('.')[0]
if package == '__main__':
return
start = path.dirname(__file__)
logs_path = self._find_path(start, package)
if not logs_path:
return
if not path.exists(logs_path):
os.mkdir(logs_path)
return logs_path
class NullPluginLogger(object):
'''
Suppresses log records.
'''
def __init__(self, name):
p |
def debug(self, message, *args, **kwargs):
pass
def info(self, message, *args, **kwargs):
pass
def warn(self, message, *args, **kwargs):
pass
def warning(self, message, *args, **kwargs):
pass
def error(self, message, *args, **kwargs):
pass
def critical(self, message, *args, **kwargs):
pass
class PluginLogger(object):
'''
Logs events.
'''
log_dir = LogDir.find()
def __init__(self, name):
self.logger = logging.getLogger(name)
# Only attach handlers to the top-level logger in the hierarchy.
if '.' in name:
return
default_level = logging.ERROR
user_level = self._get_log_level_from_file()
self.logger.setLevel(user_level if user_level is not None else default_level)
f = logging.Formatter('%(asctime)s %(levelname)-5s %(name)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.WARNING)
consoleHandler.setFormatter(f)
self.logger.addHandler(consoleHandler)
file_name = self._file_name()
if file_name:
# FIXME: RotatingFileHandler does not rollover ever.
if os.path.exists(file_name):
try:
os.unlink(file_name)
except:
pass
fileHandler = RotatingFileHandler(file_name, maxBytes=1<<20)
fileHandler.setFormatter(f)
self.logger.addHandler(fileHandler)
else:
print("cannot find log file path: %s" % file_name)
def warn_about_logging_level(self):
if self.logger.level <= logging.DEBUG:
self.warning("log level set to DEBUG; check or delete %s", self._get_path_to_log())
def _get_path_to_log(self):
package = __name__.split('.')[0]
p = os.path.join(self.log_dir, package)
return p
def _get_log_level_from_file(self):
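# The level name is read from a plain-text file named after the package (e.g. a file containing "DEBUG").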
p = self._get_path_to_log()
if os.path.exists(p):
with open(p, 'rt') as f:
text = f.read().strip().upper()
return getattr(logging, text, None)
def _file_name(self):
p = __name__.split('.')[0]
return os.path.join(self.log_dir, '{}.log'.format(p))
def debug(self, message, *args, **kwargs):
self.logger.debug(message, *args, **kwargs)
def info(self, message, *args, **kwargs):
self.logger.info(message, *args, **kwargs)
def warn(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def error(self, message, *args, **kwargs):
self.logger.error(message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
self.logger.critical(message, *args, **kwargs)
| ass
| identifier_body |
__init__.py | # Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
from logging.handlers import RotatingFileHandler
from os import path
import logging
import logging.config
import logging.handlers
import os
import sys
class LogDir(object):
'''
Locates the log dir for plugin logs.
'''
@staticmethod
def find():
return LogDir()._find_log_dir()
def _test(self, a, b):
if a == b:
return 'folder'
elif a == b + '.sublime-package':
return 'sublime-package'
def _find_path(self, start, package):
while True:
result = self._test(os.path.basename(start), package)
if result == 'folder':
if os.path.exists(path.join(path.dirname(start), 'User')):
return path.join(path.dirname(start), '.logs')
elif result == 'sublime-package':
parent = path.dirname(start)
if path.exists(path.join(path.dirname(parent), 'Packages')):
return path.join(path.dirname(parent), 'Packages', '.logs')
if path.dirname(start) == start:
return
start = path.dirname(start)
def _find_log_dir(self):
package = __name__.split('.')[0]
if package == '__main__':
return
start = path.dirname(__file__)
logs_path = self._find_path(start, package)
if not logs_path:
return
if not path.exists(logs_path):
os.mkdir(logs_path)
return logs_path
class NullPluginLogger(object):
'''
Suppresses log records.
'''
def __init__(self, name):
pass
def debug(self, message, *args, **kwargs):
pass
def info(self, message, *args, **kwargs):
pass
def warn(self, message, *args, **kwargs):
pass
def warning(self, message, *args, **kwargs):
pass
def error(self, message, *args, **kwargs):
pass
def critical(self, message, *args, **kwargs):
pass
class PluginLogger(object):
'''
Logs events.
'''
log_dir = LogDir.find()
def __init__(self, name):
self.logger = logging.getLogger(name)
# Only attach handlers to the top-level logger in the hierarchy.
if '.' in name:
return
default_level = logging.ERROR
user_level = self._get_log_level_from_file()
self.logger.setLevel(user_level if user_level is not None else default_level)
f = logging.Formatter('%(asctime)s %(levelname)-5s %(name)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.WARNING)
consoleHandler.setFormatter(f)
self.logger.addHandler(consoleHandler)
file_name = self._file_name()
if file_name:
# FIXME: RotatingFileHandler does not rollover ever.
if os.path.exists(file_name):
try:
os.unlink(file_name)
except:
pass
fileHandler = RotatingFileHandler(file_name, maxBytes=1<<20)
fileHandler.setFormatter(f)
self.logger.addHandler(fileHandler)
else:
print("cannot find log file path: %s" % file_name)
def warn_about_logging_level(self):
if self.logger.level <= logging.DEBUG:
self.warning("log level set to DEBUG; check or delete %s", self._get_path_to_log())
def _get_path_to_log(self):
package = __name__.split('.')[0]
p = os.path.join(self.log_dir, package)
return p
def _get_log_level_from_file(self):
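# The level name is read from a plain-text file named after the package (e.g. a file containing "DEBUG").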
p = self._get_path_to_log()
if os.path.exists(p):
with open(p, 'rt') as f:
text = f.read().strip().upper()
return getattr(logging, text, None)
def _file_name(self):
p = __name__.split('.')[0]
return os.path.join(self.log_dir, '{}.log'.format(p))
def debug(self, message, *args, **kwargs):
self.logger.debug(message, *args, **kwargs)
def i | self, message, *args, **kwargs):
self.logger.info(message, *args, **kwargs)
def warn(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
self.logger.warning(message, *args, **kwargs)
def error(self, message, *args, **kwargs):
self.logger.error(message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
self.logger.critical(message, *args, **kwargs)
| nfo( | identifier_name |
text-list-control.component.ts | import { Component, OnInit, forwardRef } from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
@Component({
selector: 'app-text-list-control',
templateUrl: './text-list-control.component.html',
styleUrls: ['./text-list-control.component.scss'],
providers: [{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => TextListControlComponent),
multi: true
}]
})
export class TextListControlComponent implements OnInit, ControlValueAccessor {
private actuallTextList: string[];
set textList(list: string[]) {
if (this.textList.toString() !== list.toString()) |
}
get textList(): string[] {
return this.actuallTextList;
}
onChangeCallback: Function;
constructor() { }
ngOnInit() {
}
writeValue(val: string[]) {
this.actuallTextList = val;
}
registerOnChange(fn: Function) {
this.onChangeCallback = fn;
}
registerOnTouched() {}
}
| {
this.actuallTextList = list;
} | conditional_block |
text-list-control.component.ts | import { Component, OnInit, forwardRef } from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
@Component({
selector: 'app-text-list-control',
templateUrl: './text-list-control.component.html',
styleUrls: ['./text-list-control.component.scss'],
providers: [{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => TextListControlComponent),
multi: true
}]
})
export class TextListControlComponent implements OnInit, ControlValueAccessor {
private actuallTextList: string[];
set textList(list: string[]) {
if (this.textList.toString() !== list.toString()) {
this.actuallTextList = list;
}
}
get textList(): string[] {
return this.actuallTextList;
}
onChangeCallback: Function;
constructor() { }
ngOnInit() {
}
writeValue(val: string[]) |
registerOnChange(fn: Function) {
this.onChangeCallback = fn;
}
registerOnTouched() {}
}
| {
this.actuallTextList = val;
} | identifier_body |
text-list-control.component.ts | import { Component, OnInit, forwardRef } from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
@Component({
selector: 'app-text-list-control',
templateUrl: './text-list-control.component.html',
styleUrls: ['./text-list-control.component.scss'],
providers: [{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => TextListControlComponent),
multi: true
}]
})
export class TextListControlComponent implements OnInit, ControlValueAccessor {
private actuallTextList: string[];
set textList(list: string[]) {
if (this.textList.toString() !== list.toString()) {
this.actuallTextList = list;
}
}
get textList(): string[] { |
constructor() { }
ngOnInit() {
}
writeValue(val: string[]) {
this.actuallTextList = val;
}
registerOnChange(fn: Function) {
this.onChangeCallback = fn;
}
registerOnTouched() {}
} | return this.actuallTextList;
}
onChangeCallback: Function; | random_line_split |
text-list-control.component.ts | import { Component, OnInit, forwardRef } from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
@Component({
selector: 'app-text-list-control',
templateUrl: './text-list-control.component.html',
styleUrls: ['./text-list-control.component.scss'],
providers: [{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => TextListControlComponent),
multi: true
}]
})
export class TextListControlComponent implements OnInit, ControlValueAccessor {
private actuallTextList: string[];
set textList(list: string[]) {
if (this.textList.toString() !== list.toString()) {
this.actuallTextList = list;
}
}
get textList(): string[] {
return this.actuallTextList;
}
onChangeCallback: Function;
constructor() { }
ngOnInit() {
}
writeValue(val: string[]) {
this.actuallTextList = val;
}
| (fn: Function) {
this.onChangeCallback = fn;
}
registerOnTouched() {}
}
| registerOnChange | identifier_name |
zwave.py | """Mock helpers for Z-Wave component."""
from pydispatch import dispatcher
from tests.async_mock import MagicMock
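# These helpers emulate the pydispatch signals that a python-openzwave network would fire, letting tests drive entities without hardware.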
def value_changed(value):
"""Fire a value changed."""
dispatcher.send(
MockNetwork.SIGNAL_VALUE_CHANGED,
value=value,
node=value.node,
network=value.node._network,
)
def | (node):
"""Fire a node changed."""
dispatcher.send(MockNetwork.SIGNAL_NODE, node=node, network=node._network)
def notification(node_id, network=None):
"""Fire a notification."""
dispatcher.send(
MockNetwork.SIGNAL_NOTIFICATION, args={"nodeId": node_id}, network=network
)
class MockOption(MagicMock):
"""Mock Z-Wave options."""
def __init__(self, device=None, config_path=None, user_path=None, cmd_line=None):
"""Initialize a Z-Wave mock options."""
super().__init__()
self.device = device
self.config_path = config_path
self.user_path = user_path
self.cmd_line = cmd_line
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockNetwork(MagicMock):
"""Mock Z-Wave network."""
SIGNAL_NETWORK_FAILED = "mock_NetworkFailed"
SIGNAL_NETWORK_STARTED = "mock_NetworkStarted"
SIGNAL_NETWORK_READY = "mock_NetworkReady"
SIGNAL_NETWORK_STOPPED = "mock_NetworkStopped"
SIGNAL_NETWORK_RESETTED = "mock_DriverResetted"
SIGNAL_NETWORK_AWAKED = "mock_DriverAwaked"
SIGNAL_DRIVER_FAILED = "mock_DriverFailed"
SIGNAL_DRIVER_READY = "mock_DriverReady"
SIGNAL_DRIVER_RESET = "mock_DriverReset"
SIGNAL_DRIVER_REMOVED = "mock_DriverRemoved"
SIGNAL_GROUP = "mock_Group"
SIGNAL_NODE = "mock_Node"
SIGNAL_NODE_ADDED = "mock_NodeAdded"
SIGNAL_NODE_EVENT = "mock_NodeEvent"
SIGNAL_NODE_NAMING = "mock_NodeNaming"
SIGNAL_NODE_NEW = "mock_NodeNew"
SIGNAL_NODE_PROTOCOL_INFO = "mock_NodeProtocolInfo"
SIGNAL_NODE_READY = "mock_NodeReady"
SIGNAL_NODE_REMOVED = "mock_NodeRemoved"
SIGNAL_SCENE_EVENT = "mock_SceneEvent"
SIGNAL_VALUE = "mock_Value"
SIGNAL_VALUE_ADDED = "mock_ValueAdded"
SIGNAL_VALUE_CHANGED = "mock_ValueChanged"
SIGNAL_VALUE_REFRESHED = "mock_ValueRefreshed"
SIGNAL_VALUE_REMOVED = "mock_ValueRemoved"
SIGNAL_POLLING_ENABLED = "mock_PollingEnabled"
SIGNAL_POLLING_DISABLED = "mock_PollingDisabled"
SIGNAL_CREATE_BUTTON = "mock_CreateButton"
SIGNAL_DELETE_BUTTON = "mock_DeleteButton"
SIGNAL_BUTTON_ON = "mock_ButtonOn"
SIGNAL_BUTTON_OFF = "mock_ButtonOff"
SIGNAL_ESSENTIAL_NODE_QUERIES_COMPLETE = "mock_EssentialNodeQueriesComplete"
SIGNAL_NODE_QUERIES_COMPLETE = "mock_NodeQueriesComplete"
SIGNAL_AWAKE_NODES_QUERIED = "mock_AwakeNodesQueried"
SIGNAL_ALL_NODES_QUERIED = "mock_AllNodesQueried"
SIGNAL_ALL_NODES_QUERIED_SOME_DEAD = "mock_AllNodesQueriedSomeDead"
SIGNAL_MSG_COMPLETE = "mock_MsgComplete"
SIGNAL_NOTIFICATION = "mock_Notification"
SIGNAL_CONTROLLER_COMMAND = "mock_ControllerCommand"
SIGNAL_CONTROLLER_WAITING = "mock_ControllerWaiting"
STATE_STOPPED = 0
STATE_FAILED = 1
STATE_RESETTED = 3
STATE_STARTED = 5
STATE_AWAKED = 7
STATE_READY = 10
def __init__(self, options=None, *args, **kwargs):
"""Initialize a Z-Wave mock network."""
super().__init__()
self.options = options
self.state = MockNetwork.STATE_STOPPED
class MockNode(MagicMock):
"""Mock Z-Wave node."""
def __init__(
self,
*,
node_id=567,
name="Mock Node",
manufacturer_id="ABCD",
product_id="123",
product_type="678",
command_classes=None,
can_wake_up_value=True,
manufacturer_name="Test Manufacturer",
product_name="Test Product",
network=None,
**kwargs,
):
"""Initialize a Z-Wave mock node."""
super().__init__()
self.node_id = node_id
self.name = name
self.manufacturer_id = manufacturer_id
self.product_id = product_id
self.product_type = product_type
self.manufacturer_name = manufacturer_name
self.product_name = product_name
self.can_wake_up_value = can_wake_up_value
self._command_classes = command_classes or []
if network is not None:
self._network = network
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def has_command_class(self, command_class):
"""Test if mock has a command class."""
return command_class in self._command_classes
def get_battery_level(self):
"""Return mock battery level."""
return 42
def can_wake_up(self):
"""Return whether the node can wake up."""
return self.can_wake_up_value
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockValue(MagicMock):
"""Mock Z-Wave value."""
_mock_value_id = 1234
def __init__(
self,
*,
label="Mock Value",
node=None,
instance=0,
index=0,
value_id=None,
**kwargs,
):
"""Initialize a Z-Wave mock value."""
super().__init__()
self.label = label
self.node = node
self.instance = instance
self.index = index
if value_id is None:
MockValue._mock_value_id += 1
value_id = MockValue._mock_value_id
self.value_id = value_id
self.object_id = value_id
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
def refresh(self):
"""Mock refresh of node value."""
value_changed(self)
class MockEntityValues:
"""Mock Z-Wave entity values."""
def __init__(self, **kwargs):
"""Initialize the mock zwave values."""
self.primary = None
self.wakeup = None
self.battery = None
self.power = None
for name in kwargs:
setattr(self, name, kwargs[name])
def __iter__(self):
"""Allow iteration over all values."""
return iter(self.__dict__.values())
| node_changed | identifier_name |
zwave.py | """Mock helpers for Z-Wave component."""
from pydispatch import dispatcher
from tests.async_mock import MagicMock
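# These helpers emulate the pydispatch signals that a python-openzwave network would fire, letting tests drive entities without hardware.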
def value_changed(value):
"""Fire a value changed."""
dispatcher.send(
MockNetwork.SIGNAL_VALUE_CHANGED,
value=value,
node=value.node,
network=value.node._network,
)
def node_changed(node):
"""Fire a node changed."""
dispatcher.send(MockNetwork.SIGNAL_NODE, node=node, network=node._network)
def notification(node_id, network=None):
"""Fire a notification."""
dispatcher.send(
MockNetwork.SIGNAL_NOTIFICATION, args={"nodeId": node_id}, network=network
)
class MockOption(MagicMock):
"""Mock Z-Wave options."""
def __init__(self, device=None, config_path=None, user_path=None, cmd_line=None):
"""Initialize a Z-Wave mock options."""
super().__init__()
self.device = device
self.config_path = config_path
self.user_path = user_path
self.cmd_line = cmd_line
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockNetwork(MagicMock):
"""Mock Z-Wave network."""
SIGNAL_NETWORK_FAILED = "mock_NetworkFailed"
SIGNAL_NETWORK_STARTED = "mock_NetworkStarted"
SIGNAL_NETWORK_READY = "mock_NetworkReady"
SIGNAL_NETWORK_STOPPED = "mock_NetworkStopped"
SIGNAL_NETWORK_RESETTED = "mock_DriverResetted"
SIGNAL_NETWORK_AWAKED = "mock_DriverAwaked"
SIGNAL_DRIVER_FAILED = "mock_DriverFailed"
SIGNAL_DRIVER_READY = "mock_DriverReady"
SIGNAL_DRIVER_RESET = "mock_DriverReset"
SIGNAL_DRIVER_REMOVED = "mock_DriverRemoved"
SIGNAL_GROUP = "mock_Group"
SIGNAL_NODE = "mock_Node"
SIGNAL_NODE_ADDED = "mock_NodeAdded"
SIGNAL_NODE_EVENT = "mock_NodeEvent"
SIGNAL_NODE_NAMING = "mock_NodeNaming"
SIGNAL_NODE_NEW = "mock_NodeNew"
SIGNAL_NODE_PROTOCOL_INFO = "mock_NodeProtocolInfo"
SIGNAL_NODE_READY = "mock_NodeReady"
SIGNAL_NODE_REMOVED = "mock_NodeRemoved"
SIGNAL_SCENE_EVENT = "mock_SceneEvent"
SIGNAL_VALUE = "mock_Value"
SIGNAL_VALUE_ADDED = "mock_ValueAdded"
SIGNAL_VALUE_CHANGED = "mock_ValueChanged"
SIGNAL_VALUE_REFRESHED = "mock_ValueRefreshed"
SIGNAL_VALUE_REMOVED = "mock_ValueRemoved"
SIGNAL_POLLING_ENABLED = "mock_PollingEnabled"
SIGNAL_POLLING_DISABLED = "mock_PollingDisabled"
SIGNAL_CREATE_BUTTON = "mock_CreateButton"
SIGNAL_DELETE_BUTTON = "mock_DeleteButton"
SIGNAL_BUTTON_ON = "mock_ButtonOn"
SIGNAL_BUTTON_OFF = "mock_ButtonOff"
SIGNAL_ESSENTIAL_NODE_QUERIES_COMPLETE = "mock_EssentialNodeQueriesComplete"
SIGNAL_NODE_QUERIES_COMPLETE = "mock_NodeQueriesComplete"
SIGNAL_AWAKE_NODES_QUERIED = "mock_AwakeNodesQueried"
SIGNAL_ALL_NODES_QUERIED = "mock_AllNodesQueried"
SIGNAL_ALL_NODES_QUERIED_SOME_DEAD = "mock_AllNodesQueriedSomeDead"
SIGNAL_MSG_COMPLETE = "mock_MsgComplete"
SIGNAL_NOTIFICATION = "mock_Notification"
SIGNAL_CONTROLLER_COMMAND = "mock_ControllerCommand"
SIGNAL_CONTROLLER_WAITING = "mock_ControllerWaiting"
STATE_STOPPED = 0
STATE_FAILED = 1
STATE_RESETTED = 3
STATE_STARTED = 5
STATE_AWAKED = 7 |
def __init__(self, options=None, *args, **kwargs):
"""Initialize a Z-Wave mock network."""
super().__init__()
self.options = options
self.state = MockNetwork.STATE_STOPPED
class MockNode(MagicMock):
"""Mock Z-Wave node."""
def __init__(
self,
*,
node_id=567,
name="Mock Node",
manufacturer_id="ABCD",
product_id="123",
product_type="678",
command_classes=None,
can_wake_up_value=True,
manufacturer_name="Test Manufacturer",
product_name="Test Product",
network=None,
**kwargs,
):
"""Initialize a Z-Wave mock node."""
super().__init__()
self.node_id = node_id
self.name = name
self.manufacturer_id = manufacturer_id
self.product_id = product_id
self.product_type = product_type
self.manufacturer_name = manufacturer_name
self.product_name = product_name
self.can_wake_up_value = can_wake_up_value
self._command_classes = command_classes or []
if network is not None:
self._network = network
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def has_command_class(self, command_class):
"""Test if mock has a command class."""
return command_class in self._command_classes
def get_battery_level(self):
"""Return mock battery level."""
return 42
def can_wake_up(self):
"""Return whether the node can wake up."""
return self.can_wake_up_value
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockValue(MagicMock):
"""Mock Z-Wave value."""
_mock_value_id = 1234
def __init__(
self,
*,
label="Mock Value",
node=None,
instance=0,
index=0,
value_id=None,
**kwargs,
):
"""Initialize a Z-Wave mock value."""
super().__init__()
self.label = label
self.node = node
self.instance = instance
self.index = index
if value_id is None:
MockValue._mock_value_id += 1
value_id = MockValue._mock_value_id
self.value_id = value_id
self.object_id = value_id
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
def refresh(self):
"""Mock refresh of node value."""
value_changed(self)
class MockEntityValues:
"""Mock Z-Wave entity values."""
def __init__(self, **kwargs):
"""Initialize the mock zwave values."""
self.primary = None
self.wakeup = None
self.battery = None
self.power = None
for name in kwargs:
setattr(self, name, kwargs[name])
def __iter__(self):
"""Allow iteration over all values."""
return iter(self.__dict__.values()) | STATE_READY = 10 | random_line_split |
zwave.py | """Mock helpers for Z-Wave component."""
from pydispatch import dispatcher
from tests.async_mock import MagicMock
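# These helpers emulate the pydispatch signals that a python-openzwave network would fire, letting tests drive entities without hardware.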
def value_changed(value):
"""Fire a value changed."""
dispatcher.send(
MockNetwork.SIGNAL_VALUE_CHANGED,
value=value,
node=value.node,
network=value.node._network,
)
def node_changed(node):
"""Fire a node changed."""
dispatcher.send(MockNetwork.SIGNAL_NODE, node=node, network=node._network)
def notification(node_id, network=None):
"""Fire a notification."""
dispatcher.send(
MockNetwork.SIGNAL_NOTIFICATION, args={"nodeId": node_id}, network=network
)
class MockOption(MagicMock):
"""Mock Z-Wave options."""
def __init__(self, device=None, config_path=None, user_path=None, cmd_line=None):
"""Initialize a Z-Wave mock options."""
super().__init__()
self.device = device
self.config_path = config_path
self.user_path = user_path
self.cmd_line = cmd_line
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockNetwork(MagicMock):
"""Mock Z-Wave network."""
SIGNAL_NETWORK_FAILED = "mock_NetworkFailed"
SIGNAL_NETWORK_STARTED = "mock_NetworkStarted"
SIGNAL_NETWORK_READY = "mock_NetworkReady"
SIGNAL_NETWORK_STOPPED = "mock_NetworkStopped"
SIGNAL_NETWORK_RESETTED = "mock_DriverResetted"
SIGNAL_NETWORK_AWAKED = "mock_DriverAwaked"
SIGNAL_DRIVER_FAILED = "mock_DriverFailed"
SIGNAL_DRIVER_READY = "mock_DriverReady"
SIGNAL_DRIVER_RESET = "mock_DriverReset"
SIGNAL_DRIVER_REMOVED = "mock_DriverRemoved"
SIGNAL_GROUP = "mock_Group"
SIGNAL_NODE = "mock_Node"
SIGNAL_NODE_ADDED = "mock_NodeAdded"
SIGNAL_NODE_EVENT = "mock_NodeEvent"
SIGNAL_NODE_NAMING = "mock_NodeNaming"
SIGNAL_NODE_NEW = "mock_NodeNew"
SIGNAL_NODE_PROTOCOL_INFO = "mock_NodeProtocolInfo"
SIGNAL_NODE_READY = "mock_NodeReady"
SIGNAL_NODE_REMOVED = "mock_NodeRemoved"
SIGNAL_SCENE_EVENT = "mock_SceneEvent"
SIGNAL_VALUE = "mock_Value"
SIGNAL_VALUE_ADDED = "mock_ValueAdded"
SIGNAL_VALUE_CHANGED = "mock_ValueChanged"
SIGNAL_VALUE_REFRESHED = "mock_ValueRefreshed"
SIGNAL_VALUE_REMOVED = "mock_ValueRemoved"
SIGNAL_POLLING_ENABLED = "mock_PollingEnabled"
SIGNAL_POLLING_DISABLED = "mock_PollingDisabled"
SIGNAL_CREATE_BUTTON = "mock_CreateButton"
SIGNAL_DELETE_BUTTON = "mock_DeleteButton"
SIGNAL_BUTTON_ON = "mock_ButtonOn"
SIGNAL_BUTTON_OFF = "mock_ButtonOff"
SIGNAL_ESSENTIAL_NODE_QUERIES_COMPLETE = "mock_EssentialNodeQueriesComplete"
SIGNAL_NODE_QUERIES_COMPLETE = "mock_NodeQueriesComplete"
SIGNAL_AWAKE_NODES_QUERIED = "mock_AwakeNodesQueried"
SIGNAL_ALL_NODES_QUERIED = "mock_AllNodesQueried"
SIGNAL_ALL_NODES_QUERIED_SOME_DEAD = "mock_AllNodesQueriedSomeDead"
SIGNAL_MSG_COMPLETE = "mock_MsgComplete"
SIGNAL_NOTIFICATION = "mock_Notification"
SIGNAL_CONTROLLER_COMMAND = "mock_ControllerCommand"
SIGNAL_CONTROLLER_WAITING = "mock_ControllerWaiting"
STATE_STOPPED = 0
STATE_FAILED = 1
STATE_RESETTED = 3
STATE_STARTED = 5
STATE_AWAKED = 7
STATE_READY = 10
def __init__(self, options=None, *args, **kwargs):
"""Initialize a Z-Wave mock network."""
super().__init__()
self.options = options
self.state = MockNetwork.STATE_STOPPED
class MockNode(MagicMock):
| self.name = name
self.manufacturer_id = manufacturer_id
self.product_id = product_id
self.product_type = product_type
self.manufacturer_name = manufacturer_name
self.product_name = product_name
self.can_wake_up_value = can_wake_up_value
self._command_classes = command_classes or []
if network is not None:
self._network = network
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def has_command_class(self, command_class):
"""Test if mock has a command class."""
return command_class in self._command_classes
def get_battery_level(self):
"""Return mock battery level."""
return 42
def can_wake_up(self):
"""Return whether the node can wake up."""
return self.can_wake_up_value
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockValue(MagicMock):
"""Mock Z-Wave value."""
_mock_value_id = 1234
def __init__(
self,
*,
label="Mock Value",
node=None,
instance=0,
index=0,
value_id=None,
**kwargs,
):
"""Initialize a Z-Wave mock value."""
super().__init__()
self.label = label
self.node = node
self.instance = instance
self.index = index
if value_id is None:
MockValue._mock_value_id += 1
value_id = MockValue._mock_value_id
self.value_id = value_id
self.object_id = value_id
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
def refresh(self):
"""Mock refresh of node value."""
value_changed(self)
class MockEntityValues:
"""Mock Z-Wave entity values."""
def __init__(self, **kwargs):
"""Initialize the mock zwave values."""
self.primary = None
self.wakeup = None
self.battery = None
self.power = None
for name in kwargs:
setattr(self, name, kwargs[name])
def __iter__(self):
"""Allow iteration over all values."""
return iter(self.__dict__.values())
| """Mock Z-Wave node."""
def __init__(
self,
*,
node_id=567,
name="Mock Node",
manufacturer_id="ABCD",
product_id="123",
product_type="678",
command_classes=None,
can_wake_up_value=True,
manufacturer_name="Test Manufacturer",
product_name="Test Product",
network=None,
**kwargs,
):
"""Initialize a Z-Wave mock node."""
super().__init__()
self.node_id = node_id | identifier_body |
zwave.py | """Mock helpers for Z-Wave component."""
from pydispatch import dispatcher
from tests.async_mock import MagicMock
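# These helpers emulate the pydispatch signals that a python-openzwave network would fire, letting tests drive entities without hardware.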
def value_changed(value):
"""Fire a value changed."""
dispatcher.send(
MockNetwork.SIGNAL_VALUE_CHANGED,
value=value,
node=value.node,
network=value.node._network,
)
def node_changed(node):
"""Fire a node changed."""
dispatcher.send(MockNetwork.SIGNAL_NODE, node=node, network=node._network)
def notification(node_id, network=None):
"""Fire a notification."""
dispatcher.send(
MockNetwork.SIGNAL_NOTIFICATION, args={"nodeId": node_id}, network=network
)
class MockOption(MagicMock):
"""Mock Z-Wave options."""
def __init__(self, device=None, config_path=None, user_path=None, cmd_line=None):
"""Initialize a Z-Wave mock options."""
super().__init__()
self.device = device
self.config_path = config_path
self.user_path = user_path
self.cmd_line = cmd_line
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockNetwork(MagicMock):
"""Mock Z-Wave network."""
SIGNAL_NETWORK_FAILED = "mock_NetworkFailed"
SIGNAL_NETWORK_STARTED = "mock_NetworkStarted"
SIGNAL_NETWORK_READY = "mock_NetworkReady"
SIGNAL_NETWORK_STOPPED = "mock_NetworkStopped"
SIGNAL_NETWORK_RESETTED = "mock_DriverResetted"
SIGNAL_NETWORK_AWAKED = "mock_DriverAwaked"
SIGNAL_DRIVER_FAILED = "mock_DriverFailed"
SIGNAL_DRIVER_READY = "mock_DriverReady"
SIGNAL_DRIVER_RESET = "mock_DriverReset"
SIGNAL_DRIVER_REMOVED = "mock_DriverRemoved"
SIGNAL_GROUP = "mock_Group"
SIGNAL_NODE = "mock_Node"
SIGNAL_NODE_ADDED = "mock_NodeAdded"
SIGNAL_NODE_EVENT = "mock_NodeEvent"
SIGNAL_NODE_NAMING = "mock_NodeNaming"
SIGNAL_NODE_NEW = "mock_NodeNew"
SIGNAL_NODE_PROTOCOL_INFO = "mock_NodeProtocolInfo"
SIGNAL_NODE_READY = "mock_NodeReady"
SIGNAL_NODE_REMOVED = "mock_NodeRemoved"
SIGNAL_SCENE_EVENT = "mock_SceneEvent"
SIGNAL_VALUE = "mock_Value"
SIGNAL_VALUE_ADDED = "mock_ValueAdded"
SIGNAL_VALUE_CHANGED = "mock_ValueChanged"
SIGNAL_VALUE_REFRESHED = "mock_ValueRefreshed"
SIGNAL_VALUE_REMOVED = "mock_ValueRemoved"
SIGNAL_POLLING_ENABLED = "mock_PollingEnabled"
SIGNAL_POLLING_DISABLED = "mock_PollingDisabled"
SIGNAL_CREATE_BUTTON = "mock_CreateButton"
SIGNAL_DELETE_BUTTON = "mock_DeleteButton"
SIGNAL_BUTTON_ON = "mock_ButtonOn"
SIGNAL_BUTTON_OFF = "mock_ButtonOff"
SIGNAL_ESSENTIAL_NODE_QUERIES_COMPLETE = "mock_EssentialNodeQueriesComplete"
SIGNAL_NODE_QUERIES_COMPLETE = "mock_NodeQueriesComplete"
SIGNAL_AWAKE_NODES_QUERIED = "mock_AwakeNodesQueried"
SIGNAL_ALL_NODES_QUERIED = "mock_AllNodesQueried"
SIGNAL_ALL_NODES_QUERIED_SOME_DEAD = "mock_AllNodesQueriedSomeDead"
SIGNAL_MSG_COMPLETE = "mock_MsgComplete"
SIGNAL_NOTIFICATION = "mock_Notification"
SIGNAL_CONTROLLER_COMMAND = "mock_ControllerCommand"
SIGNAL_CONTROLLER_WAITING = "mock_ControllerWaiting"
STATE_STOPPED = 0
STATE_FAILED = 1
STATE_RESETTED = 3
STATE_STARTED = 5
STATE_AWAKED = 7
STATE_READY = 10
def __init__(self, options=None, *args, **kwargs):
"""Initialize a Z-Wave mock network."""
super().__init__()
self.options = options
self.state = MockNetwork.STATE_STOPPED
class MockNode(MagicMock):
"""Mock Z-Wave node."""
def __init__(
self,
*,
node_id=567,
name="Mock Node",
manufacturer_id="ABCD",
product_id="123",
product_type="678",
command_classes=None,
can_wake_up_value=True,
manufacturer_name="Test Manufacturer",
product_name="Test Product",
network=None,
**kwargs,
):
"""Initialize a Z-Wave mock node."""
super().__init__()
self.node_id = node_id
self.name = name
self.manufacturer_id = manufacturer_id
self.product_id = product_id
self.product_type = product_type
self.manufacturer_name = manufacturer_name
self.product_name = product_name
self.can_wake_up_value = can_wake_up_value
self._command_classes = command_classes or []
if network is not None:
self._network = network
for attr_name in kwargs:
|
def has_command_class(self, command_class):
"""Test if mock has a command class."""
return command_class in self._command_classes
def get_battery_level(self):
"""Return mock battery level."""
return 42
def can_wake_up(self):
"""Return whether the node can wake up."""
return self.can_wake_up_value
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
class MockValue(MagicMock):
"""Mock Z-Wave value."""
_mock_value_id = 1234
def __init__(
self,
*,
label="Mock Value",
node=None,
instance=0,
index=0,
value_id=None,
**kwargs,
):
"""Initialize a Z-Wave mock value."""
super().__init__()
self.label = label
self.node = node
self.instance = instance
self.index = index
if value_id is None:
MockValue._mock_value_id += 1
value_id = MockValue._mock_value_id
self.value_id = value_id
self.object_id = value_id
for attr_name in kwargs:
setattr(self, attr_name, kwargs[attr_name])
def _get_child_mock(self, **kw):
"""Create child mocks with right MagicMock class."""
return MagicMock(**kw)
def refresh(self):
"""Mock refresh of node value."""
value_changed(self)
class MockEntityValues:
"""Mock Z-Wave entity values."""
def __init__(self, **kwargs):
"""Initialize the mock zwave values."""
self.primary = None
self.wakeup = None
self.battery = None
self.power = None
for name in kwargs:
setattr(self, name, kwargs[name])
def __iter__(self):
"""Allow iteration over all values."""
return iter(self.__dict__.values())
| setattr(self, attr_name, kwargs[attr_name]) | conditional_block |
client.py | import json
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
class SocketClientProtocol(WebSocketClientProtocol):
def emit(self, event_name, **kwargs):
payload = self._format_outbound_data(event_name, **kwargs)
self.sendMessage(payload)
def _format_outbound_data(self, event, **kwargs):
""" Format outbound message as JSON """
message = {'event': event}
for key in kwargs.keys():
message[key] = kwargs.get(key)
return json.dumps(message).encode('utf8')
def onMessage(self, payload, isBinary):
self.factory.handle_message(self, payload)
class | (WebSocketClientFactory):
protocol = SocketClientProtocol
def __init__(self, *args, **kwargs):
WebSocketClientFactory.__init__(self, *args, **kwargs)
self.callbacks = {}
self.register_callbacks()
def register_callbacks(self):
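# Subclasses are expected to override this and register handlers via self.on('event_name', callback).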
pass
def on(self, event_name, callback):
self.callbacks[event_name] = callback
def fire_callback(self, client, event_name, **kwargs):
if event_name in self.callbacks:
self.callbacks[event_name](client, **kwargs)
def handle_message(self, client, message):
payload = self.parse_message(message)
if payload:
event = payload.pop('event')
self.fire_callback(client, event, **payload)
def parse_message(self, message):
payload = json.loads(message)
output = None
if 'event' in payload:
output = payload
return output
| BaseSocketClientFactory | identifier_name |
client.py | import json
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
class SocketClientProtocol(WebSocketClientProtocol):
def emit(self, event_name, **kwargs):
payload = self._format_outbound_data(event_name, **kwargs)
self.sendMessage(payload)
def _format_outbound_data(self, event, **kwargs):
""" Format outbound message as JSON """
message = {'event': event}
for key in kwargs.keys():
message[key] = kwargs.get(key)
return json.dumps(message).encode('utf8')
def onMessage(self, payload, isBinary):
self.factory.handle_message(self, payload)
class BaseSocketClientFactory(WebSocketClientFactory):
protocol = SocketClientProtocol
def __init__(self, *args, **kwargs):
WebSocketClientFactory.__init__(self, *args, **kwargs)
self.callbacks = {}
self.register_callbacks()
def register_callbacks(self):
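# Subclasses are expected to override this and register handlers via self.on('event_name', callback).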
pass
def on(self, event_name, callback):
self.callbacks[event_name] = callback
def fire_callback(self, client, event_name, **kwargs):
if event_name in self.callbacks:
self.callbacks[event_name](client, **kwargs)
def handle_message(self, client, message):
payload = self.parse_message(message)
if payload:
|
def parse_message(self, message):
payload = json.loads(message)
output = None
if 'event' in payload:
output = payload
return output
| event = payload.pop('event')
self.fire_callback(client, event, **payload) | conditional_block |
client.py | import json
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
class SocketClientProtocol(WebSocketClientProtocol):
def emit(self, event_name, **kwargs):
payload = self._format_outbound_data(event_name, **kwargs)
self.sendMessage(payload)
def _format_outbound_data(self, event, **kwargs):
""" Format outbound message as JSON """
message = {'event': event}
for key in kwargs.keys():
message[key] = kwargs.get(key)
return json.dumps(message).encode('utf8')
def onMessage(self, payload, isBinary):
self.factory.handle_message(self, payload)
class BaseSocketClientFactory(WebSocketClientFactory):
|
if payload:
event = payload.pop('event')
self.fire_callback(client, event, **payload)
def parse_message(self, message):
payload = json.loads(message)
output = None
if 'event' in payload:
output = payload
return output
| protocol = SocketClientProtocol
def __init__(self, *args, **kwargs):
WebSocketClientFactory.__init__(self, *args, **kwargs)
self.callbacks = {}
self.register_callbacks()
def register_callbacks(self):
pass
def on(self, event_name, callback):
self.callbacks[event_name] = callback
def fire_callback(self, client, event_name, **kwargs):
if event_name in self.callbacks:
self.callbacks[event_name](client, **kwargs)
def handle_message(self, client, message):
payload = self.parse_message(message) | identifier_body |
client.py | import json
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory |
class SocketClientProtocol(WebSocketClientProtocol):
def emit(self, event_name, **kwargs):
payload = self._format_outbound_data(event_name, **kwargs)
self.sendMessage(payload)
def _format_outbound_data(self, event, **kwargs):
""" Format outbound message as JSON """
message = {'event': event}
for key in kwargs.keys():
message[key] = kwargs.get(key)
return json.dumps(message).encode('utf8')
def onMessage(self, payload, isBinary):
self.factory.handle_message(self, payload)
class BaseSocketClientFactory(WebSocketClientFactory):
protocol = SocketClientProtocol
def __init__(self, *args, **kwargs):
WebSocketClientFactory.__init__(self, *args, **kwargs)
self.callbacks = {}
self.register_callbacks()
def register_callbacks(self):
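# Subclasses are expected to override this and register handlers via self.on('event_name', callback).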
pass
def on(self, event_name, callback):
self.callbacks[event_name] = callback
def fire_callback(self, client, event_name, **kwargs):
if event_name in self.callbacks:
self.callbacks[event_name](client, **kwargs)
def handle_message(self, client, message):
payload = self.parse_message(message)
if payload:
event = payload.pop('event')
self.fire_callback(client, event, **payload)
def parse_message(self, message):
payload = json.loads(message)
output = None
if 'event' in payload:
output = payload
return output | random_line_split |
|
oop_utils.rs | use super::Universe;
use super::oop::*;
use ast::sexpr::SExpr;
use std::fmt::{self, Formatter, Display};
// Format impl
unsafe fn fmt_oop(oop: Oop, u: &Universe, fmt: &mut Formatter) -> fmt::Result {
if oop == NULL_OOP {
write!(fmt, "<null>")?;
} else if Singleton::is_singleton(oop) {
write!(fmt, "{:?}", Singleton::from_oop(oop).unwrap())?;
} else if u.oop_is_fixnum(oop) {
let i = Fixnum::from_raw(oop);
write!(fmt, "{}", i.value())?;
} else if u.oop_is_pair(oop) {
let mut p = Pair::from_raw(oop);
write!(fmt, "({}", FmtOop(p.car, u))?;
while u.oop_is_pair(p.cdr) {
p = Pair::from_raw(p.cdr);
write!(fmt, " {}", FmtOop(p.car, u))?;
}
if Singleton::is_nil(p.cdr) {
write!(fmt, ")")?;
} else {
write!(fmt, " . {})", FmtOop(p.cdr, u))?;
}
} else if u.oop_is_symbol(oop) {
let s = Symbol::from_raw(oop);
write!(fmt, "{}", s.as_str())?;
} else if u.oop_is_closure(oop) | else if u.oop_is_mutbox(oop) { // assumed predicate name; the original repeated oop_is_closure here
let mb = MutBox::from_raw(oop);
write!(fmt, "<Box {} @{:#x}>", FmtOop(mb.value(), u), oop)?;
} else if u.oop_is_ooparray(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "[")?;
for (i, oop) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
fmt_oop(*oop, u, fmt)?;
}
write!(fmt, "]")?;
} else if u.oop_is_i64array(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "i64[")?;
for (i, val) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
write!(fmt, "{}", val)?;
}
write!(fmt, "]")?;
} else {
write!(fmt, "<UnknownOop {:#x}>", oop)?;
}
Ok(())
}
pub struct FmtOop<'a>(pub Oop, pub &'a Universe);
impl<'a> Display for FmtOop<'a> {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
unsafe { fmt_oop(self.0, self.1, fmt) }
}
}
pub fn oop_to_sexpr(_oop: Handle<Closure>, _u: &Universe) -> SExpr {
panic!("oop_to_sexpr: not implemenetd")
}
| {
let clo = Closure::from_raw(oop);
write!(fmt, "<Closure {} @{:#x}>", clo.info().name(), oop)?;
} | conditional_block |
oop_utils.rs | use super::Universe;
use super::oop::*;
use ast::sexpr::SExpr;
use std::fmt::{self, Formatter, Display};
// Format impl
unsafe fn fmt_oop(oop: Oop, u: &Universe, fmt: &mut Formatter) -> fmt::Result {
if oop == NULL_OOP {
write!(fmt, "<null>")?;
} else if Singleton::is_singleton(oop) {
write!(fmt, "{:?}", Singleton::from_oop(oop).unwrap())?;
} else if u.oop_is_fixnum(oop) {
let i = Fixnum::from_raw(oop);
write!(fmt, "{}", i.value())?;
} else if u.oop_is_pair(oop) {
let mut p = Pair::from_raw(oop);
write!(fmt, "({}", FmtOop(p.car, u))?;
while u.oop_is_pair(p.cdr) {
p = Pair::from_raw(p.cdr);
write!(fmt, " {}", FmtOop(p.car, u))?;
}
if Singleton::is_nil(p.cdr) {
write!(fmt, ")")?;
} else {
write!(fmt, " . {})", FmtOop(p.cdr, u))?;
}
} else if u.oop_is_symbol(oop) {
let s = Symbol::from_raw(oop);
write!(fmt, "{}", s.as_str())?;
} else if u.oop_is_closure(oop) {
let clo = Closure::from_raw(oop);
write!(fmt, "<Closure {} @{:#x}>", clo.info().name(), oop)?;
} else if u.oop_is_mutbox(oop) { // assumed predicate name; the original repeated oop_is_closure here
let mb = MutBox::from_raw(oop);
write!(fmt, "<Box {} @{:#x}>", FmtOop(mb.value(), u), oop)?;
} else if u.oop_is_ooparray(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "[")?;
for (i, oop) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
fmt_oop(*oop, u, fmt)?;
}
write!(fmt, "]")?;
} else if u.oop_is_i64array(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "i64[")?;
for (i, val) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
write!(fmt, "{}", val)?;
}
write!(fmt, "]")?;
} else {
write!(fmt, "<UnknownOop {:#x}>", oop)?;
}
Ok(())
}
pub struct FmtOop<'a>(pub Oop, pub &'a Universe);
impl<'a> Display for FmtOop<'a> {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
unsafe { fmt_oop(self.0, self.1, fmt) }
}
}
pub fn | (_oop: Handle<Closure>, _u: &Universe) -> SExpr {
panic!("oop_to_sexpr: not implemenetd")
}
| oop_to_sexpr | identifier_name |
oop_utils.rs | use super::Universe;
use super::oop::*;
use ast::sexpr::SExpr;
use std::fmt::{self, Formatter, Display};
// Format impl
unsafe fn fmt_oop(oop: Oop, u: &Universe, fmt: &mut Formatter) -> fmt::Result {
if oop == NULL_OOP {
write!(fmt, "<null>")?;
} else if Singleton::is_singleton(oop) {
write!(fmt, "{:?}", Singleton::from_oop(oop).unwrap())?;
} else if u.oop_is_fixnum(oop) {
let i = Fixnum::from_raw(oop);
write!(fmt, "{}", i.value())?;
} else if u.oop_is_pair(oop) {
let mut p = Pair::from_raw(oop);
write!(fmt, "({}", FmtOop(p.car, u))?;
while u.oop_is_pair(p.cdr) {
p = Pair::from_raw(p.cdr);
write!(fmt, " {}", FmtOop(p.car, u))?;
}
if Singleton::is_nil(p.cdr) {
write!(fmt, ")")?;
} else {
write!(fmt, " . {})", FmtOop(p.cdr, u))?;
}
} else if u.oop_is_symbol(oop) {
let s = Symbol::from_raw(oop);
write!(fmt, "{}", s.as_str())?;
} else if u.oop_is_closure(oop) {
let clo = Closure::from_raw(oop);
write!(fmt, "<Closure {} @{:#x}>", clo.info().name(), oop)?;
} else if u.oop_is_mutbox(oop) { // assumed predicate name; the original repeated oop_is_closure here
let mb = MutBox::from_raw(oop);
write!(fmt, "<Box {} @{:#x}>", FmtOop(mb.value(), u), oop)?;
} else if u.oop_is_ooparray(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "[")?;
for (i, oop) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
fmt_oop(*oop, u, fmt)?;
}
write!(fmt, "]")?;
} else if u.oop_is_i64array(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "i64[")?;
for (i, val) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
write!(fmt, "{}", val)?;
}
write!(fmt, "]")?;
} else {
write!(fmt, "<UnknownOop {:#x}>", oop)?;
}
Ok(())
}
pub struct FmtOop<'a>(pub Oop, pub &'a Universe);
impl<'a> Display for FmtOop<'a> {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
unsafe { fmt_oop(self.0, self.1, fmt) }
} | panic!("oop_to_sexpr: not implemenetd")
} | }
pub fn oop_to_sexpr(_oop: Handle<Closure>, _u: &Universe) -> SExpr { | random_line_split |
i2c_lcd.py | #!/usr/bin/env python
import time
import smbus
BUS = smbus.SMBus(1)
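# Assumed PCF8574 backpack wiring: P0=RS, P1=RW, P2=EN, P3=backlight, P4-P7=LCD data lines D4-D7.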
def write_word(addr, data):
global BLEN
temp = data
if BLEN == 1:
temp |= 0x08
else:
temp &= 0xF7
BUS.write_byte(addr ,temp)
def send_command(comm):
# Send bit7-4 firstly
buf = comm & 0xF0
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (comm & 0x0F) << 4
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def send_data(data):
# Send bit7-4 firstly
|
def init(addr, bl):
# global BUS
# BUS = smbus.SMBus(1)
global LCD_ADDR
global BLEN
LCD_ADDR = addr
BLEN = bl
try:
send_command(0x33) # Must initialize to 8-line mode at first
time.sleep(0.005)
send_command(0x32) # Then initialize to 4-line mode
time.sleep(0.005)
send_command(0x28) # 2 Lines & 5*7 dots
time.sleep(0.005)
send_command(0x0C) # Enable display without cursor
time.sleep(0.005)
send_command(0x01) # Clear Screen
BUS.write_byte(LCD_ADDR, 0x08)
except:
return False
else:
return True
def clear():
send_command(0x01) # Clear Screen
def openlight(): # Enable the backlight
BUS.write_byte(0x27,0x08)
BUS.close()
def write(x, y, str):
if x < 0:
x = 0
if x > 15:
x = 15
if y < 0:
y = 0
if y > 1:
y = 1
# Move cursor
addr = 0x80 + 0x40 * y + x
send_command(addr)
for chr in str:
send_data(ord(chr))
if __name__ == '__main__':
init(0x27, 1)
write(4, 0, 'Hello')
write(7, 1, 'world!')
| buf = data & 0xF0
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (data & 0x0F) << 4
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf) | identifier_body |
i2c_lcd.py | #!/usr/bin/env python
import time
import smbus
BUS = smbus.SMBus(1)
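# Assumed PCF8574 backpack wiring: P0=RS, P1=RW, P2=EN, P3=backlight, P4-P7=LCD data lines D4-D7.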
def write_word(addr, data):
global BLEN
temp = data
if BLEN == 1:
temp |= 0x08
else:
temp &= 0xF7
BUS.write_byte(addr ,temp)
def send_command(comm):
# Send bit7-4 firstly
buf = comm & 0xF0
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (comm & 0x0F) << 4
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def | (data):
# Send bit7-4 firstly
buf = data & 0xF0
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (data & 0x0F) << 4
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def init(addr, bl):
# global BUS
# BUS = smbus.SMBus(1)
global LCD_ADDR
global BLEN
LCD_ADDR = addr
BLEN = bl
try:
send_command(0x33) # Must initialize to 8-line mode at first
time.sleep(0.005)
send_command(0x32) # Then initialize to 4-line mode
time.sleep(0.005)
send_command(0x28) # 2 Lines & 5*7 dots
time.sleep(0.005)
send_command(0x0C) # Enable display without cursor
time.sleep(0.005)
send_command(0x01) # Clear Screen
BUS.write_byte(LCD_ADDR, 0x08)
except:
return False
else:
return True
def clear():
send_command(0x01) # Clear Screen
def openlight(): # Enable the backlight
BUS.write_byte(0x27,0x08)
BUS.close()
def write(x, y, str):
if x < 0:
x = 0
if x > 15:
x = 15
if y < 0:
y = 0
if y > 1:
y = 1
# Move cursor
addr = 0x80 + 0x40 * y + x
send_command(addr)
for chr in str:
send_data(ord(chr))
if __name__ == '__main__':
init(0x27, 1)
write(4, 0, 'Hello')
write(7, 1, 'world!')
| send_data | identifier_name |
i2c_lcd.py | #!/usr/bin/env python
import time
import smbus
BUS = smbus.SMBus(1)
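# Assumed PCF8574 backpack wiring: P0=RS, P1=RW, P2=EN, P3=backlight, P4-P7=LCD data lines D4-D7.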
def write_word(addr, data):
global BLEN
temp = data
if BLEN == 1:
temp |= 0x08
else:
temp &= 0xF7
BUS.write_byte(addr ,temp)
def send_command(comm):
# Send bit7-4 firstly
buf = comm & 0xF0
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0 | write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def send_data(data):
# Send bit7-4 firstly
buf = data & 0xF0
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (data & 0x0F) << 4
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def init(addr, bl):
# global BUS
# BUS = smbus.SMBus(1)
global LCD_ADDR
global BLEN
LCD_ADDR = addr
BLEN = bl
try:
send_command(0x33) # Must initialize to 8-line mode at first
time.sleep(0.005)
send_command(0x32) # Then initialize to 4-line mode
time.sleep(0.005)
send_command(0x28) # 2 Lines & 5*7 dots
time.sleep(0.005)
send_command(0x0C) # Enable display without cursor
time.sleep(0.005)
send_command(0x01) # Clear Screen
BUS.write_byte(LCD_ADDR, 0x08)
except:
return False
else:
return True
def clear():
send_command(0x01) # Clear Screen
def openlight(): # Enable the backlight
BUS.write_byte(0x27,0x08)
BUS.close()
def write(x, y, str):
if x < 0:
x = 0
if x > 15:
x = 15
if y < 0:
y = 0
if y > 1:
y = 1
# Move cursor
addr = 0x80 + 0x40 * y + x
send_command(addr)
for chr in str:
send_data(ord(chr))
if __name__ == '__main__':
init(0x27, 1)
write(4, 0, 'Hello')
write(7, 1, 'world!') | write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (comm & 0x0F) << 4
buf |= 0x04 # RS = 0, RW = 0, EN = 1 | random_line_split |
i2c_lcd.py | #!/usr/bin/env python
import time
import smbus
BUS = smbus.SMBus(1)
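# Assumed PCF8574 backpack wiring: P0=RS, P1=RW, P2=EN, P3=backlight, P4-P7=LCD data lines D4-D7.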
def write_word(addr, data):
global BLEN
temp = data
if BLEN == 1:
temp |= 0x08
else:
temp &= 0xF7
BUS.write_byte(addr ,temp)
def send_command(comm):
# Send bit7-4 firstly
buf = comm & 0xF0
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (comm & 0x0F) << 4
buf |= 0x04 # RS = 0, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def send_data(data):
# Send bit7-4 firstly
buf = data & 0xF0
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
# Send bit3-0 secondly
buf = (data & 0x0F) << 4
buf |= 0x05 # RS = 1, RW = 0, EN = 1
write_word(LCD_ADDR ,buf)
time.sleep(0.002)
buf &= 0xFB # Make EN = 0
write_word(LCD_ADDR ,buf)
def init(addr, bl):
# global BUS
# BUS = smbus.SMBus(1)
global LCD_ADDR
global BLEN
LCD_ADDR = addr
BLEN = bl
try:
send_command(0x33) # Must initialize to 8-line mode at first
time.sleep(0.005)
send_command(0x32) # Then initialize to 4-line mode
time.sleep(0.005)
send_command(0x28) # 2 Lines & 5*7 dots
time.sleep(0.005)
send_command(0x0C) # Enable display without cursor
time.sleep(0.005)
send_command(0x01) # Clear Screen
BUS.write_byte(LCD_ADDR, 0x08)
except Exception: # avoid a bare except; any failure here means init failed
return False
else:
|
def clear():
send_command(0x01) # Clear Screen
def openlight(): # Enable the backlight
BUS.write_byte(LCD_ADDR, 0x08) # use the configured address, not hard-coded 0x27; closing the bus here would break all later writes
def write(x, y, str):
if x < 0:
x = 0
if x > 15:
x = 15
if y <0:
y = 0
if y > 1:
y = 1
# Move cursor
addr = 0x80 + 0x40 * y + x
send_command(addr)
for chr in str:
send_data(ord(chr))
if __name__ == '__main__':
init(0x27, 1)
write(4, 0, 'Hello')
write(7, 1, 'world!')
| return True | conditional_block |
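The init() sequence repeated in these samples is the standard HD44780 bring-up dance. A short annotated map of the command bytes (meanings assumed from the HD44780 datasheet, not stated in the code itself):
INIT_SEQUENCE = {
    0x33: 'resync: force 8-bit mode from any unknown state',
    0x32: 'then drop to 4-bit mode',
    0x28: 'function set: 4-bit bus, 2 lines, 5x7 font',
    0x0C: 'display on, cursor off, blink off',
    0x01: 'clear display (a slow command; hence the sleeps)',
}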
makepot.js | /*
* grunt-wp-i18n
* https://github.com/cedaro/grunt-wp-i18n
*
* Copyright (c) 2014 Cedaro, LLC
* Licensed under the MIT license.
*/
'use strict';
module.exports = function( grunt ) {
var _ = require( 'underscore' ),
gettext = require( 'gettext-parser' ),
path = require( 'path' ),
pkg = require( '../package.json' ),
util = require( './lib/util' ).init( grunt ),
localConfig = util.getLocalConfig(),
wp = require( './lib/wordpress' ).init( grunt),
msgMerge = require( './lib/msgmerge' ).init( grunt ),
async = require( 'async' );
// Mix no-conflict string functions into the Underscore namespace.
_.str = require( 'underscore.string' );
_.mixin( _.str.exports() );
/**
* Generate a POT file for translating strings.
*
* php-cli should be in the system path to run this task.
*
* @link http://develop.svn.wordpress.org/trunk/tools/i18n/
*/
grunt.registerMultiTask( 'makepot', 'Generate a POT file for translating strings.', function() {
var done = this.async(),
defaultI18nToolsPath = path.resolve( __dirname, '../vendor/wp-i18n-tools/' ),
gruntBase = process.cwd(),
cmdArgs, o, originalPot;
o = this.options({
cwd: process.cwd(),
domainPath: '',
exclude: [],
include: [],
i18nToolsPath: defaultI18nToolsPath,
mainFile: '',
potComments: '',
potFilename: '',
potHeaders: {},
processPot: null,
type: 'wp-plugin',
updateTimestamp: true,
updatePoFiles: false
});
// Set the current working directory.
o.cwd = path.resolve( process.cwd(), o.cwd );
grunt.file.setBase( o.cwd );
// Attempt to discover the main project file.
if ( '' === o.mainFile ) {
o.mainFile = wp.getMainFile( o.type );
}
// Use Domain Path header if the domain path hasn't been set.
if ( '' === o.domainPath ) |
// Use the Text Domain header or project folder name
// for the pot file if it hasn't been set.
if ( '' === o.potFilename ) {
o.potFilename = wp.getHeader( 'Text Domain', o.mainFile ) + '.pot' || wp.slugify() + '.pot';
}
o.domainPath = _.ltrim( o.domainPath, [ '/', '\\' ] );
o.i18nToolsPath = localConfig.i18nToolsPath || o.i18nToolsPath;
o.potFile = path.join( o.cwd, o.domainPath, o.potFilename );
// Make sure the makepot.php script exists.
o.makepotScript = path.join( o.i18nToolsPath, 'makepot.php' );
if ( ! grunt.file.exists( o.makepotScript ) ) {
grunt.fatal( 'makepot.php could not be found in ' + o.i18nToolsPath );
}
// Create the domain path directory if it doesn't exist.
grunt.file.mkdir( path.resolve( o.cwd, o.domainPath ) );
// Reset the working directory.
grunt.file.setBase( gruntBase );
// Exclude the node_modules directory by default.
o.exclude.push( 'node_modules/.*' );
// Build the list of CLI args.
cmdArgs = [
o.makepotScript,
o.type,
o.cwd,
o.potFile
];
if ( defaultI18nToolsPath === o.i18nToolsPath ) {
// Use the custom CLI script that extends makepot.php.
o.makepotScript = path.join( o.i18nToolsPath, 'grunt-makepot.php' );
// Only add custom CLI args if using the bundled tools.
cmdArgs[0] = o.makepotScript;
cmdArgs.push( o.mainFile.split( '.' ).shift() );
cmdArgs.push( o.exclude.join( ',' ) );
cmdArgs.push( o.include.join( ',' ) );
}
// Parse the existing POT file to compare for changes.
if ( ! o.updateTimestamp && grunt.file.exists( o.potFile ) ) {
originalPot = gettext.po.parse( grunt.file.read( o.potFile ) );
}
grunt.util.spawn({
cmd: 'php',
args: cmdArgs,
opts: { stdio: 'inherit' }
}, function( error, result, code ) {
var matches, pattern, pot, poFiles;
if ( 0 === code && grunt.file.exists( o.potFile ) ) {
pot = grunt.file.read( o.potFile );
// Update the comments header.
pattern = /# <!=([\s\S]+?)=!>/;
if ( '' === o.potComments && ( matches = pot.match( pattern ) ) ) {
o.potComments = matches[1];
}
o.potComments = '# ' + o.potComments.replace( /\n(# )?/g, '\n# ' ).replace( '{year}', new Date().getFullYear() );
pot = pot.replace( pattern, o.potComments );
// Remove duplicate entries from the POT file.
pot = gettext.po.parse( pot );
// Merge custom headers.
pot.headers['x-generator'] = 'grunt-wp-i18n ' + pkg.version;
pot = util.mergeHeaders( pot, o.potHeaders );
// Allow the POT file to be modified with a callback.
if ( _.isFunction( o.processPot ) ) {
pot = o.processPot.call( undefined, pot, o );
}
// Determine if the creation date is the only thing that changed.
if ( ! o.updateTimestamp && ! _.isUndefined( originalPot ) ) {
pot = util.comparePotFiles( originalPot, pot ) ? originalPot : pot;
}
// Fix headers.
pot = gettext.po.compile( pot ).toString();
pot = util.fixHeaders( pot );
// Save the POT file.
grunt.file.write( o.potFile, pot );
grunt.log.ok( 'POT file saved to ' + path.relative( process.cwd(), o.potFile ) );
// Maybe update .po files
if ( o.updatePoFiles ) {
poFiles = msgMerge.searchPoFiles( o.potFile, o.type );
async.eachSeries( poFiles, function( poFile, done ) {
msgMerge.msgMerge( o.potFile, poFile, done );
}, done );
} else {
done( error, result );
}
} else {
done( error, result );
}
});
});
};
| {
o.domainPath = wp.getHeader( 'Domain Path', o.mainFile );
} | conditional_block |
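The makepot task reads everything through this.options() with the defaults shown above, so a Gruntfile only overrides what differs. A hypothetical configuration sketch (the plugin name, paths, and text domain are invented):
// Gruntfile.js — hypothetical values for illustration only
module.exports = function( grunt ) {
    grunt.initConfig({
        makepot: {
            target: {
                options: {
                    domainPath: 'languages',
                    mainFile: 'my-plugin.php',
                    potFilename: 'my-plugin.pot',
                    type: 'wp-plugin',
                    updateTimestamp: false // keep the POT stable when only the date changes
                }
            }
        }
    });
    grunt.loadNpmTasks( 'grunt-wp-i18n' );
};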
makepot.js | /*
* grunt-wp-i18n
* https://github.com/cedaro/grunt-wp-i18n
*
* Copyright (c) 2014 Cedaro, LLC
* Licensed under the MIT license.
*/
'use strict';
module.exports = function( grunt ) {
var _ = require( 'underscore' ),
gettext = require( 'gettext-parser' ),
path = require( 'path' ),
pkg = require( '../package.json' ),
util = require( './lib/util' ).init( grunt ),
localConfig = util.getLocalConfig(),
wp = require( './lib/wordpress' ).init( grunt),
msgMerge = require( './lib/msgmerge' ).init( grunt ),
async = require( 'async' );
// Mix no-conflict string functions into the Underscore namespace.
_.str = require( 'underscore.string' );
_.mixin( _.str.exports() );
/**
* Generate a POT file for translating strings.
*
* php-cli should be in the system path to run this task.
*
* @link http://develop.svn.wordpress.org/trunk/tools/i18n/
*/
grunt.registerMultiTask( 'makepot', 'Generate a POT file for translating strings.', function() {
var done = this.async(),
defaultI18nToolsPath = path.resolve( __dirname, '../vendor/wp-i18n-tools/' ),
gruntBase = process.cwd(),
cmdArgs, o, originalPot;
o = this.options({
cwd: process.cwd(),
domainPath: '',
exclude: [],
include: [],
i18nToolsPath: defaultI18nToolsPath,
mainFile: '',
potComments: '',
potFilename: '',
potHeaders: {},
processPot: null,
type: 'wp-plugin',
updateTimestamp: true,
updatePoFiles: false
});
// Set the current working directory.
o.cwd = path.resolve( process.cwd(), o.cwd );
grunt.file.setBase( o.cwd );
// Attempt to discover the main project file.
if ( '' === o.mainFile ) {
o.mainFile = wp.getMainFile( o.type );
}
// Use Domain Path header if the domain path hasn't been set.
if ( '' === o.domainPath ) {
o.domainPath = wp.getHeader( 'Domain Path', o.mainFile );
}
// Use the Text Domain header or project folder name
// for the pot file if it hasn't been set.
if ( '' === o.potFilename ) {
o.potFilename = wp.getHeader( 'Text Domain', o.mainFile ) + '.pot' || wp.slugify() + '.pot';
}
o.domainPath = _.ltrim( o.domainPath, [ '/', '\\' ] );
o.i18nToolsPath = localConfig.i18nToolsPath || o.i18nToolsPath;
o.potFile = path.join( o.cwd, o.domainPath, o.potFilename );
// Make sure the makepot.php script exists.
o.makepotScript = path.join( o.i18nToolsPath, 'makepot.php' );
if ( ! grunt.file.exists( o.makepotScript ) ) {
grunt.fatal( 'makepot.php could not be found in ' + o.i18nToolsPath );
}
// Create the domain path directory if it doesn't exist.
grunt.file.mkdir( path.resolve( o.cwd, o.domainPath ) );
// Reset the working directory.
grunt.file.setBase( gruntBase );
// Exclude the node_modules directory by default.
o.exclude.push( 'node_modules/.*' );
// Build the list of CLI args.
cmdArgs = [
o.makepotScript,
o.type,
o.cwd,
o.potFile
];
if ( defaultI18nToolsPath === o.i18nToolsPath ) {
// Use the custom CLI script that extends makepot.php.
o.makepotScript = path.join( o.i18nToolsPath, 'grunt-makepot.php' );
// Only add custom CLI args if using the bundled tools.
cmdArgs[0] = o.makepotScript;
cmdArgs.push( o.mainFile.split( '.' ).shift() );
cmdArgs.push( o.exclude.join( ',' ) );
cmdArgs.push( o.include.join( ',' ) );
}
// Parse the existing POT file to compare for changes.
if ( ! o.updateTimestamp && grunt.file.exists( o.potFile ) ) {
originalPot = gettext.po.parse( grunt.file.read( o.potFile ) );
}
grunt.util.spawn({
cmd: 'php',
args: cmdArgs,
opts: { stdio: 'inherit' }
}, function( error, result, code ) {
var matches, pattern, pot, poFiles;
if ( 0 === code && grunt.file.exists( o.potFile ) ) {
pot = grunt.file.read( o.potFile );
| pattern = /# <!=([\s\S]+?)=!>/;
if ( '' === o.potComments && ( matches = pot.match( pattern ) ) ) {
o.potComments = matches[1];
}
o.potComments = '# ' + o.potComments.replace( /\n(# )?/g, '\n# ' ).replace( '{year}', new Date().getFullYear() );
pot = pot.replace( pattern, o.potComments );
// Remove duplicate entries from the POT file.
pot = gettext.po.parse( pot );
// Merge custom headers.
pot.headers['x-generator'] = 'grunt-wp-i18n ' + pkg.version;
pot = util.mergeHeaders( pot, o.potHeaders );
// Allow the POT file to be modified with a callback.
if ( _.isFunction( o.processPot ) ) {
pot = o.processPot.call( undefined, pot, o );
}
// Determine if the creation date is the only thing that changed.
if ( ! o.updateTimestamp && ! _.isUndefined( originalPot ) ) {
pot = util.comparePotFiles( originalPot, pot ) ? originalPot : pot;
}
// Fix headers.
pot = gettext.po.compile( pot ).toString();
pot = util.fixHeaders( pot );
// Save the POT file.
grunt.file.write( o.potFile, pot );
grunt.log.ok( 'POT file saved to ' + path.relative( process.cwd(), o.potFile ) );
// Maybe update .po files
if ( o.updatePoFiles ) {
poFiles = msgMerge.searchPoFiles( o.potFile, o.type );
async.eachSeries( poFiles, function( poFile, done ) {
msgMerge.msgMerge( o.potFile, poFile, done );
}, done );
} else {
done( error, result );
}
} else {
done( error, result );
}
});
});
}; |
// Update the comments header.
| random_line_split |
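Since processPot receives the parsed gettext object before it is compiled back to disk, header tweaks need no extra tooling. A small hedged example (the header value is made up):
// Hypothetical processPot callback; pot comes from gettext-parser's po.parse().
function addBugReportHeader( pot, options ) {
    pot.headers['report-msgid-bugs-to'] = 'https://example.com/issues';
    return pot; // must return the (possibly modified) pot object
}
// wired up in the task config as: options: { processPot: addBugReportHeader }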
resource.js | /* global phantom, exports, require, console */
(function () {
'use strict';
exports.resolveUrl = function (url, verbose) {
var fs = require('fs');
// assume http if no protocol is specified
// and we're not looking at a local file
if (!url.match(/:\/\//)) {
if (!fs.exists(url)) {
url = 'http://' + url;
if (verbose) {
console.log('Missing protocol, assuming http');
}
} else if (verbose) {
console.log('"' + url + '" exists locally, using that.');
console.log('Prepend a protocol (e.g. http:// or https://) to override this behavior');
}
}
return url;
};
exports.loadWithLibs = function (url, verbose, onload, width) {
var page = require('webpage').create();
if (width) {
page.viewportSize = { width: width, height: 800 };
}
page.open(url, function (status) {
if (status !== 'success' && verbose) {
console.log('Failed to load "' + url + '"');
phantom.exit();
} else {
if(page.evaluate(function () { return typeof jQuery; }) !== 'function') {
page.injectJs('vendor/jquery-1.8.2.js');
}
if(page.evaluate(function () { return typeof _; }) !== 'function') {
page.injectJs('vendor/underscore-1.4.2.js');
}
page.injectJs('lib/obj.js');
page.injectJs('lib/css.js');
page.onConsoleMessage = function (msg) {
console.log(msg);
};
page.evaluate(function () {
/* global $ */
if(window.fullyLoaded !== true) { // do not check twice
window.fullyLoaded = false;
$(function () { window.fullyLoaded = true; });
}
});
politelyWait(
function () {
return page.evaluate(function () { return window.fullyLoaded; }) === true;
},
function () { // aka _.partial(onload, page)
// Their page got to load so now blast the newest vanilla jquery in there
page.injectJs('vendor/jquery-1.8.2.js');
page.injectJs('vendor/underscore-1.4.2.js');
onload(page);
}
);
}
});
};
function | (testFx, onReady, timeOutMillis) {
var maxtimeOutMillis = timeOutMillis ? timeOutMillis : 3000, //< Default Max Timeout is 3s
start = new Date().getTime(),
condition = false,
interval = setInterval(function() {
if ( (new Date().getTime() - start < maxtimeOutMillis) && !condition ) {
// If not time-out yet and condition not yet fulfilled
condition = testFx(); //< defensive code
} else {
if(!condition) {
// If condition still not fulfilled (timeout but condition is 'false')
// then just go ahead, we gave it some time
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< also stop polling, so onReady fires only once on timeout
} else {
// Condition fulfilled (timeout and/or condition is 'true')
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< Stop this interval
}
}
}, 250); //< repeat check every 250ms
}
}());
| politelyWait | identifier_name |
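politelyWait() is the familiar PhantomJS waitFor pattern with one twist: on timeout it still calls onReady instead of aborting. A usage sketch outside loadWithLibs (the flag is invented):
var flagSeen = false; // something elsewhere sets this when work is done
politelyWait(
    function () { return flagSeen; },                          // testFx, polled every 250ms
    function () { console.log('proceeding, ready or not'); },  // onReady
    3000
);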
resource.js | /* global phantom, exports, require, console */
(function () {
'use strict';
exports.resolveUrl = function (url, verbose) {
var fs = require('fs');
// assume http if no protocol is specified
// and we're not looking at a local file
if (!url.match(/:\/\//)) {
if (!fs.exists(url)) {
url = 'http://' + url;
if (verbose) {
console.log('Missing protocol, assuming http');
}
} else if (verbose) {
console.log('"' + url + '" exists locally, using that.');
console.log('Prepend a protocol (e.g. http:// or https://) to override this behavior');
}
}
return url;
};
exports.loadWithLibs = function (url, verbose, onload, width) {
var page = require('webpage').create();
if (width) {
page.viewportSize = { width: width, height: 800 };
}
page.open(url, function (status) {
if (status !== 'success' && verbose) {
console.log('Failed to load "' + url + '"');
phantom.exit();
} else {
if(page.evaluate(function () { return typeof jQuery; }) !== 'function') {
page.injectJs('vendor/jquery-1.8.2.js');
}
if(page.evaluate(function () { return typeof _; }) !== 'function') |
page.injectJs('lib/obj.js');
page.injectJs('lib/css.js');
page.onConsoleMessage = function (msg) {
console.log(msg);
};
page.evaluate(function () {
/* global $ */
if(window.fullyLoaded !== true) { // do not check twice
window.fullyLoaded = false;
$(function () { window.fullyLoaded = true; });
}
});
politelyWait(
function () {
return page.evaluate(function () { return window.fullyLoaded; }) === true;
},
function () { // aka _.partial(onload, page)
// Their page got to load so now blast the newest vanilla jquery in there
page.injectJs('vendor/jquery-1.8.2.js');
page.injectJs('vendor/underscore-1.4.2.js');
onload(page);
}
);
}
});
};
function politelyWait(testFx, onReady, timeOutMillis) {
var maxtimeOutMillis = timeOutMillis ? timeOutMillis : 3000, //< Default Max Timeout is 3s
start = new Date().getTime(),
condition = false,
interval = setInterval(function() {
if ( (new Date().getTime() - start < maxtimeOutMillis) && !condition ) {
// If not time-out yet and condition not yet fulfilled
condition = testFx(); //< defensive code
} else {
if(!condition) {
// If condition still not fulfilled (timeout but condition is 'false')
// then just go ahead, we gave it some time
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< also stop polling, so onReady fires only once on timeout
} else {
// Condition fulfilled (timeout and/or condition is 'true')
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< Stop this interval
}
}
}, 250); //< repeat check every 250ms
}
}());
| {
page.injectJs('vendor/underscore-1.4.2.js');
} | conditional_block |
resource.js | /* global phantom, exports, require, console */
(function () {
'use strict';
exports.resolveUrl = function (url, verbose) {
var fs = require('fs');
// assume http if no protocol is specified
// and we're not looking at a local file
if (!url.match(/:\/\//)) {
if (!fs.exists(url)) {
url = 'http://' + url;
if (verbose) {
console.log('Missing protocol, assuming http');
}
} else if (verbose) {
console.log('"' + url + '" exists locally, using that.');
console.log('Prepend a protocol (e.g. http:// or https://) to override this behavior');
}
}
return url;
};
exports.loadWithLibs = function (url, verbose, onload, width) {
var page = require('webpage').create();
if (width) {
page.viewportSize = { width: width, height: 800 };
}
page.open(url, function (status) {
if (status !== 'success' && verbose) {
console.log('Failed to load "' + url + '"');
phantom.exit();
} else {
if(page.evaluate(function () { return typeof jQuery; }) !== 'function') {
page.injectJs('vendor/jquery-1.8.2.js');
}
if(page.evaluate(function () { return typeof _; }) !== 'function') {
page.injectJs('vendor/underscore-1.4.2.js');
}
page.injectJs('lib/obj.js');
page.injectJs('lib/css.js');
page.onConsoleMessage = function (msg) {
console.log(msg);
};
page.evaluate(function () {
/* global $ */
if(window.fullyLoaded !== true) { // do not check twice
window.fullyLoaded = false;
$(function () { window.fullyLoaded = true; });
}
});
politelyWait(
function () {
return page.evaluate(function () { return window.fullyLoaded; }) === true;
},
function () { // aka _.partial(onload, page)
// Their page got to load so now blast the newest vanilla jquery in there
page.injectJs('vendor/jquery-1.8.2.js');
page.injectJs('vendor/underscore-1.4.2.js');
onload(page);
}
);
}
});
};
function politelyWait(testFx, onReady, timeOutMillis) {
var maxtimeOutMillis = timeOutMillis ? timeOutMillis : 3000, //< Default Max Timeout is 3s
start = new Date().getTime(),
condition = false,
interval = setInterval(function() { | // If not time-out yet and condition not yet fulfilled
condition = testFx(); //< defensive code
} else {
if(!condition) {
// If condition still not fulfilled (timeout but condition is 'false')
// then just go ahead, we gave it some time
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< also stop polling, so onReady fires only once on timeout
} else {
// Condition fulfilled (timeout and/or condition is 'true')
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< Stop this interval
}
}
}, 250); //< repeat check every 250ms
}
}()); | if ( (new Date().getTime() - start < maxtimeOutMillis) && !condition ) { | random_line_split |
resource.js | /* global phantom, exports, require, console */
(function () {
'use strict';
exports.resolveUrl = function (url, verbose) {
var fs = require('fs');
// assume http if no protocol is specified
// and we're not looking at a local file
if (!url.match(/:\/\//)) {
if (!fs.exists(url)) {
url = 'http://' + url;
if (verbose) {
console.log('Missing protocol, assuming http');
}
} else if (verbose) {
console.log('"' + url + '" exists locally, using that.');
console.log('Prepend a protocol (e.g. http:// or https://) to override this behavior');
}
}
return url;
};
exports.loadWithLibs = function (url, verbose, onload, width) {
var page = require('webpage').create();
if (width) {
page.viewportSize = { width: width, height: 800 };
}
page.open(url, function (status) {
if (status !== 'success' && verbose) {
console.log('Failed to load "' + url + '"');
phantom.exit();
} else {
if(page.evaluate(function () { return typeof jQuery; }) !== 'function') {
page.injectJs('vendor/jquery-1.8.2.js');
}
if(page.evaluate(function () { return typeof _; }) !== 'function') {
page.injectJs('vendor/underscore-1.4.2.js');
}
page.injectJs('lib/obj.js');
page.injectJs('lib/css.js');
page.onConsoleMessage = function (msg) {
console.log(msg);
};
page.evaluate(function () {
/* global $ */
if(window.fullyLoaded !== true) { // do not check twice
window.fullyLoaded = false;
$(function () { window.fullyLoaded = true; });
}
});
politelyWait(
function () {
return page.evaluate(function () { return window.fullyLoaded; }) === true;
},
function () { // aka _.partial(onload, page)
// Their page got to load so now blast the newest vanilla jquery in there
page.injectJs('vendor/jquery-1.8.2.js');
page.injectJs('vendor/underscore-1.4.2.js');
onload(page);
}
);
}
});
};
function politelyWait(testFx, onReady, timeOutMillis) | }
}());
| {
var maxtimeOutMillis = timeOutMillis ? timeOutMillis : 3000, //< Default Max Timout is 3s
start = new Date().getTime(),
condition = false,
interval = setInterval(function() {
if ( (new Date().getTime() - start < maxtimeOutMillis) && !condition ) {
// If not time-out yet and condition not yet fulfilled
condition = testFx(); //< defensive code
} else {
if(!condition) {
// If condition still not fulfilled (timeout but condition is 'false')
// then just go ahead, we gave it some time
onReady(); //< Do what it's supposed to do once the condition is fulfilled
} else {
// Condition fulfilled (timeout and/or condition is 'true')
onReady(); //< Do what it's supposed to do once the condition is fulfilled
clearInterval(interval); //< Stop this interval
}
}
}, 250); //< repeat check every 250ms | identifier_body |
main.rs | extern crate crypto;
extern crate hyper;
extern crate rustc_serialize;
extern crate rand;
mod hmac_sha1;
use hyper::server::{Server, Request, Response};
use hyper::status::StatusCode;
use hyper::net::Fresh;
use hyper::uri::RequestUri::AbsolutePath;
const HOST: &'static str = "localhost:9000";
const DELAY: u32 = 1;
fn main() {
let key = gen_key();
println!("Key: {} (len {})", format_hex(&key[..]), key.len());
let server = Server::http(HOST).unwrap();
println!("test.txt hmac: {} (Shhhh!)",
format_hex(&file_hmac(&key[..], "test.txt").unwrap()[..]));
println!("Listening on port 9000");
server.handle(
move |req: Request, res: Response| {
handle_request(&key[..], req, res)
}
).unwrap();
}
fn format_hex(hex: &[u8]) -> String {
use std::fmt::Write;
let mut s = String::new();
for el in hex.iter() {
write!(&mut s, "{:02x}", el).unwrap();
}
s
}
fn gen_key() -> Vec<u8> {
use rand::Rng;
let mut rng = rand::thread_rng();
let key_len = rng.gen_range(10, 256);
rng.gen_iter().take(key_len).collect()
}
fn handle_request(key: &[u8], req: Request, mut res: Response<Fresh>) {
match req.method {
hyper::Get => {
match req.uri {
AbsolutePath(path) => *res.status_mut() = handle_path(key, &path[..]),
_ => *res.status_mut() = StatusCode::NotFound,
}
},
_ => *res.status_mut() = StatusCode::MethodNotAllowed,
}
send_response(res);
}
fn handle_path(key: &[u8], path: &str) -> StatusCode {
let full_path = format!("http://{}/{}", HOST, path);
match hyper::Url::parse(&full_path[..]).ok().and_then(|url| url.query_pairs()) {
Some(pairs) => {
if pairs.len() == 2 {
let (ref arg1, ref filename) = pairs[0];
let (ref arg2, ref signature) = pairs[1];
if &arg1[..]=="file" && &arg2[..]=="signature" {
check_signature(key, &filename[..], &signature[..])
}
else { StatusCode::BadRequest }
}
else { StatusCode::BadRequest }
},
_ => StatusCode::NotFound,
}
}
fn send_response(res: Response) {
match res.status() {
StatusCode::Ok =>
{ res.send(b"<h1>Server says everything is a-okay</h1>\n").unwrap(); },
StatusCode::BadRequest =>
{ res.send(b"<h1>400: Bad Request</h1>\n").unwrap(); },
StatusCode::NotFound =>
{ res.send(b"<h1>404: Not Found</h1>\n").unwrap(); },
StatusCode::MethodNotAllowed =>
{ res.send(b"<h1>405: Method Not Allowed</h1>\n").unwrap(); },
StatusCode::InternalServerError =>
{ res.send(b"<h1>500: Internal Server Error</h1>\n").unwrap(); },
_ => {},
}
}
fn check_signature(key: &[u8], filename: &str, signature: &str) -> StatusCode {
use rustc_serialize::hex::FromHex;
let parsed_signature = match signature.from_hex() {
Ok(sig) => sig,
_ => return StatusCode::BadRequest,
};
let file_hash = match file_hmac(key, filename) {
Ok(sha1) => sha1,
_ => return StatusCode::NotFound,
};
if insecure_compare(&file_hash[..], &parsed_signature[..]) {
StatusCode::Ok
}
else {
StatusCode::InternalServerError
}
}
fn file_hmac(key: &[u8], filename: &str) -> std::io::Result<[u8; 20]> {
use std::io::prelude::*;
use std::fs::File;
let mut file = try!(File::open(filename));
let mut s = String::new();
try!(file.read_to_string(&mut s));
Ok(hmac_sha1::hmac_sha1(key, &s.into_bytes()[..]))
}
fn insecure_compare(first: &[u8], second: &[u8]) -> bool {
for (x, y) in first.iter().zip(second.iter()) {
if x != y { return false; }
std::thread::sleep_ms(DELAY);
}
if first.len() != second.len() { //do this after step-by-step to preserve
return false; //element-by-element comparison | #[cfg(test)]
mod tests {
#[test] #[ignore]
fn insecure_compare() {
assert!(super::insecure_compare(b"yellow submarine", b"yellow submarine"),
"should have been equal");
assert!(!super::insecure_compare(b"yellow submarine", b"yellow_submarine"),
"should have been unequal");
}
} | }
true
}
| random_line_split |
main.rs | extern crate crypto;
extern crate hyper;
extern crate rustc_serialize;
extern crate rand;
mod hmac_sha1;
use hyper::server::{Server, Request, Response};
use hyper::status::StatusCode;
use hyper::net::Fresh;
use hyper::uri::RequestUri::AbsolutePath;
const HOST: &'static str = "localhost:9000";
const DELAY: u32 = 1;
fn main() {
let key = gen_key();
println!("Key: {} (len {})", format_hex(&key[..]), key.len());
let server = Server::http(HOST).unwrap();
println!("test.txt hmac: {} (Shhhh!)",
format_hex(&file_hmac(&key[..], "test.txt").unwrap()[..]));
println!("Listening on port 9000");
server.handle(
move |req: Request, res: Response| {
handle_request(&key[..], req, res)
}
).unwrap();
}
fn format_hex(hex: &[u8]) -> String {
use std::fmt::Write;
let mut s = String::new();
for el in hex.iter() {
write!(&mut s, "{:02x}", el).unwrap();
}
s
}
fn gen_key() -> Vec<u8> {
use rand::Rng;
let mut rng = rand::thread_rng();
let key_len = rng.gen_range(10, 256);
rng.gen_iter().take(key_len).collect()
}
fn handle_request(key: &[u8], req: Request, mut res: Response<Fresh>) {
match req.method {
hyper::Get => {
match req.uri {
AbsolutePath(path) => *res.status_mut() = handle_path(key, &path[..]),
_ => *res.status_mut() = StatusCode::NotFound,
}
},
_ => *res.status_mut() = StatusCode::MethodNotAllowed,
}
send_response(res);
}
fn handle_path(key: &[u8], path: &str) -> StatusCode {
let full_path = format!("http://{}/{}", HOST, path);
match hyper::Url::parse(&full_path[..]).ok().and_then(|url| url.query_pairs()) {
Some(pairs) => {
if pairs.len() == 2 {
let (ref arg1, ref filename) = pairs[0];
let (ref arg2, ref signature) = pairs[1];
if &arg1[..]=="file" && &arg2[..]=="signature" {
check_signature(key, &filename[..], &signature[..])
}
else { StatusCode::BadRequest }
}
else { StatusCode::BadRequest }
},
_ => StatusCode::NotFound,
}
}
fn send_response(res: Response) {
match res.status() {
StatusCode::Ok =>
{ res.send(b"<h1>Server says everything is a-okay</h1>\n").unwrap(); },
StatusCode::BadRequest =>
{ res.send(b"<h1>400: Bad Request</h1>\n").unwrap(); },
StatusCode::NotFound =>
{ res.send(b"<h1>404: Not Found</h1>\n").unwrap(); },
StatusCode::MethodNotAllowed =>
{ res.send(b"<h1>405: Method Not Allowed</h1>\n").unwrap(); },
StatusCode::InternalServerError =>
{ res.send(b"<h1>500: Internal Server Error</h1>\n").unwrap(); },
_ => | ,
}
}
fn check_signature(key: &[u8], filename: &str, signature: &str) -> StatusCode {
use rustc_serialize::hex::FromHex;
let parsed_signature = match signature.from_hex() {
Ok(sig) => sig,
_ => return StatusCode::BadRequest,
};
let file_hash = match file_hmac(key, filename) {
Ok(sha1) => sha1,
_ => return StatusCode::NotFound,
};
if insecure_compare(&file_hash[..], &parsed_signature[..]) {
StatusCode::Ok
}
else {
StatusCode::InternalServerError
}
}
fn file_hmac(key: &[u8], filename: &str) -> std::io::Result<[u8; 20]> {
use std::io::prelude::*;
use std::fs::File;
let mut file = try!(File::open(filename));
let mut s = String::new();
try!(file.read_to_string(&mut s));
Ok(hmac_sha1::hmac_sha1(key, &s.into_bytes()[..]))
}
fn insecure_compare(first: &[u8], second: &[u8]) -> bool {
for (x, y) in first.iter().zip(second.iter()) {
if x != y { return false; }
std::thread::sleep_ms(DELAY);
}
if first.len() != second.len() { //do this after step-by-step to preserve
return false; //element-by-element comparison
}
true
}
#[cfg(test)]
mod tests {
#[test] #[ignore]
fn insecure_compare() {
assert!(super::insecure_compare(b"yellow submarine", b"yellow submarine"),
"should have been equal");
assert!(!super::insecure_compare(b"yellow submarine", b"yellow_submarine"),
"should have been unequal");
}
}
| {} | conditional_block |
main.rs | extern crate crypto;
extern crate hyper;
extern crate rustc_serialize;
extern crate rand;
mod hmac_sha1;
use hyper::server::{Server, Request, Response};
use hyper::status::StatusCode;
use hyper::net::Fresh;
use hyper::uri::RequestUri::AbsolutePath;
const HOST: &'static str = "localhost:9000";
const DELAY: u32 = 1;
fn main() {
let key = gen_key();
println!("Key: {} (len {})", format_hex(&key[..]), key.len());
let server = Server::http(HOST).unwrap();
println!("test.txt hmac: {} (Shhhh!)",
format_hex(&file_hmac(&key[..], "test.txt").unwrap()[..]));
println!("Listening on port 9000");
server.handle(
move |req: Request, res: Response| {
handle_request(&key[..], req, res)
}
).unwrap();
}
fn format_hex(hex: &[u8]) -> String {
use std::fmt::Write;
let mut s = String::new();
for el in hex.iter() {
write!(&mut s, "{:02x}", el).unwrap();
}
s
}
fn gen_key() -> Vec<u8> {
use rand::Rng;
let mut rng = rand::thread_rng();
let key_len = rng.gen_range(10, 256);
rng.gen_iter().take(key_len).collect()
}
fn handle_request(key: &[u8], req: Request, mut res: Response<Fresh>) {
match req.method {
hyper::Get => {
match req.uri {
AbsolutePath(path) => *res.status_mut() = handle_path(key, &path[..]),
_ => *res.status_mut() = StatusCode::NotFound,
}
},
_ => *res.status_mut() = StatusCode::MethodNotAllowed,
}
send_response(res);
}
fn handle_path(key: &[u8], path: &str) -> StatusCode {
let full_path = format!("http://{}/{}", HOST, path);
match hyper::Url::parse(&full_path[..]).ok().and_then(|url| url.query_pairs()) {
Some(pairs) => {
if pairs.len() == 2 {
let (ref arg1, ref filename) = pairs[0];
let (ref arg2, ref signature) = pairs[1];
if &arg1[..]=="file" && &arg2[..]=="signature" {
check_signature(key, &filename[..], &signature[..])
}
else { StatusCode::BadRequest }
}
else { StatusCode::BadRequest }
},
_ => StatusCode::NotFound,
}
}
fn send_response(res: Response) {
match res.status() {
StatusCode::Ok =>
{ res.send(b"<h1>Server says everything is a-okay</h1>\n").unwrap(); },
StatusCode::BadRequest =>
{ res.send(b"<h1>400: Bad Request</h1>\n").unwrap(); },
StatusCode::NotFound =>
{ res.send(b"<h1>404: Not Found</h1>\n").unwrap(); },
StatusCode::MethodNotAllowed =>
{ res.send(b"<h1>405: Method Not Allowed</h1>\n").unwrap(); },
StatusCode::InternalServerError =>
{ res.send(b"<h1>500: Internal Server Error</h1>\n").unwrap(); },
_ => {},
}
}
fn check_signature(key: &[u8], filename: &str, signature: &str) -> StatusCode {
use rustc_serialize::hex::FromHex;
let parsed_signature = match signature.from_hex() {
Ok(sig) => sig,
_ => return StatusCode::BadRequest,
};
let file_hash = match file_hmac(key, filename) {
Ok(sha1) => sha1,
_ => return StatusCode::NotFound,
};
if insecure_compare(&file_hash[..], &parsed_signature[..]) {
StatusCode::Ok
}
else {
StatusCode::InternalServerError
}
}
fn file_hmac(key: &[u8], filename: &str) -> std::io::Result<[u8; 20]> |
fn insecure_compare(first: &[u8], second: &[u8]) -> bool {
for (x, y) in first.iter().zip(second.iter()) {
if x != y { return false; }
std::thread::sleep_ms(DELAY);
}
if first.len() != second.len() { //do this after step-by-step to preserve
return false; //element-by-element comparison
}
true
}
#[cfg(test)]
mod tests {
#[test] #[ignore]
fn insecure_compare() {
assert!(super::insecure_compare(b"yellow submarine", b"yellow submarine"),
"should have been equal");
assert!(!super::insecure_compare(b"yellow submarine", b"yellow_submarine"),
"should have been unequal");
}
}
| {
use std::io::prelude::*;
use std::fs::File;
let mut file = try!(File::open(filename));
let mut s = String::new();
try!(file.read_to_string(&mut s));
Ok(hmac_sha1::hmac_sha1(key, &s.into_bytes()[..]))
} | identifier_body |
main.rs | extern crate crypto;
extern crate hyper;
extern crate rustc_serialize;
extern crate rand;
mod hmac_sha1;
use hyper::server::{Server, Request, Response};
use hyper::status::StatusCode;
use hyper::net::Fresh;
use hyper::uri::RequestUri::AbsolutePath;
const HOST: &'static str = "localhost:9000";
const DELAY: u32 = 1;
fn main() {
let key = gen_key();
println!("Key: {} (len {})", format_hex(&key[..]), key.len());
let server = Server::http(HOST).unwrap();
println!("test.txt hmac: {} (Shhhh!)",
format_hex(&file_hmac(&key[..], "test.txt").unwrap()[..]));
println!("Listening on port 9000");
server.handle(
move |req: Request, res: Response| {
handle_request(&key[..], req, res)
}
).unwrap();
}
fn format_hex(hex: &[u8]) -> String {
use std::fmt::Write;
let mut s = String::new();
for el in hex.iter() {
write!(&mut s, "{:02x}", el).unwrap();
}
s
}
fn gen_key() -> Vec<u8> {
use rand::Rng;
let mut rng = rand::thread_rng();
let key_len = rng.gen_range(10, 256);
rng.gen_iter().take(key_len).collect()
}
fn handle_request(key: &[u8], req: Request, mut res: Response<Fresh>) {
match req.method {
hyper::Get => {
match req.uri {
AbsolutePath(path) => *res.status_mut() = handle_path(key, &path[..]),
_ => *res.status_mut() = StatusCode::NotFound,
}
},
_ => *res.status_mut() = StatusCode::MethodNotAllowed,
}
send_response(res);
}
fn handle_path(key: &[u8], path: &str) -> StatusCode {
let full_path = format!("http://{}/{}", HOST, path);
match hyper::Url::parse(&full_path[..]).ok().and_then(|url| url.query_pairs()) {
Some(pairs) => {
if pairs.len() == 2 {
let (ref arg1, ref filename) = pairs[0];
let (ref arg2, ref signature) = pairs[1];
if &arg1[..]=="file" && &arg2[..]=="signature" {
check_signature(key, &filename[..], &signature[..])
}
else { StatusCode::BadRequest }
}
else { StatusCode::BadRequest }
},
_ => StatusCode::NotFound,
}
}
fn send_response(res: Response) {
match res.status() {
StatusCode::Ok =>
{ res.send(b"<h1>Server says everything is a-okay</h1>\n").unwrap(); },
StatusCode::BadRequest =>
{ res.send(b"<h1>400: Bad Request</h1>\n").unwrap(); },
StatusCode::NotFound =>
{ res.send(b"<h1>404: Not Found</h1>\n").unwrap(); },
StatusCode::MethodNotAllowed =>
{ res.send(b"<h1>405: Method Not Allowed</h1>\n").unwrap(); },
StatusCode::InternalServerError =>
{ res.send(b"<h1>500: Internal Server Error</h1>\n").unwrap(); },
_ => {},
}
}
fn check_signature(key: &[u8], filename: &str, signature: &str) -> StatusCode {
use rustc_serialize::hex::FromHex;
let parsed_signature = match signature.from_hex() {
Ok(sig) => sig,
_ => return StatusCode::BadRequest,
};
let file_hash = match file_hmac(key, filename) {
Ok(sha1) => sha1,
_ => return StatusCode::NotFound,
};
if insecure_compare(&file_hash[..], &parsed_signature[..]) {
StatusCode::Ok
}
else {
StatusCode::InternalServerError
}
}
fn file_hmac(key: &[u8], filename: &str) -> std::io::Result<[u8; 20]> {
use std::io::prelude::*;
use std::fs::File;
let mut file = try!(File::open(filename));
let mut s = String::new();
try!(file.read_to_string(&mut s));
Ok(hmac_sha1::hmac_sha1(key, &s.into_bytes()[..]))
}
fn | (first: &[u8], second: &[u8]) -> bool {
for (x, y) in first.iter().zip(second.iter()) {
if x != y { return false; }
std::thread::sleep_ms(DELAY);
}
if first.len() != second.len() { //do this after step-by-step to preserve
return false; //element-by-element comparison
}
true
}
#[cfg(test)]
mod tests {
#[test] #[ignore]
fn insecure_compare() {
assert!(super::insecure_compare(b"yellow submarine", b"yellow submarine"),
"should have been equal");
assert!(!super::insecure_compare(b"yellow submarine", b"yellow_submarine"),
"should have been unequal");
}
}
| insecure_compare | identifier_name |
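For contrast, the standard fix is to make the comparison's timing independent of where the first mismatch occurs. A constant-time sketch of what a hardened server would use in place of insecure_compare:
fn secure_compare(first: &[u8], second: &[u8]) -> bool {
    if first.len() != second.len() {
        return false;
    }
    // XOR-accumulate every byte pair; no data-dependent early exit.
    let mut acc = 0u8;
    for (x, y) in first.iter().zip(second.iter()) {
        acc |= x ^ y;
    }
    acc == 0
}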
math.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{
atomic::{AtomicU32, Ordering},
Mutex,
};
struct MovingAvgU32Inner {
buffer: Vec<u32>,
current_index: usize,
sum: u32,
}
pub struct MovingAvgU32 {
protected: Mutex<MovingAvgU32Inner>,
cached_avg: AtomicU32,
}
impl MovingAvgU32 {
pub fn new(size: usize) -> Self {
MovingAvgU32 {
protected: Mutex::new(MovingAvgU32Inner {
buffer: vec![0; size],
current_index: 0,
sum: 0,
}),
cached_avg: AtomicU32::new(0),
}
}
pub fn add(&self, sample: u32) -> (u32, u32) {
let mut inner = self.protected.lock().unwrap();
let current_index = (inner.current_index + 1) % inner.buffer.len();
inner.current_index = current_index;
let old_avg = inner.sum / inner.buffer.len() as u32;
inner.sum = inner.sum + sample - inner.buffer[current_index];
inner.buffer[current_index] = sample;
let new_avg = inner.sum / inner.buffer.len() as u32;
self.cached_avg.store(new_avg, Ordering::Relaxed);
(old_avg, new_avg)
}
pub fn fetch(&self) -> u32 {
self.cached_avg.load(Ordering::Relaxed)
}
pub fn clear(&self) {
let mut inner = self.protected.lock().unwrap();
inner.buffer.fill(0);
inner.current_index = 0;
inner.sum = 0;
self.cached_avg.store(0, Ordering::Relaxed);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_monotonic_sequence() {
let avg = MovingAvgU32::new(5);
for i in (0..100).rev() {
avg.add(i);
if 100 - i >= 5 {
assert_eq!(avg.fetch(), i + 2);
} else {
assert_eq!(avg.fetch(), ((i + 99) * (100 - i) / 10));
}
}
avg.clear();
for i in 0..100 {
avg.add(i);
if i >= 4 {
assert_eq!(avg.fetch(), i - 2);
} else {
assert_eq!(avg.fetch(), (i * (i + 1) / 10));
}
}
}
#[test]
fn | () {
use rand::Rng;
let mut rng = rand::thread_rng();
let avg = MovingAvgU32::new(105);
let mut external_sum = 0;
for _ in 0..100 {
let n: u32 = rng.gen_range(0..u32::MAX / 100);
external_sum += n;
avg.add(n);
assert_eq!(avg.fetch(), external_sum / 105);
}
}
}
| test_random_sequence | identifier_name |
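The AtomicU32 cache is what makes fetch() safe to call from any thread without touching the mutex; only add() and clear() serialize. A hedged usage sketch (the window size and sample stream are invented):
use std::sync::Arc;
use std::thread;
fn demo() {
    let avg = Arc::new(MovingAvgU32::new(60)); // 60-sample window
    let writer = Arc::clone(&avg);
    let handle = thread::spawn(move || {
        for sample in 0..600u32 {
            writer.add(sample % 100); // writers serialize on the inner mutex
        }
    });
    handle.join().unwrap();
    println!("rolling avg: {}", avg.fetch()); // lock-free atomic read
}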
math.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{
atomic::{AtomicU32, Ordering},
Mutex,
};
struct MovingAvgU32Inner {
buffer: Vec<u32>,
current_index: usize,
sum: u32,
}
pub struct MovingAvgU32 {
protected: Mutex<MovingAvgU32Inner>,
cached_avg: AtomicU32,
}
impl MovingAvgU32 {
pub fn new(size: usize) -> Self {
MovingAvgU32 {
protected: Mutex::new(MovingAvgU32Inner {
buffer: vec![0; size], | cached_avg: AtomicU32::new(0),
}
}
pub fn add(&self, sample: u32) -> (u32, u32) {
let mut inner = self.protected.lock().unwrap();
let current_index = (inner.current_index + 1) % inner.buffer.len();
inner.current_index = current_index;
let old_avg = inner.sum / inner.buffer.len() as u32;
inner.sum = inner.sum + sample - inner.buffer[current_index];
inner.buffer[current_index] = sample;
let new_avg = inner.sum / inner.buffer.len() as u32;
self.cached_avg.store(new_avg, Ordering::Relaxed);
(old_avg, new_avg)
}
pub fn fetch(&self) -> u32 {
self.cached_avg.load(Ordering::Relaxed)
}
pub fn clear(&self) {
let mut inner = self.protected.lock().unwrap();
inner.buffer.fill(0);
inner.current_index = 0;
inner.sum = 0;
self.cached_avg.store(0, Ordering::Relaxed);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_monotonic_sequence() {
let avg = MovingAvgU32::new(5);
for i in (0..100).rev() {
avg.add(i);
if 100 - i >= 5 {
assert_eq!(avg.fetch(), i + 2);
} else {
assert_eq!(avg.fetch(), ((i + 99) * (100 - i) / 10));
}
}
avg.clear();
for i in 0..100 {
avg.add(i);
if i >= 4 {
assert_eq!(avg.fetch(), i - 2);
} else {
assert_eq!(avg.fetch(), (i * (i + 1) / 10));
}
}
}
#[test]
fn test_random_sequence() {
use rand::Rng;
let mut rng = rand::thread_rng();
let avg = MovingAvgU32::new(105);
let mut external_sum = 0;
for _ in 0..100 {
let n: u32 = rng.gen_range(0..u32::MAX / 100);
external_sum += n;
avg.add(n);
assert_eq!(avg.fetch(), external_sum / 105);
}
}
} | current_index: 0,
sum: 0,
}), | random_line_split |
math.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{
atomic::{AtomicU32, Ordering},
Mutex,
};
struct MovingAvgU32Inner {
buffer: Vec<u32>,
current_index: usize,
sum: u32,
}
pub struct MovingAvgU32 {
protected: Mutex<MovingAvgU32Inner>,
cached_avg: AtomicU32,
}
impl MovingAvgU32 {
pub fn new(size: usize) -> Self {
MovingAvgU32 {
protected: Mutex::new(MovingAvgU32Inner {
buffer: vec![0; size],
current_index: 0,
sum: 0,
}),
cached_avg: AtomicU32::new(0),
}
}
pub fn add(&self, sample: u32) -> (u32, u32) {
let mut inner = self.protected.lock().unwrap();
let current_index = (inner.current_index + 1) % inner.buffer.len();
inner.current_index = current_index;
let old_avg = inner.sum / inner.buffer.len() as u32;
inner.sum = inner.sum + sample - inner.buffer[current_index];
inner.buffer[current_index] = sample;
let new_avg = inner.sum / inner.buffer.len() as u32;
self.cached_avg.store(new_avg, Ordering::Relaxed);
(old_avg, new_avg)
}
pub fn fetch(&self) -> u32 {
self.cached_avg.load(Ordering::Relaxed)
}
pub fn clear(&self) {
let mut inner = self.protected.lock().unwrap();
inner.buffer.fill(0);
inner.current_index = 0;
inner.sum = 0;
self.cached_avg.store(0, Ordering::Relaxed);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_monotonic_sequence() {
let avg = MovingAvgU32::new(5);
for i in (0..100).rev() {
avg.add(i);
if 100 - i >= 5 {
assert_eq!(avg.fetch(), i + 2);
} else {
assert_eq!(avg.fetch(), ((i + 99) * (100 - i) / 10));
}
}
avg.clear();
for i in 0..100 {
avg.add(i);
if i >= 4 {
assert_eq!(avg.fetch(), i - 2);
} else |
}
}
#[test]
fn test_random_sequence() {
use rand::Rng;
let mut rng = rand::thread_rng();
let avg = MovingAvgU32::new(105);
let mut external_sum = 0;
for _ in 0..100 {
let n: u32 = rng.gen_range(0..u32::MAX / 100);
external_sum += n;
avg.add(n);
assert_eq!(avg.fetch(), external_sum / 105);
}
}
}
| {
assert_eq!(avg.fetch(), (i * (i + 1) / 10));
} | conditional_block |
math.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{
atomic::{AtomicU32, Ordering},
Mutex,
};
struct MovingAvgU32Inner {
buffer: Vec<u32>,
current_index: usize,
sum: u32,
}
pub struct MovingAvgU32 {
protected: Mutex<MovingAvgU32Inner>,
cached_avg: AtomicU32,
}
impl MovingAvgU32 {
pub fn new(size: usize) -> Self {
MovingAvgU32 {
protected: Mutex::new(MovingAvgU32Inner {
buffer: vec![0; size],
current_index: 0,
sum: 0,
}),
cached_avg: AtomicU32::new(0),
}
}
pub fn add(&self, sample: u32) -> (u32, u32) {
let mut inner = self.protected.lock().unwrap();
let current_index = (inner.current_index + 1) % inner.buffer.len();
inner.current_index = current_index;
let old_avg = inner.sum / inner.buffer.len() as u32;
inner.sum = inner.sum + sample - inner.buffer[current_index];
inner.buffer[current_index] = sample;
let new_avg = inner.sum / inner.buffer.len() as u32;
self.cached_avg.store(new_avg, Ordering::Relaxed);
(old_avg, new_avg)
}
pub fn fetch(&self) -> u32 {
self.cached_avg.load(Ordering::Relaxed)
}
pub fn clear(&self) {
let mut inner = self.protected.lock().unwrap();
inner.buffer.fill(0);
inner.current_index = 0;
inner.sum = 0;
self.cached_avg.store(0, Ordering::Relaxed);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_monotonic_sequence() {
let avg = MovingAvgU32::new(5);
for i in (0..100).rev() {
avg.add(i);
if 100 - i >= 5 {
assert_eq!(avg.fetch(), i + 2);
} else {
assert_eq!(avg.fetch(), ((i + 99) * (100 - i) / 10));
}
}
avg.clear();
for i in 0..100 {
avg.add(i);
if i >= 4 {
assert_eq!(avg.fetch(), i - 2);
} else {
assert_eq!(avg.fetch(), (i * (i + 1) / 10));
}
}
}
#[test]
fn test_random_sequence() |
}
| {
use rand::Rng;
let mut rng = rand::thread_rng();
let avg = MovingAvgU32::new(105);
let mut external_sum = 0;
for _ in 0..100 {
let n: u32 = rng.gen_range(0..u32::MAX / 100);
external_sum += n;
avg.add(n);
assert_eq!(avg.fetch(), external_sum / 105);
}
} | identifier_body |
django.py | path.dirname(path.dirname(__file__)), 'resources', 'file.txt'
))
MOCK_IMAGE = path.abspath(path.join(
path.dirname(path.dirname(__file__)), 'resources', 'image.jpg'
))
def get_file(filepath=MOCK_FILE, **kwargs):
""" Generate a content file.
:return ContentFile:
"""
with open(filepath, 'rb') as f:
name = path.basename(filepath)
return get_contentfile(f.read(), name)
def get_image(filepath=MOCK_IMAGE):
""" Generate a content image.
:return ContentFile:
"""
return get_file(filepath)
def get_relation(_scheme=None, _typemixer=None, **params):
""" Function description. """
if VERSION < (1, 8):
scheme = _scheme.related.parent_model
else:
scheme = _scheme.related_model
if scheme is ContentType:
choices = [m for m in models.get_models() if m is not ContentType]
return ContentType.objects.get_for_model(faker.random_element(choices))
return TypeMixer(scheme, mixer=_typemixer._TypeMixer__mixer,
factory=_typemixer._TypeMixer__factory,
fake=_typemixer._TypeMixer__fake,).blend(**params)
def get_datetime(**params):
""" Support Django TZ support. """
return faker.datetime(tzinfo=settings.USE_TZ)
class GenFactory(BaseFactory):
""" Map a django classes to simple types. """
types = {
(models.AutoField, models.PositiveIntegerField): t.PositiveInteger,
models.BigIntegerField: t.BigInteger,
models.BooleanField: bool,
(models.CharField, models.SlugField): str,
models.DateField: datetime.date,
models.DecimalField: decimal.Decimal,
models.EmailField: t.EmailString,
models.FloatField: float,
models.GenericIPAddressField: t.IPString,
models.IPAddressField: t.IP4String,
models.IntegerField: int,
models.PositiveSmallIntegerField: t.PositiveSmallInteger,
models.SmallIntegerField: t.SmallInteger,
models.TextField: t.Text,
models.TimeField: datetime.time,
models.URLField: t.URL,
}
generators = {
models.BinaryField: faker.pybytes,
models.DateTimeField: get_datetime,
models.FileField: get_file,
models.FilePathField: lambda: MOCK_FILE,
models.ForeignKey: get_relation,
models.ImageField: get_image,
models.ManyToManyField: get_relation,
models.OneToOneField: get_relation,
}
class TypeMixerMeta(BaseTypeMixerMeta):
""" Load django models from strings. """
def __new__(mcs, name, bases, params):
""" Associate Scheme with Django models.
Cache Django models.
:return mixer.backend.django.TypeMixer: A generated class.
"""
params['models_cache'] = dict()
cls = super(TypeMixerMeta, mcs).__new__(mcs, name, bases, params)
return cls
def __load_cls(cls, cls_type):
if isinstance(cls_type, _.string_types):
if '.' in cls_type:
app_label, model_name = cls_type.split(".")
return models.get_model(app_label, model_name)
else:
try:
if cls_type not in cls.models_cache:
cls.__update_cache()
return cls.models_cache[cls_type]
except KeyError:
raise ValueError('Model "%s" not found.' % cls_type)
return cls_type
def __update_cache(cls):
""" Update apps cache for Django < 1.7. """
if VERSION < (1, 7):
for app_models in models.loading.cache.app_models.values():
for name, model in app_models.items():
cls.models_cache[name] = model
else:
from django.apps import apps
for app in apps.all_models:
for name, model in apps.all_models[app].items():
cls.models_cache[name] = model
class TypeMixer(_.with_metaclass(TypeMixerMeta, BaseTypeMixer)):
""" TypeMixer for Django. """
__metaclass__ = TypeMixerMeta
factory = GenFactory
def postprocess(self, target, postprocess_values):
""" Fill postprocess_values. """
for name, deffered in postprocess_values:
if not type(deffered.scheme) is GenericForeignKey:
continue
name, value = self._get_value(name, deffered.value)
setattr(target, name, value)
if self.__mixer:
target = self.__mixer.postprocess(target)
for name, deffered in postprocess_values:
if type(deffered.scheme) is GenericForeignKey or not target.pk:
continue
name, value = self._get_value(name, deffered.value)
# # If the ManyToMany relation has an intermediary model,
# # the add and remove methods do not exist.
if not deffered.scheme.rel.through._meta.auto_created and self.__mixer: # noqa
self.__mixer.blend(
deffered.scheme.rel.through, **{
deffered.scheme.m2m_field_name(): target,
deffered.scheme.m2m_reverse_field_name(): value})
continue
if not isinstance(value, (list, tuple)):
value = [value]
setattr(target, name, value)
return target
def get_value(self, name, value):
""" Set value to generated instance.
:return : None or (name, value) for later use
"""
field = self.__fields.get(name)
if field:
if (field.scheme in self.__scheme._meta.local_many_to_many or
type(field.scheme) is GenericForeignKey):
return name, _Deffered(value, field.scheme)
return self._get_value(name, value, field)
return super(TypeMixer, self).get_value(name, value)
def _get_value(self, name, value, field=None):
if isinstance(value, GeneratorType):
return self._get_value(name, next(value), field)
if not isinstance(value, t.Mix) and value is not SKIP_VALUE:
if callable(value):
return self._get_value(name, value(), field)
if field:
value = field.scheme.to_python(value)
return name, value
def gen_select(self, field_name, select):
""" Select exists value from database.
:param field_name: Name of field for generation.
:return : None or (name, value) for later use
"""
if field_name not in self.__fields:
return field_name, None
try:
field = self.__fields[field_name]
return field.name, field.scheme.rel.to.objects.filter(**select.params).order_by('?')[0]
except Exception:
raise Exception("Cannot find a value for the field: '{0}'".format(field_name))
def gen_field(self, field):
""" Generate value by field.
:param relation: Instance of :class:`Field`
:return : None or (name, value) for later use
"""
if isinstance(field.scheme, GenericForeignKey):
return field.name, SKIP_VALUE
if field.params and not field.scheme:
raise ValueError('Invalid relation %s' % field.name)
return super(TypeMixer, self).gen_field(field)
def | (self, field, fname=None, fake=False, kwargs=None): # noqa
""" Make a fabric for field.
:param field: A mixer field
:param fname: Field name
:param fake: Force fake data
:return function:
"""
kwargs = {} if kwargs is None else kwargs
fcls = type(field)
stype = self.__factory.cls_to_simple(fcls)
if fcls is models.CommaSeparatedIntegerField:
return partial(faker.choices, range(0, 10), length=field.max_length)
if field and field.choices:
try:
choices, _ = list(zip(*field.choices))
return partial(faker.random_element, choices)
except ValueError:
pass
if stype in (str, t.Text):
fab = super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
return lambda: fab()[:field.max_length]
if stype is decimal.Decimal:
kwargs['left_digits'] = field.max_digits - field.decimal_places
kwargs['right_digits'] = field.decimal_places
elif stype is t.IPString:
# Hack for support Django 1.4/1.5
protocol = getattr(field, 'protocol', None)
if not protocol:
validator = field.default_validators[0]
protocol = 'both'
if validator is validate_ipv4_address:
protocol = 'ipv4'
elif validator is validate_ipv6_address:
protocol = 'ipv6'
# protocol matching is case insensitive
# default address is either IPv4 or IPv6
kwargs['protocol'] = protocol.lower()
elif isinstance(field, models.fields.related.RelatedField):
kwargs.update({'_typemixer': self, '_scheme': field})
return super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
@staticmethod
def is_unique(field):
""" Return True is field's value should be a unique.
:return bool:
"""
if VERSION < (1, 7) and isinstance(field.scheme, models.OneToOneField):
return True
return field.scheme.unique | make_fabric | identifier_name |
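All of this plumbing is what lets mixer fabricate Django model instances in one call, resolving relations through get_relation() above. A hypothetical usage sketch (the blog.Post model is invented):
# Hypothetical models; "app.Model" strings resolve via the TypeMixerMeta cache above.
from mixer.backend.django import mixer
post = mixer.blend('blog.Post', title='Hello')   # fields not given are faked
posts = mixer.cycle(5).blend('blog.Post', author=mixer.SELECT)  # reuse existing rows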
django.py | path.dirname(path.dirname(__file__)), 'resources', 'file.txt'
))
MOCK_IMAGE = path.abspath(path.join(
path.dirname(path.dirname(__file__)), 'resources', 'image.jpg'
))
def get_file(filepath=MOCK_FILE, **kwargs):
""" Generate a content file.
:return ContentFile:
"""
with open(filepath, 'rb') as f:
name = path.basename(filepath)
return get_contentfile(f.read(), name)
def get_image(filepath=MOCK_IMAGE):
""" Generate a content image.
:return ContentFile:
"""
return get_file(filepath)
def get_relation(_scheme=None, _typemixer=None, **params):
""" Function description. """
if VERSION < (1, 8):
scheme = _scheme.related.parent_model
else:
scheme = _scheme.related_model
if scheme is ContentType:
choices = [m for m in models.get_models() if m is not ContentType]
return ContentType.objects.get_for_model(faker.random_element(choices))
return TypeMixer(scheme, mixer=_typemixer._TypeMixer__mixer,
factory=_typemixer._TypeMixer__factory,
fake=_typemixer._TypeMixer__fake,).blend(**params)
def get_datetime(**params):
""" Support Django TZ support. """
return faker.datetime(tzinfo=settings.USE_TZ)
class GenFactory(BaseFactory):
""" Map a django classes to simple types. """
types = {
(models.AutoField, models.PositiveIntegerField): t.PositiveInteger,
models.BigIntegerField: t.BigInteger,
models.BooleanField: bool,
(models.CharField, models.SlugField): str,
models.DateField: datetime.date,
models.DecimalField: decimal.Decimal,
models.EmailField: t.EmailString,
models.FloatField: float,
models.GenericIPAddressField: t.IPString,
models.IPAddressField: t.IP4String,
models.IntegerField: int,
models.PositiveSmallIntegerField: t.PositiveSmallInteger,
models.SmallIntegerField: t.SmallInteger,
models.TextField: t.Text,
models.TimeField: datetime.time,
models.URLField: t.URL,
}
generators = {
models.BinaryField: faker.pybytes,
models.DateTimeField: get_datetime,
models.FileField: get_file,
models.FilePathField: lambda: MOCK_FILE,
models.ForeignKey: get_relation,
models.ImageField: get_image,
models.ManyToManyField: get_relation,
models.OneToOneField: get_relation,
}
class TypeMixerMeta(BaseTypeMixerMeta):
""" Load django models from strings. """
def __new__(mcs, name, bases, params):
""" Associate Scheme with Django models.
Cache Django models.
:return mixer.backend.django.TypeMixer: A generated class.
"""
params['models_cache'] = dict()
cls = super(TypeMixerMeta, mcs).__new__(mcs, name, bases, params)
return cls
def __load_cls(cls, cls_type):
if isinstance(cls_type, _.string_types):
if '.' in cls_type:
app_label, model_name = cls_type.split(".")
return models.get_model(app_label, model_name)
else:
try:
if cls_type not in cls.models_cache:
cls.__update_cache()
return cls.models_cache[cls_type]
except KeyError:
raise ValueError('Model "%s" not found.' % cls_type)
return cls_type
def __update_cache(cls):
""" Update apps cache for Django < 1.7. """
if VERSION < (1, 7):
|
else:
from django.apps import apps
for app in apps.all_models:
for name, model in apps.all_models[app].items():
cls.models_cache[name] = model
class TypeMixer(_.with_metaclass(TypeMixerMeta, BaseTypeMixer)):
""" TypeMixer for Django. """
__metaclass__ = TypeMixerMeta
factory = GenFactory
def postprocess(self, target, postprocess_values):
""" Fill postprocess_values. """
for name, deffered in postprocess_values:
if not type(deffered.scheme) is GenericForeignKey:
continue
name, value = self._get_value(name, deffered.value)
setattr(target, name, value)
if self.__mixer:
target = self.__mixer.postprocess(target)
for name, deffered in postprocess_values:
if type(deffered.scheme) is GenericForeignKey or not target.pk:
continue
name, value = self._get_value(name, deffered.value)
# # If the ManyToMany relation has an intermediary model,
# # the add and remove methods do not exist.
if not deffered.scheme.rel.through._meta.auto_created and self.__mixer: # noqa
self.__mixer.blend(
deffered.scheme.rel.through, **{
deffered.scheme.m2m_field_name(): target,
deffered.scheme.m2m_reverse_field_name(): value})
continue
if not isinstance(value, (list, tuple)):
value = [value]
setattr(target, name, value)
return target
def get_value(self, name, value):
""" Set value to generated instance.
:return : None or (name, value) for later use
"""
field = self.__fields.get(name)
if field:
if (field.scheme in self.__scheme._meta.local_many_to_many or
type(field.scheme) is GenericForeignKey):
return name, _Deffered(value, field.scheme)
return self._get_value(name, value, field)
return super(TypeMixer, self).get_value(name, value)
def _get_value(self, name, value, field=None):
if isinstance(value, GeneratorType):
return self._get_value(name, next(value), field)
if not isinstance(value, t.Mix) and value is not SKIP_VALUE:
if callable(value):
return self._get_value(name, value(), field)
if field:
value = field.scheme.to_python(value)
return name, value
def gen_select(self, field_name, select):
""" Select exists value from database.
:param field_name: Name of field for generation.
:return : None or (name, value) for later use
"""
if field_name not in self.__fields:
return field_name, None
try:
field = self.__fields[field_name]
return field.name, field.scheme.rel.to.objects.filter(**select.params).order_by('?')[0]
except Exception:
raise Exception("Cannot find a value for the field: '{0}'".format(field_name))
def gen_field(self, field):
""" Generate value by field.
:param relation: Instance of :class:`Field`
:return : None or (name, value) for later use
"""
if isinstance(field.scheme, GenericForeignKey):
return field.name, SKIP_VALUE
if field.params and not field.scheme:
raise ValueError('Invalid relation %s' % field.name)
return super(TypeMixer, self).gen_field(field)
def make_fabric(self, field, fname=None, fake=False, kwargs=None): # noqa
""" Make a fabric for field.
:param field: A mixer field
:param fname: Field name
:param fake: Force fake data
:return function:
"""
kwargs = {} if kwargs is None else kwargs
fcls = type(field)
stype = self.__factory.cls_to_simple(fcls)
if fcls is models.CommaSeparatedIntegerField:
return partial(faker.choices, range(0, 10), length=field.max_length)
if field and field.choices:
try:
choices, _ = list(zip(*field.choices))
return partial(faker.random_element, choices)
except ValueError:
pass
if stype in (str, t.Text):
fab = super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
return lambda: fab()[:field.max_length]
if stype is decimal.Decimal:
kwargs['left_digits'] = field.max_digits - field.decimal_places
kwargs['right_digits'] = field.decimal_places
elif stype is t.IPString:
# Hack for support Django 1.4/1.5
protocol = getattr(field, 'protocol', None)
if not protocol:
validator = field.default_validators[0]
protocol = 'both'
if validator is validate_ipv4_address:
protocol = 'ipv4'
elif validator is validate_ipv6_address:
protocol = 'ipv6'
# protocol matching is case insensitive
# default address is either IPv4 or IPv6
kwargs['protocol'] = protocol.lower()
elif isinstance(field, models.fields.related.RelatedField):
kwargs.update({'_typemixer': self, '_scheme': field})
return super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
@staticmethod
def is_unique(field):
""" Return True is field's value should be a unique.
:return bool:
"""
if VERSION < (1, 7) and isinstance(field.scheme, models.OneToOneField):
return True
return field.scheme.unique
| for app_models in models.loading.cache.app_models.values():
for name, model in app_models.items():
cls.models_cache[name] = model | conditional_block |
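The `middle` of this row is the Django < 1.7 branch body of `__update_cache`. A minimal stand-alone sketch of the same version-gate pattern, with the Django internals replaced by plain dicts (all names below are illustrative stubs, not mixer's API):

```python
# Sketch of the version-gated cache fill; runs without Django installed.
VERSION = (1, 6)  # the real code reads this from django

legacy_app_models = {"auth": {"user": object}}  # stands in for models.loading.cache.app_models
modern_all_models = {"auth": {"user": object}}  # stands in for django.apps.apps.all_models

models_cache = {}
if VERSION < (1, 7):
    for app_models in legacy_app_models.values():
        for name, model in app_models.items():
            models_cache[name] = model
else:
    for app in modern_all_models:
        for name, model in modern_all_models[app].items():
            models_cache[name] = model

print(models_cache)  # {'user': <class 'object'>}
```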
django.py | path.dirname(path.dirname(__file__)), 'resources', 'file.txt'
))
MOCK_IMAGE = path.abspath(path.join(
path.dirname(path.dirname(__file__)), 'resources', 'image.jpg'
))
def get_file(filepath=MOCK_FILE, **kwargs):
""" Generate a content file.
:return ContentFile:
"""
with open(filepath, 'rb') as f:
name = path.basename(filepath)
return get_contentfile(f.read(), name)
def get_image(filepath=MOCK_IMAGE):
""" Generate a content image.
:return ContentFile:
"""
return get_file(filepath)
def get_relation(_scheme=None, _typemixer=None, **params):
""" Function description. """
if VERSION < (1, 8):
scheme = _scheme.related.parent_model
else:
scheme = _scheme.related_model
if scheme is ContentType:
choices = [m for m in models.get_models() if m is not ContentType]
return ContentType.objects.get_for_model(faker.random_element(choices))
return TypeMixer(scheme, mixer=_typemixer._TypeMixer__mixer,
factory=_typemixer._TypeMixer__factory,
fake=_typemixer._TypeMixer__fake,).blend(**params)
def get_datetime(**params):
""" Support Django TZ support. """
return faker.datetime(tzinfo=settings.USE_TZ)
class GenFactory(BaseFactory):
""" Map a django classes to simple types. """
types = {
(models.AutoField, models.PositiveIntegerField): t.PositiveInteger,
models.BigIntegerField: t.BigInteger,
models.BooleanField: bool,
(models.CharField, models.SlugField): str,
models.DateField: datetime.date,
models.DecimalField: decimal.Decimal,
models.EmailField: t.EmailString,
models.FloatField: float,
models.GenericIPAddressField: t.IPString,
models.IPAddressField: t.IP4String,
models.IntegerField: int,
models.PositiveSmallIntegerField: t.PositiveSmallInteger,
models.SmallIntegerField: t.SmallInteger,
models.TextField: t.Text,
models.TimeField: datetime.time,
models.URLField: t.URL,
}
generators = {
models.BinaryField: faker.pybytes,
models.DateTimeField: get_datetime,
models.FileField: get_file,
models.FilePathField: lambda: MOCK_FILE,
models.ForeignKey: get_relation,
models.ImageField: get_image,
models.ManyToManyField: get_relation,
models.OneToOneField: get_relation,
}
class TypeMixerMeta(BaseTypeMixerMeta):
""" Load django models from strings. """ | """ Associate Scheme with Django models.
Cache Django models.
:return mixer.backend.django.TypeMixer: A generated class.
"""
params['models_cache'] = dict()
cls = super(TypeMixerMeta, mcs).__new__(mcs, name, bases, params)
return cls
def __load_cls(cls, cls_type):
if isinstance(cls_type, _.string_types):
if '.' in cls_type:
app_label, model_name = cls_type.split(".")
return models.get_model(app_label, model_name)
else:
try:
if cls_type not in cls.models_cache:
cls.__update_cache()
return cls.models_cache[cls_type]
except KeyError:
raise ValueError('Model "%s" not found.' % cls_type)
return cls_type
def __update_cache(cls):
""" Update apps cache for Django < 1.7. """
if VERSION < (1, 7):
for app_models in models.loading.cache.app_models.values():
for name, model in app_models.items():
cls.models_cache[name] = model
else:
from django.apps import apps
for app in apps.all_models:
for name, model in apps.all_models[app].items():
cls.models_cache[name] = model
class TypeMixer(_.with_metaclass(TypeMixerMeta, BaseTypeMixer)):
""" TypeMixer for Django. """
__metaclass__ = TypeMixerMeta
factory = GenFactory
def postprocess(self, target, postprocess_values):
""" Fill postprocess_values. """
for name, deffered in postprocess_values:
if type(deffered.scheme) is not GenericForeignKey:
continue
name, value = self._get_value(name, deffered.value)
setattr(target, name, value)
if self.__mixer:
target = self.__mixer.postprocess(target)
for name, deffered in postprocess_values:
if type(deffered.scheme) is GenericForeignKey or not target.pk:
continue
name, value = self._get_value(name, deffered.value)
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if not deffered.scheme.rel.through._meta.auto_created and self.__mixer: # noqa
self.__mixer.blend(
deffered.scheme.rel.through, **{
deffered.scheme.m2m_field_name(): target,
deffered.scheme.m2m_reverse_field_name(): value})
continue
if not isinstance(value, (list, tuple)):
value = [value]
setattr(target, name, value)
return target
def get_value(self, name, value):
""" Set value to generated instance.
:return : None or (name, value) for later use
"""
field = self.__fields.get(name)
if field:
if (field.scheme in self.__scheme._meta.local_many_to_many or
type(field.scheme) is GenericForeignKey):
return name, _Deffered(value, field.scheme)
return self._get_value(name, value, field)
return super(TypeMixer, self).get_value(name, value)
def _get_value(self, name, value, field=None):
if isinstance(value, GeneratorType):
return self._get_value(name, next(value), field)
if not isinstance(value, t.Mix) and value is not SKIP_VALUE:
if callable(value):
return self._get_value(name, value(), field)
if field:
value = field.scheme.to_python(value)
return name, value
def gen_select(self, field_name, select):
""" Select exists value from database.
:param field_name: Name of field for generation.
:return : None or (name, value) for later use
"""
if field_name not in self.__fields:
return field_name, None
try:
field = self.__fields[field_name]
return field.name, field.scheme.rel.to.objects.filter(**select.params).order_by('?')[0]
except Exception:
raise Exception("Cannot find a value for the field: '{0}'".format(field_name))
def gen_field(self, field):
""" Generate value by field.
:param field: Instance of :class:`Field`
:return : None or (name, value) for later use
"""
if isinstance(field.scheme, GenericForeignKey):
return field.name, SKIP_VALUE
if field.params and not field.scheme:
raise ValueError('Invalid relation %s' % field.name)
return super(TypeMixer, self).gen_field(field)
def make_fabric(self, field, fname=None, fake=False, kwargs=None): # noqa
""" Make a fabric for field.
:param field: A mixer field
:param fname: Field name
:param fake: Force fake data
:return function:
"""
kwargs = {} if kwargs is None else kwargs
fcls = type(field)
stype = self.__factory.cls_to_simple(fcls)
if fcls is models.CommaSeparatedIntegerField:
return partial(faker.choices, range(0, 10), length=field.max_length)
if field and field.choices:
try:
choices, _ = list(zip(*field.choices))
return partial(faker.random_element, choices)
except ValueError:
pass
if stype in (str, t.Text):
fab = super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
return lambda: fab()[:field.max_length]
if stype is decimal.Decimal:
kwargs['left_digits'] = field.max_digits - field.decimal_places
kwargs['right_digits'] = field.decimal_places
elif stype is t.IPString:
# Hack for support Django 1.4/1.5
protocol = getattr(field, 'protocol', None)
if not protocol:
validator = field.default_validators[0]
protocol = 'both'
if validator is validate_ipv4_address:
protocol = 'ipv4'
elif validator is validate_ipv6_address:
protocol = 'ipv6'
# protocol matching is case insensitive
# default address is either IPv4 or IPv6
kwargs['protocol'] = protocol.lower()
elif isinstance(field, models.fields.related.RelatedField):
kwargs.update({'_typemixer': self, '_scheme': field})
return super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
@staticmethod
def is_unique(field):
""" Return True is field's value should be a unique.
:return bool:
"""
if VERSION < (1, 7) and isinstance(field.scheme, models.OneToOneField):
return True
return field.scheme.unique |
def __new__(mcs, name, bases, params): | random_line_split |
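This row's `middle` is the `__new__` line of `TypeMixerMeta`. A minimal sketch of the underlying idea — a metaclass that injects a fresh per-class cache dict into the namespace before the class object is created:

```python
# Minimal sketch of the TypeMixerMeta trick; class names are illustrative.
class CachingMeta(type):
    def __new__(mcs, name, bases, params):
        params["models_cache"] = dict()  # per-class cache, not shared
        return super().__new__(mcs, name, bases, params)

class MixerA(metaclass=CachingMeta):
    pass

class MixerB(metaclass=CachingMeta):
    pass

assert MixerA.models_cache is not MixerB.models_cache  # independent caches
```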
django.py | path.dirname(path.dirname(__file__)), 'resources', 'file.txt'
))
MOCK_IMAGE = path.abspath(path.join(
path.dirname(path.dirname(__file__)), 'resources', 'image.jpg'
))
def get_file(filepath=MOCK_FILE, **kwargs):
""" Generate a content file.
:return ContentFile:
"""
with open(filepath, 'rb') as f:
name = path.basename(filepath)
return get_contentfile(f.read(), name)
def get_image(filepath=MOCK_IMAGE):
""" Generate a content image.
:return ContentFile:
"""
return get_file(filepath)
def get_relation(_scheme=None, _typemixer=None, **params):
""" Function description. """
if VERSION < (1, 8):
scheme = _scheme.related.parent_model
else:
scheme = _scheme.related_model
if scheme is ContentType:
choices = [m for m in models.get_models() if m is not ContentType]
return ContentType.objects.get_for_model(faker.random_element(choices))
return TypeMixer(scheme, mixer=_typemixer._TypeMixer__mixer,
factory=_typemixer._TypeMixer__factory,
fake=_typemixer._TypeMixer__fake,).blend(**params)
def get_datetime(**params):
""" Support Django TZ support. """
return faker.datetime(tzinfo=settings.USE_TZ)
class GenFactory(BaseFactory):
""" Map a django classes to simple types. """
types = {
(models.AutoField, models.PositiveIntegerField): t.PositiveInteger,
models.BigIntegerField: t.BigInteger,
models.BooleanField: bool,
(models.CharField, models.SlugField): str,
models.DateField: datetime.date,
models.DecimalField: decimal.Decimal,
models.EmailField: t.EmailString,
models.FloatField: float,
models.GenericIPAddressField: t.IPString,
models.IPAddressField: t.IP4String,
models.IntegerField: int,
models.PositiveSmallIntegerField: t.PositiveSmallInteger,
models.SmallIntegerField: t.SmallInteger,
models.TextField: t.Text,
models.TimeField: datetime.time,
models.URLField: t.URL,
}
generators = {
models.BinaryField: faker.pybytes,
models.DateTimeField: get_datetime,
models.FileField: get_file,
models.FilePathField: lambda: MOCK_FILE,
models.ForeignKey: get_relation,
models.ImageField: get_image,
models.ManyToManyField: get_relation,
models.OneToOneField: get_relation,
}
class TypeMixerMeta(BaseTypeMixerMeta):
|
else:
try:
if cls_type not in cls.models_cache:
cls.__update_cache()
return cls.models_cache[cls_type]
except KeyError:
raise ValueError('Model "%s" not found.' % cls_type)
return cls_type
def __update_cache(cls):
""" Update apps cache for Django < 1.7. """
if VERSION < (1, 7):
for app_models in models.loading.cache.app_models.values():
for name, model in app_models.items():
cls.models_cache[name] = model
else:
from django.apps import apps
for app in apps.all_models:
for name, model in apps.all_models[app].items():
cls.models_cache[name] = model
class TypeMixer(_.with_metaclass(TypeMixerMeta, BaseTypeMixer)):
""" TypeMixer for Django. """
__metaclass__ = TypeMixerMeta
factory = GenFactory
def postprocess(self, target, postprocess_values):
""" Fill postprocess_values. """
for name, deffered in postprocess_values:
if type(deffered.scheme) is not GenericForeignKey:
continue
name, value = self._get_value(name, deffered.value)
setattr(target, name, value)
if self.__mixer:
target = self.__mixer.postprocess(target)
for name, deffered in postprocess_values:
if type(deffered.scheme) is GenericForeignKey or not target.pk:
continue
name, value = self._get_value(name, deffered.value)
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if not deffered.scheme.rel.through._meta.auto_created and self.__mixer: # noqa
self.__mixer.blend(
deffered.scheme.rel.through, **{
deffered.scheme.m2m_field_name(): target,
deffered.scheme.m2m_reverse_field_name(): value})
continue
if not isinstance(value, (list, tuple)):
value = [value]
setattr(target, name, value)
return target
def get_value(self, name, value):
""" Set value to generated instance.
:return : None or (name, value) for later use
"""
field = self.__fields.get(name)
if field:
if (field.scheme in self.__scheme._meta.local_many_to_many or
type(field.scheme) is GenericForeignKey):
return name, _Deffered(value, field.scheme)
return self._get_value(name, value, field)
return super(TypeMixer, self).get_value(name, value)
def _get_value(self, name, value, field=None):
if isinstance(value, GeneratorType):
return self._get_value(name, next(value), field)
if not isinstance(value, t.Mix) and value is not SKIP_VALUE:
if callable(value):
return self._get_value(name, value(), field)
if field:
value = field.scheme.to_python(value)
return name, value
def gen_select(self, field_name, select):
""" Select exists value from database.
:param field_name: Name of field for generation.
:return : None or (name, value) for later use
"""
if field_name not in self.__fields:
return field_name, None
try:
field = self.__fields[field_name]
return field.name, field.scheme.rel.to.objects.filter(**select.params).order_by('?')[0]
except Exception:
raise Exception("Cannot find a value for the field: '{0}'".format(field_name))
def gen_field(self, field):
""" Generate value by field.
:param field: Instance of :class:`Field`
:return : None or (name, value) for later use
"""
if isinstance(field.scheme, GenericForeignKey):
return field.name, SKIP_VALUE
if field.params and not field.scheme:
raise ValueError('Invalid relation %s' % field.name)
return super(TypeMixer, self).gen_field(field)
def make_fabric(self, field, fname=None, fake=False, kwargs=None): # noqa
""" Make a fabric for field.
:param field: A mixer field
:param fname: Field name
:param fake: Force fake data
:return function:
"""
kwargs = {} if kwargs is None else kwargs
fcls = type(field)
stype = self.__factory.cls_to_simple(fcls)
if fcls is models.CommaSeparatedIntegerField:
return partial(faker.choices, range(0, 10), length=field.max_length)
if field and field.choices:
try:
choices, _ = list(zip(*field.choices))
return partial(faker.random_element, choices)
except ValueError:
pass
if stype in (str, t.Text):
fab = super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
return lambda: fab()[:field.max_length]
if stype is decimal.Decimal:
kwargs['left_digits'] = field.max_digits - field.decimal_places
kwargs['right_digits'] = field.decimal_places
elif stype is t.IPString:
# Hack for support Django 1.4/1.5
protocol = getattr(field, 'protocol', None)
if not protocol:
validator = field.default_validators[0]
protocol = 'both'
if validator is validate_ipv4_address:
protocol = 'ipv4'
elif validator is validate_ipv6_address:
protocol = 'ipv6'
# protocol matching is case insensitive
# default address is either IPv4 or IPv6
kwargs['protocol'] = protocol.lower()
elif isinstance(field, models.fields.related.RelatedField):
kwargs.update({'_typemixer': self, '_scheme': field})
return super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
@staticmethod
def is_unique(field):
""" Return True is field's value should be a unique.
:return bool:
"""
if VERSION < (1, 7) and isinstance(field.scheme, models.OneToOneField):
return True
return field.scheme.unique | """ Load django models from strings. """
def __new__(mcs, name, bases, params):
""" Associate Scheme with Django models.
Cache Django models.
:return mixer.backend.django.TypeMixer: A generated class.
"""
params['models_cache'] = dict()
cls = super(TypeMixerMeta, mcs).__new__(mcs, name, bases, params)
return cls
def __load_cls(cls, cls_type):
if isinstance(cls_type, _.string_types):
if '.' in cls_type:
app_label, model_name = cls_type.split(".")
return models.get_model(app_label, model_name) | identifier_body |
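The three django.py rows above hole the same file at three different spans. Whatever the `fim_type`, concatenating `prefix + middle + suffix` recovers the original text; a minimal sketch using the column names from the schema above (the row values are illustrative, not copied from the dataset):

```python
# Rebuild the original source from one FIM row.
def reassemble(row: dict) -> str:
    return row["prefix"] + row["middle"] + row["suffix"]

row = {
    "file_name": "django.py",
    "prefix": "def is_unique(field):\n    ",
    "middle": "return field.scheme.unique",
    "suffix": "\n",
    "fim_type": "random_line_split",
}
assert reassemble(row) == "def is_unique(field):\n    return field.scheme.unique\n"
```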
form.component.ts | /**
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Component, EventEmitter, Input, OnChanges, OnInit, Output, SimpleChanges, ViewChild} from '@angular/core';
import {PayrollConfiguration} from '../../../../services/payroll/domain/payroll-configuration.model';
import {AbstractControl, FormArray, FormBuilder, FormGroup, Validators} from '@angular/forms';
import {TdStepComponent} from '@covalent/core';
import {FimsValidators} from '../../../../common/validator/validators';
import {ProductInstance} from '../../../../services/depositAccount/domain/instance/product-instance.model';
import {PayrollAllocation} from '../../../../services/payroll/domain/payroll-allocation.model';
import {accountUnique} from './validator/account-unique.validator';
@Component({
selector: 'fims-customer-payroll-form',
templateUrl: './form.component.html'
})
export class CustomerPayrollFormComponent implements OnInit, OnChanges {
form: FormGroup;
@ViewChild('detailsStep') detailsStep: TdStepComponent;
@Input('productInstances') productInstances: ProductInstance[];
@Input('distribution') distribution: PayrollConfiguration;
@Output('onSave') onSave = new EventEmitter<PayrollConfiguration>();
@Output('onCancel') onCancel = new EventEmitter<void>();
constructor(private formBuilder: FormBuilder) { |
ngOnInit(): void {
this.detailsStep.open();
}
ngOnChanges(changes: SimpleChanges): void {
if (changes.distribution) {
this.form.reset({
mainAccountNumber: this.distribution.mainAccountNumber
});
this.distribution.payrollAllocations.forEach(allocation => this.addAllocation(allocation));
}
}
save(): void {
const distribution = Object.assign({}, this.distribution, {
mainAccountNumber: this.form.get('mainAccountNumber').value,
payrollAllocations: this.form.get('payrollAllocations').value
});
this.onSave.emit(distribution);
}
cancel(): void {
this.onCancel.emit();
}
private initAllocations(allocations: PayrollAllocation[]): FormArray {
const formControls: FormGroup[] = [];
allocations.forEach(allocation => formControls.push(this.initAllocation(allocation)));
return this.formBuilder.array(formControls);
}
private initAllocation(allocation?: PayrollAllocation): FormGroup {
return this.formBuilder.group({
accountNumber: [allocation ? allocation.accountNumber : '', [Validators.required]],
amount: [allocation ? allocation.amount : '', [
Validators.required,
FimsValidators.minValue(0.001),
FimsValidators.maxValue(9999999999.99999)]
],
proportional: [allocation ? allocation.proportional : false]
});
}
addAllocation(allocation?: PayrollAllocation): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.push(this.initAllocation(allocation));
}
removeAllocation(index: number): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.removeAt(index);
}
get allocations(): AbstractControl[] {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
return allocations.controls;
}
} | this.form = this.formBuilder.group({
mainAccountNumber: ['', [Validators.required]],
payrollAllocations: this.initAllocations([])
}, { validator: accountUnique });
} | random_line_split |
form.component.ts | /**
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Component, EventEmitter, Input, OnChanges, OnInit, Output, SimpleChanges, ViewChild} from '@angular/core';
import {PayrollConfiguration} from '../../../../services/payroll/domain/payroll-configuration.model';
import {AbstractControl, FormArray, FormBuilder, FormGroup, Validators} from '@angular/forms';
import {TdStepComponent} from '@covalent/core';
import {FimsValidators} from '../../../../common/validator/validators';
import {ProductInstance} from '../../../../services/depositAccount/domain/instance/product-instance.model';
import {PayrollAllocation} from '../../../../services/payroll/domain/payroll-allocation.model';
import {accountUnique} from './validator/account-unique.validator';
@Component({
selector: 'fims-customer-payroll-form',
templateUrl: './form.component.html'
})
export class CustomerPayrollFormComponent implements OnInit, OnChanges {
form: FormGroup;
@ViewChild('detailsStep') detailsStep: TdStepComponent;
@Input('productInstances') productInstances: ProductInstance[];
@Input('distribution') distribution: PayrollConfiguration;
@Output('onSave') onSave = new EventEmitter<PayrollConfiguration>();
@Output('onCancel') onCancel = new EventEmitter<void>();
constructor(private formBuilder: FormBuilder) {
this.form = this.formBuilder.group({
mainAccountNumber: ['', [Validators.required]],
payrollAllocations: this.initAllocations([])
}, { validator: accountUnique });
}
ngOnInit(): void {
this.detailsStep.open();
}
ngOnChanges(changes: SimpleChanges): void {
if (changes.distribution) {
this.form.reset({
mainAccountNumber: this.distribution.mainAccountNumber
});
this.distribution.payrollAllocations.forEach(allocation => this.addAllocation(allocation));
}
}
save(): void {
const distribution = Object.assign({}, this.distribution, {
mainAccountNumber: this.form.get('mainAccountNumber').value,
payrollAllocations: this.form.get('payrollAllocations').value
});
this.onSave.emit(distribution);
}
cancel(): void {
this.onCancel.emit();
}
private initAllocations(allocations: PayrollAllocation[]): FormArray {
const formControls: FormGroup[] = [];
allocations.forEach(allocation => formControls.push(this.initAllocation(allocation)));
return this.formBuilder.array(formControls);
}
private initAllocation(allocation?: PayrollAllocation): FormGroup {
return this.formBuilder.group({
accountNumber: [allocation ? allocation.accountNumber : '', [Validators.required]],
amount: [allocation ? allocation.amount : '', [
Validators.required,
FimsValidators.minValue(0.001),
FimsValidators.maxValue(9999999999.99999)]
],
proportional: [allocation ? allocation.proportional : false]
});
}
addAllocation(allocation?: PayrollAllocation): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.push(this.initAllocation(allocation));
}
removeAllocation(index: number): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.removeAt(index);
}
get | (): AbstractControl[] {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
return allocations.controls;
}
}
| allocations | identifier_name |
form.component.ts | /**
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Component, EventEmitter, Input, OnChanges, OnInit, Output, SimpleChanges, ViewChild} from '@angular/core';
import {PayrollConfiguration} from '../../../../services/payroll/domain/payroll-configuration.model';
import {AbstractControl, FormArray, FormBuilder, FormGroup, Validators} from '@angular/forms';
import {TdStepComponent} from '@covalent/core';
import {FimsValidators} from '../../../../common/validator/validators';
import {ProductInstance} from '../../../../services/depositAccount/domain/instance/product-instance.model';
import {PayrollAllocation} from '../../../../services/payroll/domain/payroll-allocation.model';
import {accountUnique} from './validator/account-unique.validator';
@Component({
selector: 'fims-customer-payroll-form',
templateUrl: './form.component.html'
})
export class CustomerPayrollFormComponent implements OnInit, OnChanges {
form: FormGroup;
@ViewChild('detailsStep') detailsStep: TdStepComponent;
@Input('productInstances') productInstances: ProductInstance[];
@Input('distribution') distribution: PayrollConfiguration;
@Output('onSave') onSave = new EventEmitter<PayrollConfiguration>();
@Output('onCancel') onCancel = new EventEmitter<void>();
constructor(private formBuilder: FormBuilder) |
ngOnInit(): void {
this.detailsStep.open();
}
ngOnChanges(changes: SimpleChanges): void {
if (changes.distribution) {
this.form.reset({
mainAccountNumber: this.distribution.mainAccountNumber
});
this.distribution.payrollAllocations.forEach(allocation => this.addAllocation(allocation));
}
}
save(): void {
const distribution = Object.assign({}, this.distribution, {
mainAccountNumber: this.form.get('mainAccountNumber').value,
payrollAllocations: this.form.get('payrollAllocations').value
});
this.onSave.emit(distribution);
}
cancel(): void {
this.onCancel.emit();
}
private initAllocations(allocations: PayrollAllocation[]): FormArray {
const formControls: FormGroup[] = [];
allocations.forEach(allocation => formControls.push(this.initAllocation(allocation)));
return this.formBuilder.array(formControls);
}
private initAllocation(allocation?: PayrollAllocation): FormGroup {
return this.formBuilder.group({
accountNumber: [allocation ? allocation.accountNumber : '', [Validators.required]],
amount: [allocation ? allocation.amount : '', [
Validators.required,
FimsValidators.minValue(0.001),
FimsValidators.maxValue(9999999999.99999)]
],
proportional: [allocation ? allocation.proportional : false]
});
}
addAllocation(allocation?: PayrollAllocation): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.push(this.initAllocation(allocation));
}
removeAllocation(index: number): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.removeAt(index);
}
get allocations(): AbstractControl[] {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
return allocations.controls;
}
}
| {
this.form = this.formBuilder.group({
mainAccountNumber: ['', [Validators.required]],
payrollAllocations: this.initAllocations([])
}, { validator: accountUnique });
} | identifier_body |
form.component.ts | /**
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Component, EventEmitter, Input, OnChanges, OnInit, Output, SimpleChanges, ViewChild} from '@angular/core';
import {PayrollConfiguration} from '../../../../services/payroll/domain/payroll-configuration.model';
import {AbstractControl, FormArray, FormBuilder, FormGroup, Validators} from '@angular/forms';
import {TdStepComponent} from '@covalent/core';
import {FimsValidators} from '../../../../common/validator/validators';
import {ProductInstance} from '../../../../services/depositAccount/domain/instance/product-instance.model';
import {PayrollAllocation} from '../../../../services/payroll/domain/payroll-allocation.model';
import {accountUnique} from './validator/account-unique.validator';
@Component({
selector: 'fims-customer-payroll-form',
templateUrl: './form.component.html'
})
export class CustomerPayrollFormComponent implements OnInit, OnChanges {
form: FormGroup;
@ViewChild('detailsStep') detailsStep: TdStepComponent;
@Input('productInstances') productInstances: ProductInstance[];
@Input('distribution') distribution: PayrollConfiguration;
@Output('onSave') onSave = new EventEmitter<PayrollConfiguration>();
@Output('onCancel') onCancel = new EventEmitter<void>();
constructor(private formBuilder: FormBuilder) {
this.form = this.formBuilder.group({
mainAccountNumber: ['', [Validators.required]],
payrollAllocations: this.initAllocations([])
}, { validator: accountUnique });
}
ngOnInit(): void {
this.detailsStep.open();
}
ngOnChanges(changes: SimpleChanges): void {
if (changes.distribution) |
}
save(): void {
const distribution = Object.assign({}, this.distribution, {
mainAccountNumber: this.form.get('mainAccountNumber').value,
payrollAllocations: this.form.get('payrollAllocations').value
});
this.onSave.emit(distribution);
}
cancel(): void {
this.onCancel.emit();
}
private initAllocations(allocations: PayrollAllocation[]): FormArray {
const formControls: FormGroup[] = [];
allocations.forEach(allocation => formControls.push(this.initAllocation(allocation)));
return this.formBuilder.array(formControls);
}
private initAllocation(allocation?: PayrollAllocation): FormGroup {
return this.formBuilder.group({
accountNumber: [allocation ? allocation.accountNumber : '', [Validators.required]],
amount: [allocation ? allocation.amount : '', [
Validators.required,
FimsValidators.minValue(0.001),
FimsValidators.maxValue(9999999999.99999)]
],
proportional: [allocation ? allocation.proportional : false]
});
}
addAllocation(allocation?: PayrollAllocation): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.push(this.initAllocation(allocation));
}
removeAllocation(index: number): void {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
allocations.removeAt(index);
}
get allocations(): AbstractControl[] {
const allocations: FormArray = this.form.get('payrollAllocations') as FormArray;
return allocations.controls;
}
}
| {
this.form.reset({
mainAccountNumber: this.distribution.mainAccountNumber
});
this.distribution.payrollAllocations.forEach(allocation => this.addAllocation(allocation));
} | conditional_block |
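The four form.component.ts rows above sample the same file at four hole types. A small summary of what each `fim_type` label appears to mark — inferred from these rows, not from official dataset documentation:

```python
# Inferred taxonomy; treat as an assumption, not a spec.
FIM_TYPES = {
    "random_line_split": "an arbitrary run of source lines",
    "identifier_name": "a single identifier (here: 'allocations')",
    "identifier_body": "the body of a definition (here: the constructor body)",
    "conditional_block": "the body of a conditional branch (here: the if in ngOnChanges)",
}
for label, span in FIM_TYPES.items():
    print(f"{label:18} -> middle is {span}")
```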
generator.py | writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import argparse
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
import stevedore.named
from marconi.openstack.common import gettextutils
from marconi.openstack.common import importutils
gettextutils.install('marconi')
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
DICTOPT = "DictOpt"
MULTISTROPT = "MultiStrOpt"
OPT_TYPES = {
STROPT: 'string value',
BOOLOPT: 'boolean value',
INTOPT: 'integer value',
FLOATOPT: 'floating point value',
LISTOPT: 'list value',
DICTOPT: 'dict value',
MULTISTROPT: 'multi valued',
}
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
FLOATOPT, LISTOPT, DICTOPT,
MULTISTROPT]))
PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
"../../../../"))
WORDWRAP_WIDTH = 60
def generate(argv):
parser = argparse.ArgumentParser(
description='generate sample configuration file',
)
parser.add_argument('-m', dest='modules', action='append')
parser.add_argument('-l', dest='libraries', action='append')
parser.add_argument('srcfiles', nargs='*')
parsed_args = parser.parse_args(argv)
mods_by_pkg = dict()
for filepath in parsed_args.srcfiles:
pkg_name = filepath.split(os.sep)[1]
mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
os.path.basename(filepath).split('.')[0]])
mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
# NOTE(lzyeval): place top level modules before packages
pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
pkg_names.extend(ext_names)
# opts_by_group is a mapping of group name to an options list
# The options list is a list of (module, options) tuples
opts_by_group = {'DEFAULT': []}
if parsed_args.modules:
for module_name in parsed_args.modules:
module = _import_module(module_name)
if module:
for group, opts in _list_opts(module):
opts_by_group.setdefault(group, []).append((module_name,
opts))
# Look for entry points defined in libraries (or applications) for
# option discovery, and include their return values in the output.
#
# Each entry point should be a function returning an iterable
# of pairs with the group name (or None for the default group)
# and the list of Opt instances for that group.
if parsed_args.libraries:
loader = stevedore.named.NamedExtensionManager(
'oslo.config.opts',
names=list(set(parsed_args.libraries)),
invoke_on_load=False,
)
for ext in loader:
for group, opts in ext.plugin():
opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
opt_list.append((ext.name, opts))
for pkg_name in pkg_names:
mods = mods_by_pkg.get(pkg_name)
mods.sort()
for mod_str in mods:
if mod_str.endswith('.__init__'):
mod_str = mod_str[:mod_str.rfind(".")]
mod_obj = _import_module(mod_str)
if not mod_obj:
raise RuntimeError("Unable to import module %s" % mod_str)
for group, opts in _list_opts(mod_obj):
opts_by_group.setdefault(group, []).append((mod_str, opts))
print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
for group in sorted(opts_by_group.keys()):
print_group_opts(group, opts_by_group[group])
def _import_module(mod_str):
try:
if mod_str.startswith('bin.'):
imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
return sys.modules[mod_str[4:]]
else:
return importutils.import_module(mod_str)
except Exception as e:
sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
return None
def _is_in_group(opt, group):
"Check if opt is in group."
for value in group._opts.values():
# NOTE(llu): Temporary workaround for bug #1262148, wait until
# newly released oslo.config support '==' operator.
if not(value['opt'] != opt):
return True
return False
def _guess_groups(opt, mod_obj):
# is it in the DEFAULT group?
if _is_in_group(opt, cfg.CONF):
return 'DEFAULT'
# what other groups is it in?
for value in cfg.CONF.values():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
raise RuntimeError(
"Unable to find group for option %s, "
"maybe it's defined twice in the same group?"
% opt.name
)
def _list_opts(obj):
def is_opt(o):
return (isinstance(o, cfg.Opt) and
not isinstance(o, cfg.SubCommandOpt))
opts = list()
for attr_str in dir(obj):
attr_obj = getattr(obj, attr_str)
if is_opt(attr_obj):
opts.append(attr_obj)
elif (isinstance(attr_obj, list) and
all(map(lambda x: is_opt(x), attr_obj))):
opts.extend(attr_obj)
ret = {}
for opt in opts:
ret.setdefault(_guess_groups(opt, obj), []).append(opt)
return ret.items()
def print_group_opts(group, opts_by_module):
print("[%s]" % group)
print('')
for mod, opts in opts_by_module:
print('#')
print('# Options defined in %s' % mod)
print('#')
print('')
for opt in opts:
_print_opt(opt)
print('')
def _get_my_ip():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
"""Set up a reasonably sensible default for pybasedir, my_ip and host."""
if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the
# second part is an absolute pathname and therefore drop the first
# part.
value = os.path.normpath("/usr/" + value[len(sys.prefix):])
elif value.startswith(BASEDIR):
return value.replace(BASEDIR, '/usr/lib/python/site-packages')
elif BASEDIR in value:
return value.replace(BASEDIR, '')
elif value == _get_my_ip():
return '10.0.0.1'
elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
return 'marconi'
elif value.strip() != value:
return '"%s"' % value
return value
def _print_opt(opt):
| deprecated_opt.name))
try:
if opt_default is None:
print('#%s=<None>' % opt_name)
elif opt_type == STROPT:
assert(isinstance(opt_default, six.string_types))
print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
opt_default)))
elif opt_type == BOOLOPT:
assert(isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, str(opt_default).lower()))
elif opt_type == INTOPT:
assert(isinstance(opt_default, int) and
not isinstance(opt_default, bool))
print('#%s=%s' % ( | opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
if not opt_help:
sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
opt_help = ""
opt_type = None
try:
opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
except (ValueError, AttributeError) as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
opt_help = u'%s (%s)' % (opt_help,
OPT_TYPES[opt_type])
print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
if opt.deprecated_opts:
for deprecated_opt in opt.deprecated_opts:
if deprecated_opt.name:
deprecated_group = (deprecated_opt.group if
deprecated_opt.group else "DEFAULT")
print('# Deprecated group/name - [%s]/%s' %
(deprecated_group, | identifier_body |
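In the row above, `_print_opt` recovers an option's type label by regex-matching the class name inside `str(type(opt))`. A reduced, stand-alone sketch of that trick (`IntOpt` here is a stub, not oslo.config's class):

```python
import re

# Same pattern as in the row above, reduced to the type names it matches.
OPTION_REGEX = re.compile(r"(StrOpt|BoolOpt|IntOpt|FloatOpt|ListOpt|DictOpt|MultiStrOpt)")

class IntOpt:  # stand-in for oslo.config.cfg.IntOpt
    pass

opt = IntOpt()
# str(type(opt)) looks like "<class '__main__.IntOpt'>", so the class
# name can be fished out with a regex search, exactly as _print_opt does.
print(OPTION_REGEX.search(str(type(opt))).group(0))  # -> IntOpt
```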
generator.py | writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import argparse
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
import stevedore.named
from marconi.openstack.common import gettextutils
from marconi.openstack.common import importutils
gettextutils.install('marconi')
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
DICTOPT = "DictOpt"
MULTISTROPT = "MultiStrOpt"
OPT_TYPES = {
STROPT: 'string value',
BOOLOPT: 'boolean value',
INTOPT: 'integer value',
FLOATOPT: 'floating point value',
LISTOPT: 'list value',
DICTOPT: 'dict value',
MULTISTROPT: 'multi valued',
}
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
FLOATOPT, LISTOPT, DICTOPT,
MULTISTROPT]))
PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
"../../../../"))
WORDWRAP_WIDTH = 60
| def generate(argv):
parser = argparse.ArgumentParser(
description='generate sample configuration file',
)
parser.add_argument('-m', dest='modules', action='append')
parser.add_argument('-l', dest='libraries', action='append')
parser.add_argument('srcfiles', nargs='*')
parsed_args = parser.parse_args(argv)
mods_by_pkg = dict()
for filepath in parsed_args.srcfiles:
pkg_name = filepath.split(os.sep)[1]
mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
os.path.basename(filepath).split('.')[0]])
mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
# NOTE(lzyeval): place top level modules before packages
pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
pkg_names.extend(ext_names)
# opts_by_group is a mapping of group name to an options list
# The options list is a list of (module, options) tuples
opts_by_group = {'DEFAULT': []}
if parsed_args.modules:
for module_name in parsed_args.modules:
module = _import_module(module_name)
if module:
for group, opts in _list_opts(module):
opts_by_group.setdefault(group, []).append((module_name,
opts))
# Look for entry points defined in libraries (or applications) for
# option discovery, and include their return values in the output.
#
# Each entry point should be a function returning an iterable
# of pairs with the group name (or None for the default group)
# and the list of Opt instances for that group.
if parsed_args.libraries:
loader = stevedore.named.NamedExtensionManager(
'oslo.config.opts',
names=list(set(parsed_args.libraries)),
invoke_on_load=False,
)
for ext in loader:
for group, opts in ext.plugin():
opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
opt_list.append((ext.name, opts))
for pkg_name in pkg_names:
mods = mods_by_pkg.get(pkg_name)
mods.sort()
for mod_str in mods:
if mod_str.endswith('.__init__'):
mod_str = mod_str[:mod_str.rfind(".")]
mod_obj = _import_module(mod_str)
if not mod_obj:
raise RuntimeError("Unable to import module %s" % mod_str)
for group, opts in _list_opts(mod_obj):
opts_by_group.setdefault(group, []).append((mod_str, opts))
print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
for group in sorted(opts_by_group.keys()):
print_group_opts(group, opts_by_group[group])
def _import_module(mod_str):
try:
if mod_str.startswith('bin.'):
imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
return sys.modules[mod_str[4:]]
else:
return importutils.import_module(mod_str)
except Exception as e:
sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
return None
def _is_in_group(opt, group):
"Check if opt is in group."
for value in group._opts.values():
# NOTE(llu): Temporary workaround for bug #1262148, wait until
# newly released oslo.config support '==' operator.
if not(value['opt'] != opt):
return True
return False
def _guess_groups(opt, mod_obj):
# is it in the DEFAULT group?
if _is_in_group(opt, cfg.CONF):
return 'DEFAULT'
# what other groups is it in?
for value in cfg.CONF.values():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
raise RuntimeError(
"Unable to find group for option %s, "
"maybe it's defined twice in the same group?"
% opt.name
)
def _list_opts(obj):
def is_opt(o):
return (isinstance(o, cfg.Opt) and
not isinstance(o, cfg.SubCommandOpt))
opts = list()
for attr_str in dir(obj):
attr_obj = getattr(obj, attr_str)
if is_opt(attr_obj):
opts.append(attr_obj)
elif (isinstance(attr_obj, list) and
all(map(lambda x: is_opt(x), attr_obj))):
opts.extend(attr_obj)
ret = {}
for opt in opts:
ret.setdefault(_guess_groups(opt, obj), []).append(opt)
return ret.items()
def print_group_opts(group, opts_by_module):
print("[%s]" % group)
print('')
for mod, opts in opts_by_module:
print('#')
print('# Options defined in %s' % mod)
print('#')
print('')
for opt in opts:
_print_opt(opt)
print('')
def _get_my_ip():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
"""Set up a reasonably sensible default for pybasedir, my_ip and host."""
if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the
# second part is an absolute pathname and therefore drop the first
# part.
value = os.path.normpath("/usr/" + value[len(sys.prefix):])
elif value.startswith(BASEDIR):
return value.replace(BASEDIR, '/usr/lib/python/site-packages')
elif BASEDIR in value:
return value.replace(BASEDIR, '')
elif value == _get_my_ip():
return '10.0.0.1'
elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
return 'marconi'
elif value.strip() != value:
return '"%s"' % value
return value
def _print_opt(opt):
opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
if not opt_help:
sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
opt_help = ""
opt_type = None
try:
opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
except (ValueError, AttributeError) as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
opt_help = u'%s (%s)' % (opt_help,
OPT_TYPES[opt_type])
print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
if opt.deprecated_opts:
for deprecated_opt in opt.deprecated_opts:
if deprecated_opt.name:
deprecated_group = (deprecated_opt.group if
deprecated_opt.group else "DEFAULT")
print('# Deprecated group/name - [%s]/%s' %
(deprecated_group,
deprecated_opt.name))
try:
if opt_default is None:
print('#%s=<None>' % opt_name)
elif opt_type == STROPT:
assert(isinstance(opt_default, six.string_types))
print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
opt_default)))
elif opt_type == BOOLOPT:
assert(isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, str(opt_default).lower()))
elif opt_type == INTOPT:
assert(isinstance(opt_default, int) and
not isinstance(opt_default, bool))
print('#%s=%s' % ( | random_line_split |
|
generator.py | writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import argparse
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
import stevedore.named
from marconi.openstack.common import gettextutils
from marconi.openstack.common import importutils
gettextutils.install('marconi')
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
DICTOPT = "DictOpt"
MULTISTROPT = "MultiStrOpt"
OPT_TYPES = {
STROPT: 'string value',
BOOLOPT: 'boolean value',
INTOPT: 'integer value',
FLOATOPT: 'floating point value',
LISTOPT: 'list value',
DICTOPT: 'dict value',
MULTISTROPT: 'multi valued',
}
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
FLOATOPT, LISTOPT, DICTOPT,
MULTISTROPT]))
PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
"../../../../"))
WORDWRAP_WIDTH = 60
def generate(argv):
parser = argparse.ArgumentParser(
description='generate sample configuration file',
)
parser.add_argument('-m', dest='modules', action='append')
parser.add_argument('-l', dest='libraries', action='append')
parser.add_argument('srcfiles', nargs='*')
parsed_args = parser.parse_args(argv)
mods_by_pkg = dict()
for filepath in parsed_args.srcfiles:
pkg_name = filepath.split(os.sep)[1]
mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
os.path.basename(filepath).split('.')[0]])
mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
# NOTE(lzyeval): place top level modules before packages
pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
pkg_names.extend(ext_names)
# opts_by_group is a mapping of group name to an options list
# The options list is a list of (module, options) tuples
opts_by_group = {'DEFAULT': []}
if parsed_args.modules:
for module_name in parsed_args.modules:
module = _import_module(module_name)
if module:
for group, opts in _list_opts(module):
opts_by_group.setdefault(group, []).append((module_name,
opts))
# Look for entry points defined in libraries (or applications) for
# option discovery, and include their return values in the output.
#
# Each entry point should be a function returning an iterable
# of pairs with the group name (or None for the default group)
# and the list of Opt instances for that group.
if parsed_args.libraries:
loader = stevedore.named.NamedExtensionManager(
'oslo.config.opts',
names=list(set(parsed_args.libraries)),
invoke_on_load=False,
)
for ext in loader:
for group, opts in ext.plugin():
opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
opt_list.append((ext.name, opts))
for pkg_name in pkg_names:
mods = mods_by_pkg.get(pkg_name)
mods.sort()
for mod_str in mods:
if mod_str.endswith('.__init__'):
mod_str = mod_str[:mod_str.rfind(".")]
mod_obj = _import_module(mod_str)
if not mod_obj:
raise RuntimeError("Unable to import module %s" % mod_str)
for group, opts in _list_opts(mod_obj):
opts_by_group.setdefault(group, []).append((mod_str, opts))
print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
for group in sorted(opts_by_group.keys()):
print_group_opts(group, opts_by_group[group])
def _import_module(mod_str):
try:
if mod_str.startswith('bin.'):
imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
return sys.modules[mod_str[4:]]
else:
return importutils.import_module(mod_str)
except Exception as e:
sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
return None
def _is_in_group(opt, group):
"Check if opt is in group."
for value in group._opts.values():
# NOTE(llu): Temporary workaround for bug #1262148, wait until
# newly released oslo.config support '==' operator.
if not(value['opt'] != opt):
return True
return False
def _guess_groups(opt, mod_obj):
# is it in the DEFAULT group?
if _is_in_group(opt, cfg.CONF):
return 'DEFAULT'
# what other groups is it in?
for value in cfg.CONF.values():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
raise RuntimeError(
"Unable to find group for option %s, "
"maybe it's defined twice in the same group?"
% opt.name
)
def _list_opts(obj):
def is_opt(o):
return (isinstance(o, cfg.Opt) and
not isinstance(o, cfg.SubCommandOpt))
opts = list()
for attr_str in dir(obj):
attr_obj = getattr(obj, attr_str)
if is_opt(attr_obj):
opts.append(attr_obj)
elif (isinstance(attr_obj, list) and
all(map(lambda x: is_opt(x), attr_obj))):
opts.extend(attr_obj)
ret = {}
for opt in opts:
ret.setdefault(_guess_groups(opt, obj), []).append(opt)
return ret.items()
def print_group_opts(group, opts_by_module):
print("[%s]" % group)
print('')
for mod, opts in opts_by_module:
print('#')
print('# Options defined in %s' % mod)
print('#')
print('')
for opt in opts:
_print_opt(opt)
print('')
def | ():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
"""Set up a reasonably sensible default for pybasedir, my_ip and host."""
if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the
# second part is an absolute pathname and therefore drop the first
# part.
value = os.path.normpath("/usr/" + value[len(sys.prefix):])
elif value.startswith(BASEDIR):
return value.replace(BASEDIR, '/usr/lib/python/site-packages')
elif BASEDIR in value:
return value.replace(BASEDIR, '')
elif value == _get_my_ip():
return '10.0.0.1'
elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
return 'marconi'
elif value.strip() != value:
return '"%s"' % value
return value
def _print_opt(opt):
opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
if not opt_help:
sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
opt_help = ""
opt_type = None
try:
opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
except (ValueError, AttributeError) as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
opt_help = u'%s (%s)' % (opt_help,
OPT_TYPES[opt_type])
print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
if opt.deprecated_opts:
for deprecated_opt in opt.deprecated_opts:
if deprecated_opt.name:
deprecated_group = (deprecated_opt.group if
deprecated_opt.group else "DEFAULT")
print('# Deprecated group/name - [%s]/%s' %
(deprecated_group,
deprecated_opt.name))
try:
if opt_default is None:
print('#%s=<None>' % opt_name)
elif opt_type == STROPT:
assert(isinstance(opt_default, six.string_types))
print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
opt_default)))
elif opt_type == BOOLOPT:
assert(isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, str(opt_default).lower()))
elif opt_type == INTOPT:
assert(isinstance(opt_default, int) and
not isinstance(opt_default, bool))
print('#%s=%s' % ( | _get_my_ip | identifier_name |