Dataset schema — one row per fill-in-the-middle (FIM) example, five columns:

  file_name   large_string   lengths 4-140      source file the example was cut from
  prefix      large_string   lengths 0-12.1k    code before the masked span
  suffix      large_string   lengths 0-12k      code after the masked span
  middle      large_string   lengths 0-7.51k    the masked span itself (the prediction target)
  fim_type    large_string   4 classes          how the span was chosen: identifier_name,
                                                identifier_body, conditional_block, random_line_split

The rows below print their fields in this column order, each field starting on a new line;
empty or truncated fields are simply absent from a row.
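Each row reassembles into its source file by concatenation: a model is given prefix and suffix and must predict middle. A minimal sketch of that reconstruction (the dict layout and helper name are illustrative, not part of any dataset tooling):

    def reconstruct(row: dict) -> str:
        """Rebuild the original source text from a FIM row."""
        return row["prefix"] + row["middle"] + row["suffix"]

    # Worked example using the first attr.rs row below: the masked span
    # "(arg" completes the identifier "args" in a Rust fn signature.
    row = {
        "prefix": "impl<P: Into<Params>> From<P> for Command { fn from",
        "suffix": "s: P) -> Command { /* ... */ } }",
        "middle": "(arg",
        "fim_type": "identifier_name",
    }
    assert reconstruct(row).startswith(
        "impl<P: Into<Params>> From<P> for Command { fn from(args: P)"
    )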
file_name: attr.rs
/// SwiftBar only: separate color for dark system theme. If `None`, use `light`. pub(crate) dark: Option<css_color_parser::Color>, } impl From<css_color_parser::Color> for Color { fn from(light: css_color_parser::Color) -> Color { Color { light, dark: None } } } impl FromStr for Color { type Err = ColorParseError; fn from_str(s: &str) -> Result<Color, ColorParseError> { Ok(Color { light: s.parse()?, dark: None, }) } } impl<'a> TryFrom<&'a str> for Color { type Error = ColorParseError; fn try_from(s: &str) -> Result<Color, ColorParseError> { s.parse() } } #[cfg(feature = "css-colors")] macro_rules! css_color_try_into_color { ($t:ty) => { #[cfg_attr(docsrs, doc(cfg(feature = "css-colors")))] impl TryFrom<$t> for Color { type Error = ColorParseError; fn try_from(color: $t) -> Result<Color, ColorParseError> { Ok(Color { light: color.to_string().parse()?, dark: None, }) } } }; } #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::RGB); #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::RGBA); #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::HSL); #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::HSLA); #[cfg(feature = "serenity")] #[cfg_attr(docsrs, doc(cfg(feature = "serenity")))] impl From<serenity::utils::Colour> for Color { fn from(c: serenity::utils::Colour) -> Color { Color { light: css_color_parser::Color { r: c.r(), g: c.g(), b: c.b(), a: 1.0, }, dark: None, } } } impl fmt::Display for Color { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "#{:02x}{:02x}{:02x}", self.light.r, self.light.g, self.light.b)?; if let Some(dark) = self.dark { write!(f, ",#{:02x}{:02x}{:02x}", dark.r, dark.g, dark.b)?; } Ok(()) } } #[derive(Debug)] /// A menu item's alternate mode or submenu. pub enum Extra { /// A menu item's alternate mode, shown when <key>⌥</key> is held. Alternate(Box<ContentItem>), //TODO make sure alts don't have submenus /// A submenu. Submenu(Menu), } /// Used by [`ContentItem::href`](ContentItem::href()). pub trait IntoUrl { /// Converts `self` into a [`Url`]. fn into_url(self) -> Result<Url, url::ParseError>; } impl IntoUrl for Url { fn into_url(self) -> Result<Url, url::ParseError> { Ok(self) } } impl IntoUrl for String { fn into_url(self) -> Result<Url, url::ParseError> { Url::parse(&self) } } impl<'a> IntoUrl for &'a str { fn into_url(self) -> Result<Url, url::ParseError> { Url::parse(self) } } #[cfg(feature = "url1")] #[cfg_attr(docsrs, doc(cfg(feature = "url1")))] impl IntoUrl for Url1 { fn into_url(self) -> Result<Url, url::ParseError> { Url::parse(self.as_str()) } } /// BitBar only supports up to five parameters for `bash=` commands (see <https://github.com/matryer/bitbar/issues/490>). #[derive(Debug)] pub struct Params { pub(crate) cmd: String, pub(crate) params: Vec<String>, } impl Params { #[doc(hidden)] // used in proc macro pub fn new(cmd: String, params: Vec<String>) -> Self { Self { cmd, params } } } macro_rules! 
params_from { ($n:literal$(, $elt:ident: $t:ident)*) => { impl<T: ToString> From<[T; $n]> for Params { fn from([cmd, $($elt),*]: [T; $n]) -> Params { Params { cmd: cmd.to_string(), params: vec![$($elt.to_string()),*], } } } impl<Cmd: ToString, $($t: ToString),*> From<(Cmd, $($t),*)> for Params { fn from((cmd, $($elt),*): (Cmd, $($t),*)) -> Params { Params { cmd: cmd.to_string(), params: vec![$($elt.to_string()),*], } } } }; } params_from!(1); params_from!(2, param1: A); params_from!(3, param1: A, param2: B); params_from!(4, param1: A, param2: B, param3: C); params_from!(5, param1: A, param2: B, param3: C, param4: D); params_from!(6, param1: A, param2: B, param3: C, param4: D, param5: E); impl<'a, T: ToString> TryFrom<&'a [T]> for Params { type Error = &'a [T]; fn try_from(slice: &[T]) -> Result<Params, &[T]> { match slice { [cmd] => Ok(Params { cmd: cmd.to_string(), params: Vec::default() }), [cmd, param1] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string()] }), [cmd, param1, param2] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string()] }), [cmd, param1, param2, param3] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string(), param3.to_string()] }), [cmd, param1, param2, param3, param4] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string(), param3.to_string(), param4.to_string()] }), [cmd, param1, param2, param3, param4, param5] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string(), param3.to_string(), param4.to_string(), param5.to_string()] }), slice => Err(slice), } } } impl<T: ToString> TryFrom<Vec<T>> for Params { type Error = Vec<T>; fn try_from(mut v: Vec<T>) -> Result<Params, Vec<T>> { match v.len() { 1..=6 => Ok(Params { cmd: v.remove(0).to_string(), params: v.into_iter().map(|x| x.to_string()).collect(), }), _ => Err(v), } } } /// Used by [`ContentItem::command`](ContentItem::command()). /// /// A `Command` contains the [`Params`], which includes the actual command (called `bash=` by BitBar) and its parameters, and the value of `terminal=`. /// /// It is usually constructed via conversion, unless `terminal=true` is required. /// /// **Note:** Unlike BitBar's default of `true`, `Command` assumes a default of `terminal=false`. #[derive(Debug)] pub struct Command { pub(crate) params: Params, pub(crate) terminal: bool, } impl Command { /// Creates a `Command` with the `terminal=` value set to `true`. pub fn terminal(args: impl Into<Params>) -> Command { Command { params: args.into(), terminal: true, } } /// Attempts to construct a `Command` with `terminal=` set to `false` from the given arguments. /// /// This is not a `TryFrom` implementation due to a limitation in Rust. pub fn try_from<P: TryInto<Params>>(args: P) -> Result<Command, P::Error> { Ok(Command { params: args.try_into()?, terminal: false, }) } /// Same as `Command::terminal` but for types that might not convert to `Params`. pub fn try_terminal<P: TryInto<Params>>(args: P) -> Result<Command, P::Error> { Ok(Command { params: args.try_into()?, terminal: true, }) } } /// Converts an array containing a command string and 0–5 parameters to a command argument vector. The `terminal=` value will be `false`. impl<P: Into<Params>> From<P> for Command { fn from
s: P) -> Command { Command { params: args.into(), terminal: false, } } } /// Used by `ContentItem::image` and `ContentItem::template_image`. #[derive
middle: (arg
fim_type: identifier_name
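This row's prefix ends in a Display impl that renders a colour as #rrggbb, appending ,#rrggbb for SwiftBar's optional dark-theme variant. The same formatting rule expressed as a small Python helper for reference (the function name and tuple encoding are illustrative):

    def fmt_color(light, dark=None):
        """light/dark are (r, g, b) byte tuples; mirrors the Rust Display output."""
        out = "#{:02x}{:02x}{:02x}".format(*light)
        if dark is not None:
            out += ",#{:02x}{:02x}{:02x}".format(*dark)
        return out

    assert fmt_color((255, 0, 0)) == "#ff0000"
    assert fmt_color((255, 0, 0), dark=(128, 0, 0)) == "#ff0000,#800000"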
file_name: attr.rs
/// SwiftBar only: separate color for dark system theme. If `None`, use `light`. pub(crate) dark: Option<css_color_parser::Color>, } impl From<css_color_parser::Color> for Color { fn from(light: css_color_parser::Color) -> Color { Color { light, dark: None } } } impl FromStr for Color { type Err = ColorParseError; fn from_str(s: &str) -> Result<Color, ColorParseError> { Ok(Color { light: s.parse()?, dark: None, }) } } impl<'a> TryFrom<&'a str> for Color { type Error = ColorParseError; fn try_from(s: &str) -> Result<Color, ColorParseError> { s.parse() } } #[cfg(feature = "css-colors")] macro_rules! css_color_try_into_color { ($t:ty) => { #[cfg_attr(docsrs, doc(cfg(feature = "css-colors")))] impl TryFrom<$t> for Color { type Error = ColorParseError; fn try_from(color: $t) -> Result<Color, ColorParseError> { Ok(Color { light: color.to_string().parse()?, dark: None, }) } } }; } #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::RGB); #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::RGBA); #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::HSL); #[cfg(feature = "css-colors")] css_color_try_into_color!(css_colors::HSLA); #[cfg(feature = "serenity")] #[cfg_attr(docsrs, doc(cfg(feature = "serenity")))] impl From<serenity::utils::Colour> for Color { fn from(c: serenity::utils::Colour) -> Color { Color { light: css_color_parser::Color { r: c.r(), g: c.g(), b: c.b(), a: 1.0, }, dark: None, } } } impl fmt::Display for Color { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "#{:02x}{:02x}{:02x}", self.light.r, self.light.g, self.light.b)?; if let Some(dark) = self.dark { write!(f, ",#{:02x}{:02x}{:02x}", dark.r, dark.g, dark.b)?; } Ok(()) } } #[derive(Debug)] /// A menu item's alternate mode or submenu. pub enum Extra { /// A menu item's alternate mode, shown when <key>⌥</key> is held. Alternate(Box<ContentItem>), //TODO make sure alts don't have submenus /// A submenu. Submenu(Menu), } /// Used by [`ContentItem::href`](ContentItem::href()). pub trait IntoUrl { /// Converts `self` into a [`Url`]. fn into_url(self) -> Result<Url, url::ParseError>; } impl IntoUrl for Url { fn into_url(self) -> Result<Url, url::ParseError> { Ok(self) } } impl IntoUrl for String { fn into_url(self) -> Result<Url, url::ParseError> { Url::parse(&self) } } impl<'a> IntoUrl for &'a str { fn into_url(self) -> Result<Url, url::ParseError> { Url::parse(self) } } #[cfg(feature = "url1")] #[cfg_attr(docsrs, doc(cfg(feature = "url1")))] impl IntoUrl for Url1 { fn into_url(self) -> Result<Url, url::ParseError> { Url::parse(self.as_str()) } } /// BitBar only supports up to five parameters for `bash=` commands (see <https://github.com/matryer/bitbar/issues/490>). #[derive(Debug)] pub struct Params { pub(crate) cmd: String, pub(crate) params: Vec<String>, } impl Params { #[doc(hidden)] // used in proc macro pub fn new(cmd: String, params: Vec<String>) -> Self { Self { cmd, params } } } macro_rules! 
params_from { ($n:literal$(, $elt:ident: $t:ident)*) => { impl<T: ToString> From<[T; $n]> for Params { fn from([cmd, $($elt),*]: [T; $n]) -> Params { Params { cmd: cmd.to_string(), params: vec![$($elt.to_string()),*], } } } impl<Cmd: ToString, $($t: ToString),*> From<(Cmd, $($t),*)> for Params { fn from((cmd, $($elt),*): (Cmd, $($t),*)) -> Params { Params { cmd: cmd.to_string(), params: vec![$($elt.to_string()),*], } } } }; } params_from!(1); params_from!(2, param1: A); params_from!(3, param1: A, param2: B); params_from!(4, param1: A, param2: B, param3: C); params_from!(5, param1: A, param2: B, param3: C, param4: D); params_from!(6, param1: A, param2: B, param3: C, param4: D, param5: E); impl<'a, T: ToString> TryFrom<&'a [T]> for Params { type Error = &'a [T]; fn try_from(slice: &[T]) -> Result<Params, &[T]> { match slice { [cmd] => Ok(Params { cmd: cmd.to_string(), params: Vec::default() }), [cmd, param1] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string()] }), [cmd, param1, param2] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string()] }), [cmd, param1, param2, param3] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string(), param3.to_string()] }), [cmd, param1, param2, param3, param4] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string(), param3.to_string(), param4.to_string()] }), [cmd, param1, param2, param3, param4, param5] => Ok(Params { cmd: cmd.to_string(), params: vec![param1.to_string(), param2.to_string(), param3.to_string(), param4.to_string(), param5.to_string()] }), slice => Err(slice), }
type Error = Vec<T>; fn try_from(mut v: Vec<T>) -> Result<Params, Vec<T>> { match v.len() { 1..=6 => Ok(Params { cmd: v.remove(0).to_string(), params: v.into_iter().map(|x| x.to_string()).collect(), }), _ => Err(v), } } } /// Used by [`ContentItem::command`](ContentItem::command()). /// /// A `Command` contains the [`Params`], which includes the actual command (called `bash=` by BitBar) and its parameters, and the value of `terminal=`. /// /// It is usually constructed via conversion, unless `terminal=true` is required. /// /// **Note:** Unlike BitBar's default of `true`, `Command` assumes a default of `terminal=false`. #[derive(Debug)] pub struct Command { pub(crate) params: Params, pub(crate) terminal: bool, } impl Command { /// Creates a `Command` with the `terminal=` value set to `true`. pub fn terminal(args: impl Into<Params>) -> Command { Command { params: args.into(), terminal: true, } } /// Attempts to construct a `Command` with `terminal=` set to `false` from the given arguments. /// /// This is not a `TryFrom` implementation due to a limitation in Rust. pub fn try_from<P: TryInto<Params>>(args: P) -> Result<Command, P::Error> { Ok(Command { params: args.try_into()?, terminal: false, }) } /// Same as `Command::terminal` but for types that might not convert to `Params`. pub fn try_terminal<P: TryInto<Params>>(args: P) -> Result<Command, P::Error> { Ok(Command { params: args.try_into()?, terminal: true, }) } } /// Converts an array containing a command string and 0–5 parameters to a command argument vector. The `terminal=` value will be `false`. impl<P: Into<Params>> From<P> for Command { fn from(args: P) -> Command { Command { params: args.into(), terminal: false, } } } /// Used by `ContentItem::image` and `ContentItem::template_image`. #[derive(Debug
middle: } } impl<T: ToString> TryFrom<Vec<T>> for Params {
fim_type: random_line_split
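Both attr.rs rows centre on Params, which accepts a command plus at most five parameters (BitBar's limit on bash= commands) and rejects longer sequences, as in the 1..=6 match of the TryFrom<Vec<T>> impl. The same validation as a Python helper (illustrative, not part of the crate):

    def to_params(args):
        """Split [cmd, p1..p5] into (cmd, params); mirrors the 1..=6 length check."""
        args = [str(a) for a in args]
        if not 1 <= len(args) <= 6:
            raise ValueError(f"need a command plus 0-5 parameters, got {len(args)} items")
        return args[0], args[1:]

    assert to_params(["open", "-a", "Safari"]) == ("open", ["-a", "Safari"])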
file_name: smc_samplers.py
indexing:: obj[array([3, 5, 10, 10])] # returns a new instance that contains particles 3, 5 and 10 (twice) """ shared = [] # put here the name of shared attributes def __init__(self, **kwargs): for k, v in kwargs.items(): self.__dict__[k] = v self.containers = [k for k in kwargs if k not in self.shared] if 'theta' in kwargs: self.arr = as_2d_array(self.theta) self.N, self.dim = self.arr.shape def __getitem__(self, key): attrs = {k: self.__dict__[k][key] for k in self.containers} if isinstance(key, int): return attrs else: attrs.update({k: cp.deepcopy(self.__dict__[k]) for k in self.shared}) return self.__class__(**attrs) def __setitem__(self, key, value): for k in self.containers: self.__dict__[k][key] = value.__dict__[k] def copy(self): """Returns a copy of the object.""" attrs = {k: self.__dict__[k].copy() for k in self.containers} attrs.update({k: cp.deepcopy(self.__dict__[k]) for k in self.shared}) return self.__class__(**attrs) def copyto(self, src, where=None): """Emulates function `copyto` in NumPy. Parameters ---------- where: (N,) bool ndarray True if particle n in src must be copied. src: (N,) `ThetaParticles` object source for each n such that where[n] is True, copy particle n in src into self (at location n) """ for k in self.containers: v = self.__dict__[k] if isinstance(v, np.ndarray): np.copyto(v, src.__dict__[k], where=where) else: v.copyto(src.__dict__[k], where=where) def copyto_at(self, n, src, m): """Copy to at a given location. Parameters ---------- n: int index where to copy src: `ThetaParticles` object source m: int index of the element to be copied Note ---- Basically, does self[n] <- src[m] """ for k in self.containers: self.__dict__[k][n] = src.__dict__[k][m] class MetroParticles(ThetaParticles): """Particles that may be moved through a Metropolis step. The following attributes are required: * `theta`: a (N,) record array; the parameter values * `lpost`: a (N,) float array; log-posterior density at the parameter values An instance has the following shared attribute: * acc_rates: list; acceptance rates of the previous Metropolis steps This class implements generic methods to move all the particle according to a Metropolis step. """ shared = ['acc_rates'] def __init__(self, theta=None, lpost=None, acc_rates=None, **extra_kwargs): ThetaParticles.__init__(self, theta=theta, lpost=lpost, **extra_kwargs) self.acc_rates = [] if acc_rates is None else acc_rates def mcmc_iterate(self, nsteps, xstart, xend, delta_dist): if nsteps == 0: prev_dist = 0. yield while True: mean_dist = np.mean(np.sqrt(np.sum((xend - xstart)**2, axis=1))) if np.abs(mean_dist - prev_dist) < delta_dist * prev_dist: break prev_dist = mean_dist yield else: for _ in range(nsteps): y
class RandomWalkProposal(object): def __init__(self, x, scale=None, adaptive=True): if adaptive: if scale is None: scale = 2.38 / np.sqrt(x.shape[1]) cov = np.cov(x.T) try: self.L = scale * cholesky(cov, lower=True) except LinAlgError: self.L = scale * np.diag(np.sqrt(np.diag(cov))) print('Warning: could not compute Cholesky decomposition, using diag matrix instead') else: if scale is None: scale = 1. self.L = scale * np.eye(x.shape[1]) def step(self, x): y = x + np.dot(stats.norm.rvs(size=x.shape), self.L.T) return y, 0. class IndependentProposal(object): def __init__(self, x, scale=1.1): self.L = scale * cholesky(np.cov(x.T), lower=True) self.mu = np.mean(x, axis=0) def step(self, x): z = stats.norm.rvs(size=x.shape) y = self.mu + np.dot(z, self.L.T) zx = solve_triangular(self.L, np.transpose(x - self.mu), lower=True) delta_lp = (0.5 * np.sum(z * z, axis=1) - 0.5 * np.sum(zx * zx, axis=0)) return y, delta_lp def choose_proposal(self, type_prop='random walk', rw_scale=None, adaptive=True, indep_scale=1.1): if type_prop == 'random walk': return MetroParticles.RandomWalkProposal(self.arr, scale=rw_scale, adaptive=adaptive) if type_prop == 'independent': return MetroParticles.IndependentProposal(self.arr, scale=indep_scale) raise ValueError('Unknown type for Metropolis proposal') def Metropolis(self, compute_target, mh_options): """Performs a certain number of Metropolis steps. Parameters ---------- compute_target: function computes the target density for the proposed values mh_options: dict + 'type_prop': {'random walk', 'independent'} type of proposal: either Gaussian random walk, or independent Gaussian + 'adaptive': bool If True, the covariance matrix of the random walk proposal is set to a `rw_scale` times the weighted cov matrix of the particle sample (ignored if proposal is independent) + 'rw_scale': float (default=None) see above (ignored if proposal is independent) + 'indep_scale': float (default=1.1) for an independent proposal, the proposal distribution is Gaussian with mean set to the particle mean, cov set to `indep_scale` times particle covariance + 'nsteps': int (default: 0) number of steps; if 0, the number of steps is chosen adaptively as follows: we stop when the average distance between the starting points and the stopping points increase less than a certain fraction + 'delta_dist': float (default: 0.1) threshold for when nsteps = 0 """ opts = mh_options.copy() nsteps = opts.pop('nsteps', 0) delta_dist = opts.pop('delta_dist', 0.1) proposal = self.choose_proposal(**opts) xout = self.copy() xp = self.__class__(theta=np.empty_like(self.theta)) step_ars = [] for _ in self.mcmc_iterate(nsteps, self.arr, xout.arr, delta_dist): xp.arr[:, :], delta_lp = proposal.step(xout.arr) compute_target(xp) lp_acc = xp.lpost - xout.lpost + delta_lp accept = (np.log(stats.uniform.rvs(size=self.N)) < lp_acc) xout.copyto(xp, where=accept) step_ars.append(np.mean(accept)) xout.acc_rates = self.acc_rates + [step_ars] return xout ############################# # Basic importance sampler class ImportanceSampler(object): """Importance sampler. Basic implementation of importance sampling, with the same interface as SMC samplers. 
Parameters ---------- model: `StaticModel` object The static model that defines the target posterior distribution(s) proposal: `StructDist` object the proposal distribution (if None, proposal is set to the prior) """ def __init__(self, model=None, proposal=None): self.proposal = model.prior if proposal is None else proposal self.model = model def run(self, N=100): """ Parameter --------- N: int number of particles Returns ------- wgts: Weights object The importance weights (with attributes lw, W, and ESS) X: ThetaParticles object The N particles (with attributes theta, logpost) norm_cst: float Estimate of the normalising constant of the target """ th = self.proposal.rvs(size=N) self.X = ThetaParticles(theta=th, lpost=None) self.X.lpost = self.model.logpost(th) lw = self.X.lpost - self.proposal.logpdf(th) self.wgts = rs.Weights(lw=lw) self.norm_cst = rs.log_mean_exp
middle: ield
fim_type: conditional_block
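The middle here is the tail of a yield inside MetroParticles.mcmc_iterate, a generator that drives the Metropolis sweep either for a fixed nsteps or, when nsteps == 0, adaptively until the mean move distance stops growing by more than a delta_dist fraction. The pattern in isolation (NumPy only; xstart and xend stand in for the particle arrays, which the caller mutates in place between yields):

    import numpy as np

    def mcmc_iterate(nsteps, xstart, xend, delta_dist):
        if nsteps == 0:
            # Adaptive stopping: yield, let the caller move xend in place,
            # then stop once the mean travelled distance barely increases.
            prev_dist = 0.0
            yield
            while True:
                mean_dist = np.mean(np.sqrt(np.sum((xend - xstart) ** 2, axis=1)))
                if np.abs(mean_dist - prev_dist) < delta_dist * prev_dist:
                    break
                prev_dist = mean_dist
                yield
        else:
            for _ in range(nsteps):
                yield

Because the caller updates xend in place (via copyto), each pass around the while loop observes the new particle positions.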
file_name: smc_samplers.py
fancy indexing:: obj[array([3, 5, 10, 10])] # returns a new instance that contains particles 3, 5 and 10 (twice) """ shared = [] # put here the name of shared attributes def __init__(self, **kwargs): for k, v in kwargs.items(): self.__dict__[k] = v self.containers = [k for k in kwargs if k not in self.shared] if 'theta' in kwargs: self.arr = as_2d_array(self.theta) self.N, self.dim = self.arr.shape def __getitem__(self, key): attrs = {k: self.__dict__[k][key] for k in self.containers} if isinstance(key, int): return attrs else: attrs.update({k: cp.deepcopy(self.__dict__[k]) for k in self.shared}) return self.__class__(**attrs) def __setitem__(self, key, value): for k in self.containers: self.__dict__[k][key] = value.__dict__[k] def copy(self): """Returns a copy of the object.""" attrs = {k: self.__dict__[k].copy() for k in self.containers} attrs.update({k: cp.deepcopy(self.__dict__[k]) for k in self.shared}) return self.__class__(**attrs) def copyto(self, src, where=None): """Emulates function `copyto` in NumPy. Parameters ---------- where: (N,) bool ndarray True if particle n in src must be copied. src: (N,) `ThetaParticles` object source for each n such that where[n] is True, copy particle n in src into self (at location n) """ for k in self.containers: v = self.__dict__[k] if isinstance(v, np.ndarray): np.copyto(v, src.__dict__[k], where=where) else: v.copyto(src.__dict__[k], where=where) def copyto_at(self, n, src, m): """Copy to at a given location. Parameters ---------- n: int index where to copy src: `ThetaParticles` object source m: int index of the element to be copied Note ---- Basically, does self[n] <- src[m] """ for k in self.containers: self.__dict__[k][n] = src.__dict__[k][m] class MetroParticles(ThetaParticles): """Particles that may be moved through a Metropolis step. The following attributes are required: * `theta`: a (N,) record array; the parameter values * `lpost`: a (N,) float array; log-posterior density at the parameter values An instance has the following shared attribute: * acc_rates: list; acceptance rates of the previous Metropolis steps This class implements generic methods to move all the particle according to a Metropolis step. """ shared = ['acc_rates'] def __init__(self, theta=None, lpost=None, acc_rates=None, **extra_kwargs): ThetaParticles.__init__(self, theta=theta, lpost=lpost, **extra_kwargs) self.acc_rates = [] if acc_rates is None else acc_rates def mcmc_iterate(self, nsteps, xstart, xend, delta_dist): if nsteps == 0: prev_dist = 0. yield while True: mean_dist = np.mean(np.sqrt(np.sum((xend - xstart)**2, axis=1))) if np.abs(mean_dist - prev_dist) < delta_dist * prev_dist: break prev_dist = mean_dist yield else: for _ in range(nsteps): yield class RandomWalkProposal(object): def __init__(self, x, scale=None, adaptive=True): if adaptive: if scale is None: scale = 2.38 / np.sqrt(x.shape[1]) cov = np.cov(x.T) try: self.L = scale * cholesky(cov, lower=True) except LinAlgError: self.L = scale * np.diag(np.sqrt(np.diag(cov))) print('Warning: could not compute Cholesky decomposition, using diag matrix instead') else: if scale is None: scale = 1. self.L = scale * np.eye(x.shape[1]) def step(self, x): y = x + np.dot(stats.norm.rvs(size=x.shape), self.L.T) return y, 0. 
class IndependentProposal(object): def __init__(self, x, scale=1.1): self.L = scale * cholesky(np.cov(x.T), lower=True) self.mu = np.mean(x, axis=0) def step(self, x): z = stats.norm.rvs(size=x.shape) y = self.mu + np.dot(z, self.L.T) zx = solve_triangular(self.L, np.transpose(x - self.mu), lower=True) delta_lp = (0.5 * np.sum(z * z, axis=1) - 0.5 * np.sum(zx * zx, axis=0)) return y, delta_lp def choose_proposal(self, type_prop='random walk', rw_scale=None, adaptive=True, indep_scale=1.1): if type_prop == 'random walk': return MetroParticles.RandomWalkProposal(self.arr, scale=rw_scale, adaptive=adaptive) if type_prop == 'independent': return MetroParticles.IndependentProposal(self.arr, scale=indep_scale) raise ValueError('Unknown type for Metropolis proposal') def Metropolis(self, compute_target, mh_options): """Performs a certain number of Metropolis steps. Parameters ---------- compute_target: function computes the target density for the proposed values mh_options: dict + 'type_prop': {'random walk', 'independent'} type of proposal: either Gaussian random walk, or independent Gaussian + 'adaptive': bool If True, the covariance matrix of the random walk proposal is set to a `rw_scale` times the weighted cov matrix of the particle sample (ignored if proposal is independent) + 'rw_scale': float (default=None) see above (ignored if proposal is independent) + 'indep_scale': float (default=1.1) for an independent proposal, the proposal distribution is Gaussian with mean set to the particle mean, cov set to `indep_scale` times particle covariance + 'nsteps': int (default: 0) number of steps; if 0, the number of steps is chosen adaptively as follows: we stop when the average distance between the starting points and the stopping points increase less than a certain fraction + 'delta_dist': float (default: 0.1) threshold for when nsteps = 0 """ opts = mh_options.copy() nsteps = opts.pop('nsteps', 0) delta_dist = opts.pop('delta_dist', 0.1) proposal = self.choose_proposal(**opts) xout = self.copy() xp = self.__class__(theta=np.empty_like(self.theta)) step_ars = [] for _ in self.mcmc_iterate(nsteps, self.arr, xout.arr, delta_dist): xp.arr[:, :], delta_lp = proposal.step(xout.arr) compute_target(xp) lp_acc = xp.lpost - xout.lpost + delta_lp accept = (np.log(stats.uniform.rvs(size=self.N)) < lp_acc) xout.copyto(xp, where=accept) step_ars.append(np.mean(accept)) xout.acc_rates = self.acc_rates + [step_ars] return xout ############################# # Basic importance sampler class ImportanceSampler(object): """Importance sampler. Basic implementation of importance sampling, with the same interface as SMC samplers. Parameters ---------- model: `StaticModel` object The static model that defines the target posterior distribution(s) proposal: `StructDist` object the proposal distribution (if None, proposal is set to the prior)
self.proposal = model.prior if proposal is None else proposal self.model = model def run(self, N=100): """ Parameter --------- N: int number of particles Returns ------- wgts: Weights object The importance weights (with attributes lw, W, and ESS) X: ThetaParticles object The N particles (with attributes theta, logpost) norm_cst: float Estimate of the normalising constant of the target """ th = self.proposal.rvs(size=N) self.X = ThetaParticles(theta=th, lpost=None) self.X.lpost = self.model.logpost(th) lw = self.X.lpost - self.proposal.logpdf(th) self.wgts = rs.Weights(lw=lw) self.norm_cst = rs.log_mean_exp
""" def __init__(self, model=None, proposal=None):
fim_type: random_line_split
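This row's suffix contains ImportanceSampler.run: draw N particles from the proposal, set log-weights to log-posterior minus proposal log-density, and estimate the normalising constant by a log-mean-exp of the weights. A self-contained sketch of that computation (SciPy frozen distributions stand in for the library's model and proposal objects, which is an assumption here):

    import numpy as np
    from scipy import stats

    def importance_sample(logpost, proposal, N=100):
        th = proposal.rvs(size=N)                # draw from the proposal
        lw = logpost(th) - proposal.logpdf(th)   # log importance weights
        m = lw.max()
        log_norm_cst = m + np.log(np.mean(np.exp(lw - m)))  # log-mean-exp
        W = np.exp(lw - m)
        W /= W.sum()                             # self-normalised weights
        return th, W, log_norm_cst

    # Toy check: N(0,1) target with a wider N(0,2) proposal; the target is
    # already normalised, so log_norm_cst should be close to 0.
    th, W, lZ = importance_sample(stats.norm(0, 1).logpdf, stats.norm(0, 2), N=100_000)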
file_name: smc_samplers.py
self, key, value): self.l[key] = value def copy(self): return cp.deepcopy(self) def copyto(self, src, where=None): """ Same syntax and functionality as numpy.copyto """ for n, _ in enumerate(self.l): if where[n]: self.l[n] = src.l[n] # not a copy def as_2d_array(theta): """ returns a view to record array theta which behaves like a (N,d) float array """ v = theta.view(np.float) N = theta.shape[0] v.shape = (N, - 1) # raise an error if v cannot be reshaped without creating a copy return v class ThetaParticles(object): """Base class for particle systems for SMC samplers. This is a rather generic class for packing together information on N particles; it may have the following attributes: * `theta`: a structured array (an array with named variables); see `distributions` module for more details on structured arrays. * a bunch of `numpy` arrays such that shape[0] = N; for instance an array ``lpost`` for storing the log posterior density of the N particles; * lists of length N; object n in the list is associated to particle n; for instance a list of particle filters in SMC^2; the name of each of of these lists must be put in class attribute *Nlists*. * a common attribute (shared among all particles). The whole point of this class is to mimic the behaviour of a numpy array containing N particles. In particular this class implements fancy indexing:: obj[array([3, 5, 10, 10])] # returns a new instance that contains particles 3, 5 and 10 (twice) """ shared = [] # put here the name of shared attributes def __init__(self, **kwargs): for k, v in kwargs.items(): self.__dict__[k] = v self.containers = [k for k in kwargs if k not in self.shared] if 'theta' in kwargs: self.arr = as_2d_array(self.theta) self.N, self.dim = self.arr.shape def __getitem__(self, key): attrs = {k: self.__dict__[k][key] for k in self.containers} if isinstance(key, int): return attrs else: attrs.update({k: cp.deepcopy(self.__dict__[k]) for k in self.shared}) return self.__class__(**attrs) def __setitem__(self, key, value): for k in self.containers: self.__dict__[k][key] = value.__dict__[k] def copy(self): """Returns a copy of the object.""" attrs = {k: self.__dict__[k].copy() for k in self.containers} attrs.update({k: cp.deepcopy(self.__dict__[k]) for k in self.shared}) return self.__class__(**attrs) def copyto(self, src, where=None): """Emulates function `copyto` in NumPy. Parameters ---------- where: (N,) bool ndarray True if particle n in src must be copied. src: (N,) `ThetaParticles` object source for each n such that where[n] is True, copy particle n in src into self (at location n) """ for k in self.containers: v = self.__dict__[k] if isinstance(v, np.ndarray): np.copyto(v, src.__dict__[k], where=where) else: v.copyto(src.__dict__[k], where=where) def copyto_at(self, n, src, m): """Copy to at a given location. Parameters ---------- n: int index where to copy src: `ThetaParticles` object source m: int index of the element to be copied Note ---- Basically, does self[n] <- src[m] """ for k in self.containers: self.__dict__[k][n] = src.__dict__[k][m] class MetroParticles(ThetaParticles): """Particles that may be moved through a Metropolis step. The following attributes are required: * `theta`: a (N,) record array; the parameter values * `lpost`: a (N,) float array; log-posterior density at the parameter values An instance has the following shared attribute: * acc_rates: list; acceptance rates of the previous Metropolis steps This class implements generic methods to move all the particle according to a Metropolis step. 
""" shared = ['acc_rates'] def __init__(self, theta=None, lpost=None, acc_rates=None, **extra_kwargs): ThetaParticles.__init__(self, theta=theta, lpost=lpost, **extra_kwargs) self.acc_rates = [] if acc_rates is None else acc_rates def mcmc_iterate(self, nsteps, xstart, xend, delta_dist): if nsteps == 0: prev_dist = 0. yield while True: mean_dist = np.mean(np.sqrt(np.sum((xend - xstart)**2, axis=1))) if np.abs(mean_dist - prev_dist) < delta_dist * prev_dist: break prev_dist = mean_dist yield else: for _ in range(nsteps): yield class RandomWalkProposal(object): def __init__(self, x, scale=None, adaptive=True): if adaptive: if scale is None: scale = 2.38 / np.sqrt(x.shape[1]) cov = np.cov(x.T) try: self.L = scale * cholesky(cov, lower=True) except LinAlgError: self.L = scale * np.diag(np.sqrt(np.diag(cov))) print('Warning: could not compute Cholesky decomposition, using diag matrix instead') else: if scale is None: scale = 1. self.L = scale * np.eye(x.shape[1]) def step(self, x): y = x + np.dot(stats.norm.rvs(size=x.shape), self.L.T) return y, 0. class IndependentProposal(object): def __init__(self, x, scale=1.1): self.L = scale * cholesky(np.cov(x.T), lower=True) self.mu = np.mean(x, axis=0) def step(self, x): z = stats.norm.rvs(size=x.shape) y = self.mu + np.dot(z, self.L.T) zx = solve_triangular(self.L, np.transpose(x - self.mu), lower=True) delta_lp = (0.5 * np.sum(z * z, axis=1) - 0.5 * np.sum(zx * zx, axis=0)) return y, delta_lp def choose_proposal(self, type_prop='random walk', rw_scale=None, adaptive=True, indep_scale=1.1): if type_prop == 'random walk': return MetroParticles.RandomWalkProposal(self.arr, scale=rw_scale, adaptive=adaptive) if type_prop == 'independent': return MetroParticles.IndependentProposal(self.arr, scale=indep_scale) raise ValueError('Unknown type for Metropolis proposal') def Metropolis(self, compute_target, mh_options): """Performs a certain number of Metropolis steps. Parameters ---------- compute_target: function computes the target density for the proposed values mh_options: dict + 'type_prop': {'random walk', 'independent'} type of proposal: either Gaussian random walk, or independent Gaussian + 'adaptive': bool If True, the covariance matrix of the random walk proposal is set to a `rw_scale` times the weighted cov matrix of the particle sample (ignored if proposal is independent) + 'rw_scale': float (default=None) see above (ignored if proposal is independent) + 'indep_scale': float (default=1.1) for an independent proposal, the proposal distribution is Gaussian with mean set to the particle mean, cov set to `indep_scale` times particle covariance + 'nsteps': int (default: 0) number of steps; if 0, the number of steps is chosen adaptively as follows: we stop when the average distance between the starting points and the stopping points increase less than a certain fraction + 'delta_dist': float (default: 0.1) threshold for when nsteps = 0 """ opts = mh_options.copy() nsteps = opts.pop('nsteps', 0) delta_dist = opts.pop('delta_dist', 0.1) proposal = self.choose_proposal(**opts) xout = self.copy() xp = self.__class__(theta=np.empty_like(self.theta)) step_ars = [] for _ in self.mcmc_iterate(nsteps, self.arr, xout.arr, delta_dist): xp.arr[:, :], delta_lp = proposal.step(xout.arr) compute_target(xp) lp_acc = xp.lpost -
middle: _setitem__(
fim_type: identifier_name
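The suffix here opens with the body of a __setitem__ and continues into the ThetaParticles machinery, whose docstring promises NumPy-style fancy indexing: obj[array([3, 5, 10, 10])] returns a new instance holding particles 3, 5, and 10 (twice). A stripped-down sketch of that behaviour (the class and field names are illustrative, not the library's):

    import numpy as np

    class Particles:
        """Dict-of-arrays container that forwards indexing to every field."""
        def __init__(self, **fields):
            self.fields = fields

        def __getitem__(self, key):
            picked = {k: v[key] for k, v in self.fields.items()}
            # int -> one particle's values; index array -> a new container
            # (NumPy fancy indexing copies, so repeated indices are fine).
            return picked if isinstance(key, int) else Particles(**picked)

        def __setitem__(self, key, value):
            for k in self.fields:
                self.fields[k][key] = value.fields[k]

    p = Particles(theta=np.arange(20.0), lpost=-np.arange(20.0))
    sub = p[np.array([3, 5, 10, 10])]
    assert sub.fields["theta"].tolist() == [3.0, 5.0, 10.0, 10.0]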
file_name: smc_samplers.py
must be copied. src: (N,) `ThetaParticles` object source for each n such that where[n] is True, copy particle n in src into self (at location n) """ for k in self.containers: v = self.__dict__[k] if isinstance(v, np.ndarray): np.copyto(v, src.__dict__[k], where=where) else: v.copyto(src.__dict__[k], where=where) def copyto_at(self, n, src, m): """Copy to at a given location. Parameters ---------- n: int index where to copy src: `ThetaParticles` object source m: int index of the element to be copied Note ---- Basically, does self[n] <- src[m] """ for k in self.containers: self.__dict__[k][n] = src.__dict__[k][m] class MetroParticles(ThetaParticles): """Particles that may be moved through a Metropolis step. The following attributes are required: * `theta`: a (N,) record array; the parameter values * `lpost`: a (N,) float array; log-posterior density at the parameter values An instance has the following shared attribute: * acc_rates: list; acceptance rates of the previous Metropolis steps This class implements generic methods to move all the particle according to a Metropolis step. """ shared = ['acc_rates'] def __init__(self, theta=None, lpost=None, acc_rates=None, **extra_kwargs): ThetaParticles.__init__(self, theta=theta, lpost=lpost, **extra_kwargs) self.acc_rates = [] if acc_rates is None else acc_rates def mcmc_iterate(self, nsteps, xstart, xend, delta_dist): if nsteps == 0: prev_dist = 0. yield while True: mean_dist = np.mean(np.sqrt(np.sum((xend - xstart)**2, axis=1))) if np.abs(mean_dist - prev_dist) < delta_dist * prev_dist: break prev_dist = mean_dist yield else: for _ in range(nsteps): yield class RandomWalkProposal(object): def __init__(self, x, scale=None, adaptive=True): if adaptive: if scale is None: scale = 2.38 / np.sqrt(x.shape[1]) cov = np.cov(x.T) try: self.L = scale * cholesky(cov, lower=True) except LinAlgError: self.L = scale * np.diag(np.sqrt(np.diag(cov))) print('Warning: could not compute Cholesky decomposition, using diag matrix instead') else: if scale is None: scale = 1. self.L = scale * np.eye(x.shape[1]) def step(self, x): y = x + np.dot(stats.norm.rvs(size=x.shape), self.L.T) return y, 0. class IndependentProposal(object): def __init__(self, x, scale=1.1): self.L = scale * cholesky(np.cov(x.T), lower=True) self.mu = np.mean(x, axis=0) def step(self, x): z = stats.norm.rvs(size=x.shape) y = self.mu + np.dot(z, self.L.T) zx = solve_triangular(self.L, np.transpose(x - self.mu), lower=True) delta_lp = (0.5 * np.sum(z * z, axis=1) - 0.5 * np.sum(zx * zx, axis=0)) return y, delta_lp def choose_proposal(self, type_prop='random walk', rw_scale=None, adaptive=True, indep_scale=1.1): if type_prop == 'random walk': return MetroParticles.RandomWalkProposal(self.arr, scale=rw_scale, adaptive=adaptive) if type_prop == 'independent': return MetroParticles.IndependentProposal(self.arr, scale=indep_scale) raise ValueError('Unknown type for Metropolis proposal') def Metropolis(self, compute_target, mh_options): """Performs a certain number of Metropolis steps. 
Parameters ---------- compute_target: function computes the target density for the proposed values mh_options: dict + 'type_prop': {'random walk', 'independent'} type of proposal: either Gaussian random walk, or independent Gaussian + 'adaptive': bool If True, the covariance matrix of the random walk proposal is set to a `rw_scale` times the weighted cov matrix of the particle sample (ignored if proposal is independent) + 'rw_scale': float (default=None) see above (ignored if proposal is independent) + 'indep_scale': float (default=1.1) for an independent proposal, the proposal distribution is Gaussian with mean set to the particle mean, cov set to `indep_scale` times particle covariance + 'nsteps': int (default: 0) number of steps; if 0, the number of steps is chosen adaptively as follows: we stop when the average distance between the starting points and the stopping points increase less than a certain fraction + 'delta_dist': float (default: 0.1) threshold for when nsteps = 0 """ opts = mh_options.copy() nsteps = opts.pop('nsteps', 0) delta_dist = opts.pop('delta_dist', 0.1) proposal = self.choose_proposal(**opts) xout = self.copy() xp = self.__class__(theta=np.empty_like(self.theta)) step_ars = [] for _ in self.mcmc_iterate(nsteps, self.arr, xout.arr, delta_dist): xp.arr[:, :], delta_lp = proposal.step(xout.arr) compute_target(xp) lp_acc = xp.lpost - xout.lpost + delta_lp accept = (np.log(stats.uniform.rvs(size=self.N)) < lp_acc) xout.copyto(xp, where=accept) step_ars.append(np.mean(accept)) xout.acc_rates = self.acc_rates + [step_ars] return xout ############################# # Basic importance sampler class ImportanceSampler(object): """Importance sampler. Basic implementation of importance sampling, with the same interface as SMC samplers. Parameters ---------- model: `StaticModel` object The static model that defines the target posterior distribution(s) proposal: `StructDist` object the proposal distribution (if None, proposal is set to the prior) """ def __init__(self, model=None, proposal=None): self.proposal = model.prior if proposal is None else proposal self.model = model def run(self, N=100): """ Parameter --------- N: int number of particles Returns ------- wgts: Weights object The importance weights (with attributes lw, W, and ESS) X: ThetaParticles object The N particles (with attributes theta, logpost) norm_cst: float Estimate of the normalising constant of the target """ th = self.proposal.rvs(size=N) self.X = ThetaParticles(theta=th, lpost=None) self.X.lpost = self.model.logpost(th) lw = self.X.lpost - self.proposal.logpdf(th) self.wgts = rs.Weights(lw=lw) self.norm_cst = rs.log_mean_exp(lw) ############################# # FK classes for SMC samplers class FKSMCsampler(particles.FeynmanKac): """Base FeynmanKac class for SMC samplers. 
Parameters ---------- model: `StaticModel` object The static model that defines the target posterior distribution(s) mh_options: dict + 'type_prop': {'random walk', 'independent'} type of proposal: either Gaussian random walk, or independent Gaussian + 'adaptive': bool If True, the covariance matrix of the random walk proposal is set to a `rw_scale` times the weighted cov matrix of the particle sample (ignored if proposal is independent) + 'rw_scale': float (default=None) see above (ignored if proposal is independent) + 'indep_scale': float (default=1.1) for an independent proposal, the proposal distribution is Gaussian with mean set to the particle mean, cov set to `indep_scale` times particle covariance + 'nsteps': int (default: 0) number of steps; if 0, the number of steps is chosen adaptively as follows: we stop when the average distance between the starting points and the stopping points increase less than a certain fraction + 'delta_dist': float (default: 0.1) threshold for when nsteps = 0 """ def __init__(self, model, mh_options=None): self.model = model self.mh_options = {} if mh_options is None else mh_options @property def T(self): r
middle: eturn self.model.T
fim_type: identifier_body
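Across these smc_samplers.py rows, RandomWalkProposal scales a Gaussian step by 2.38/sqrt(d) times a Cholesky factor of the particle covariance, falls back to a diagonal matrix when the decomposition fails, and Metropolis accepts with log-uniform draws. One such sweep condensed into a standalone function (logpost is an assumed stand-in for the model's target; assumes d >= 2 so np.cov returns a matrix):

    import numpy as np
    from scipy.linalg import cholesky, LinAlgError

    def rw_metropolis_step(x, lp, logpost, rng=None):
        """One adaptive random-walk Metropolis step over N particles (x: (N, d))."""
        rng = np.random.default_rng() if rng is None else rng
        N, d = x.shape
        scale = 2.38 / np.sqrt(d)                # classic optimal-scaling constant
        cov = np.cov(x.T)
        try:
            L = scale * cholesky(cov, lower=True)
        except LinAlgError:
            L = scale * np.diag(np.sqrt(np.diag(cov)))  # diagonal fallback
        y = x + rng.standard_normal(x.shape) @ L.T      # propose
        lp_y = logpost(y)
        accept = np.log(rng.uniform(size=N)) < lp_y - lp  # symmetric proposal
        x[accept], lp[accept] = y[accept], lp_y[accept]
        return x, lp, accept.mean()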
file_name: main.go
() { err := godotenv.Load() if err != nil { color.HiRed("[!] Could not load .env file; it might be missing. Add it to your project root.") return } for _, v := range [4]string{ OAuthConsumerKey, OAuthConsumerSecret, OAuthToken, OAuthTokenSecret, } { if os.Getenv(v) == "" { color.HiRed("[!] %s is missing from your environment configuration. Ensure it is set, then try again.", v) return } } config := oauth1.NewConfig(os.Getenv(OAuthConsumerKey), os.Getenv(OAuthConsumerSecret)) token := oauth1.NewToken(os.Getenv(OAuthToken), os.Getenv(OAuthTokenSecret)) httpClient := config.Client(oauth1.NoContext, token) client := twitter.NewClient(httpClient) latPtr := flag.Float64("lat", 0.0, "The latitude of the tweet") longPtr := flag.Float64("long", 0.0, "The longitude of the tweet") replyToPtr := flag.Int64("replyto", 0, "If you are replying to a tweet, specify its ID here") mediaPtr := flag.String("media", "", "A list of media files following the format 1,2,3,4. Needs to be an image, video, or GIF. Note that you can either: use 4 images OR 1 video OR 1 GIF per tweet. ") debug := flag.Bool("debug", false, "Specify this to view detailed logs") placeIDPtr := flag.String("placeid", "", "If you have places.json in your folder, specify the ID to use.") chunkIntervalPtr := flag.Int64("chunk-interval", NewChunkDefaultWaitTime, "Change the interval (in ms) at which a buffer is read from media and sent as an asynchronous POST request.") flag.Parse() if *debug { color.Blue("debug mode -- more logs will be shown") } tweetText := flag.Args() tweetTextJoined := strings.Join(tweetText, " ") color.HiBlue("---- thighs: Custom Twitter source messages version %s ----\nhttps://github.com/vysiondev/thighs", Version) if *debug { color.HiBlack("[debug] tweet text: %s", tweetTextJoined) } if *chunkIntervalPtr < 0 { color.HiRed("[!] Chunk interval should not be less than 0ms.") return } if *placeIDPtr != "" { places, err := ParsePlacesFile() if err != nil { color.HiRed("Could not parse your places.json file. %s", err.Error()) return } if places == nil { color.HiYellow("[!] You specified -placeid, but the places.json file was not parsed because either the program lacks permissions, or it doesn't exist. It will be ignored.") } else { match := false for _, p := range *places { if strings.EqualFold(p.ID, *placeIDPtr) { *latPtr = p.Lat *longPtr = p.Long match = true color.White("Using location %s with lat: %f, long: %f", p.Name, p.Lat, p.Long) break } } if !match { color.HiYellow("[!] You specified -placeid, but none of your places matched the ID you specified. It will be ignored.") } } } if *latPtr != 0.0 && math.Abs(*latPtr) > MaxLatitude { color.HiRed("[!] Latitude must be within %f and %f.", MaxLatitude, -MaxLatitude) return } if *longPtr != 0.0 && math.Abs(*longPtr) > MaxLongitude { color.HiRed("[!] Longitude must be within %f and %f.", MaxLongitude, -MaxLongitude) return } var replyToUsername string if *replyToPtr != 0 { if *debug { color.HiBlack("[debug] user will reply to tweet with id %d", *replyToPtr) color.HiBlack("[debug] fetching tweet to make sure it exists...") } t, _, err := client.Statuses.Show(*replyToPtr, nil) if err != nil { color.HiRed("[!] Encountered an error while trying to fetch tweet to reply to: %s", err.Error()) return } if t == nil { color.HiRed("[!] 
Reply tweet ID doesn't exist or it's not viewable to the public.") return } if *debug { color.HiBlack("[debug] tweet found; setting reply to mention @%s", t.User.ScreenName) } *replyToPtr = t.ID replyToUsername = t.User.ScreenName } if *debug { color.HiBlack("[debug] completed reply check") } if *replyToPtr != 0 { tweetTextJoined = fmt.Sprintf("@%s %s", replyToUsername, tweetTextJoined) } if len(tweetText) >= TwitterMaxCharacters { color.HiRed("[!] Your proposed Tweet would be too long (%d characters >= 280). Try making it shorter.", len(tweetText)) return } if *debug { color.HiBlack("[debug] completed message length check") } mediaIds := make([]int64, 0) if len(*mediaPtr) > 0 { specialFileUploaded := false mediaSplit := strings.Split(*mediaPtr, ",") if len(mediaSplit) == 0 { color.HiRed("[!] You didn't specify any media to upload.") return } if *debug { color.HiBlack("[debug] %d media files to upload", len(mediaSplit)) } for _, f := range mediaSplit { if specialFileUploaded { color.HiYellow("[!] An image/video was already processed; skipping all other files.") break } color.White("Uploading %s", f) file, err := os.Open(f) if err != nil { color.HiRed("[!] Failed to open file %s: %s.", f, err.Error()) return } defer file.Close() fstat, err := file.Stat() if err != nil { color.HiRed("[!] Failed to stat file %s: %s.", f, err.Error()) return } if fstat.Size() >= 15*1024*1024 { color.HiRed("[!] File is too big (>=15MB).") return } // get file content type contentType, err := utils.GetFileContentType(file) if err != nil { color.HiYellow("[!] Failed to detect content type for %s: %s. Will use application/octet-stream instead.", f, err.Error()) contentType = "application/octet-stream" } if *debug { color.HiBlack("[debug] this file is of type %s", contentType) color.HiBlack("[debug] target size reported by stat: %d (all chunks should add up to this number)", fstat.Size()) color.HiBlack("[debug] resetting reader to 0") } _, err = file.Seek(0, io.SeekStart) if err != nil { color.HiRed("[!] Failed to seek file reader to 0: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] making INIT request") } mediaID, err := CallInit(httpClient, fstat.Size(), contentType) if err != nil { color.HiRed("[!] Failed on INIT request: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] INIT successful; got media ID %d", mediaID) } var segmentID int64 reader := bufio.NewReader(file) chunkSize := utils.CalculateChunkSize(fstat.Size()) if *debug { color.HiBlack("[debug] chunk size (%d * 0.30) is %d", fstat.Size(), chunkSize) } buf := make([]byte, 0, chunkSize) needToWaitSecs := 0 appendResponseChan := make(chan *AppendResponse) var wg sync.WaitGroup if *debug { color.HiBlack("[debug] upload starting") } for { n, err := io.ReadFull(reader, buf[:cap(buf)]) buf = buf[:n] if *debug { color.HiBlack("[debug] [async:%d] %d bytes to be uploaded", segmentID, len(buf)) } if err != nil { if err != io.EOF && err != io.ErrUnexpectedEOF { color.HiRed("[!] Failed to read the file into a buffer because of something other than EOF: %s.", err.Error()) return } wg.Add(1) go CallAppend(mediaID, segmentID, &buf, httpClient, appendResponseChan, &wg) break } wg.Add(1) go CallAppend(media
middle: main
fim_type: identifier_name
file_name: main.go
httpClient := config.Client(oauth1.NoContext, token) client := twitter.NewClient(httpClient) latPtr := flag.Float64("lat", 0.0, "The latitude of the tweet") longPtr := flag.Float64("long", 0.0, "The longitude of the tweet") replyToPtr := flag.Int64("replyto", 0, "If you are replying to a tweet, specify its ID here") mediaPtr := flag.String("media", "", "A list of media files following the format 1,2,3,4. Needs to be an image, video, or GIF. Note that you can either: use 4 images OR 1 video OR 1 GIF per tweet. ") debug := flag.Bool("debug", false, "Specify this to view detailed logs") placeIDPtr := flag.String("placeid", "", "If you have places.json in your folder, specify the ID to use.") chunkIntervalPtr := flag.Int64("chunk-interval", NewChunkDefaultWaitTime, "Change the interval (in ms) at which a buffer is read from media and sent as an asynchronous POST request.") flag.Parse() if *debug { color.Blue("debug mode -- more logs will be shown") } tweetText := flag.Args() tweetTextJoined := strings.Join(tweetText, " ") color.HiBlue("---- thighs: Custom Twitter source messages version %s ----\nhttps://github.com/vysiondev/thighs", Version) if *debug { color.HiBlack("[debug] tweet text: %s", tweetTextJoined) } if *chunkIntervalPtr < 0 { color.HiRed("[!] Chunk interval should not be less than 0ms.") return } if *placeIDPtr != "" { places, err := ParsePlacesFile() if err != nil { color.HiRed("Could not parse your places.json file. %s", err.Error()) return } if places == nil { color.HiYellow("[!] You specified -placeid, but the places.json file was not parsed because either the program lacks permissions, or it doesn't exist. It will be ignored.") } else { match := false for _, p := range *places { if strings.EqualFold(p.ID, *placeIDPtr) { *latPtr = p.Lat *longPtr = p.Long match = true color.White("Using location %s with lat: %f, long: %f", p.Name, p.Lat, p.Long) break } } if !match { color.HiYellow("[!] You specified -placeid, but none of your places matched the ID you specified. It will be ignored.") } } } if *latPtr != 0.0 && math.Abs(*latPtr) > MaxLatitude { color.HiRed("[!] Latitude must be within %f and %f.", MaxLatitude, -MaxLatitude) return } if *longPtr != 0.0 && math.Abs(*longPtr) > MaxLongitude { color.HiRed("[!] Longitude must be within %f and %f.", MaxLongitude, -MaxLongitude) return } var replyToUsername string if *replyToPtr != 0 { if *debug { color.HiBlack("[debug] user will reply to tweet with id %d", *replyToPtr) color.HiBlack("[debug] fetching tweet to make sure it exists...") } t, _, err := client.Statuses.Show(*replyToPtr, nil) if err != nil { color.HiRed("[!] Encountered an error while trying to fetch tweet to reply to: %s", err.Error()) return } if t == nil { color.HiRed("[!] Reply tweet ID doesn't exist or it's not viewable to the public.") return } if *debug { color.HiBlack("[debug] tweet found; setting reply to mention @%s", t.User.ScreenName) } *replyToPtr = t.ID replyToUsername = t.User.ScreenName } if *debug { color.HiBlack("[debug] completed reply check") } if *replyToPtr != 0 { tweetTextJoined = fmt.Sprintf("@%s %s", replyToUsername, tweetTextJoined) } if len(tweetText) >= TwitterMaxCharacters { color.HiRed("[!] Your proposed Tweet would be too long (%d characters >= 280). Try making it shorter.", len(tweetText)) return } if *debug { color.HiBlack("[debug] completed message length check") } mediaIds := make([]int64, 0) if len(*mediaPtr) > 0 { specialFileUploaded := false mediaSplit := strings.Split(*mediaPtr, ",") if len(mediaSplit) == 0 { color.HiRed("[!] 
You didn't specify any media to upload.") return } if *debug { color.HiBlack("[debug] %d media files to upload", len(mediaSplit)) } for _, f := range mediaSplit { if specialFileUploaded { color.HiYellow("[!] An image/video was already processed; skipping all other files.") break } color.White("Uploading %s", f) file, err := os.Open(f) if err != nil { color.HiRed("[!] Failed to open file %s: %s.", f, err.Error()) return } defer file.Close() fstat, err := file.Stat() if err != nil { color.HiRed("[!] Failed to stat file %s: %s.", f, err.Error()) return } if fstat.Size() >= 15*1024*1024 { color.HiRed("[!] File is too big (>=15MB).") return } // get file content type contentType, err := utils.GetFileContentType(file) if err != nil { color.HiYellow("[!] Failed to detect content type for %s: %s. Will use application/octet-stream instead.", f, err.Error()) contentType = "application/octet-stream" } if *debug { color.HiBlack("[debug] this file is of type %s", contentType) color.HiBlack("[debug] target size reported by stat: %d (all chunks should add up to this number)", fstat.Size()) color.HiBlack("[debug] resetting reader to 0") } _, err = file.Seek(0, io.SeekStart) if err != nil { color.HiRed("[!] Failed to seek file reader to 0: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] making INIT request") } mediaID, err := CallInit(httpClient, fstat.Size(), contentType) if err != nil { color.HiRed("[!] Failed on INIT request: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] INIT successful; got media ID %d", mediaID) } var segmentID int64 reader := bufio.NewReader(file) chunkSize := utils.CalculateChunkSize(fstat.Size()) if *debug { color.HiBlack("[debug] chunk size (%d * 0.30) is %d", fstat.Size(), chunkSize) } buf := make([]byte, 0, chunkSize) needToWaitSecs := 0 appendResponseChan := make(chan *AppendResponse) var wg sync.WaitGroup if *debug { color.HiBlack("[debug] upload starting") } for { n, err := io.ReadFull(reader, buf[:cap(buf)]) buf = buf[:n] if *debug { color.HiBlack("[debug] [async:%d] %d bytes to be uploaded", segmentID, len(buf)) } if err != nil { if err != io.EOF && err != io.ErrUnexpectedEOF { color.HiRed("[!] Failed to read the file into a buffer because of something other than EOF: %s.", err.Error()) return } wg.Add(1) go CallAppend(mediaID, segmentID, &buf, httpClient, appendResponseChan, &wg) break } wg.Add(1) go CallAppend(mediaID,
middle: { err := godotenv.Load() if err != nil { color.HiRed("[!] Could not load .env file; it might be missing. Add it to your project root.") return } for _, v := range [4]string{ OAuthConsumerKey, OAuthConsumerSecret, OAuthToken, OAuthTokenSecret, } { if os.Getenv(v) == "" { color.HiRed("[!] %s is missing from your environment configuration. Ensure it is set, then try again.", v) return } } config := oauth1.NewConfig(os.Getenv(OAuthConsumerKey), os.Getenv(OAuthConsumerSecret)) token := oauth1.NewToken(os.Getenv(OAuthToken), os.Getenv(OAuthTokenSecret))
fim_type: identifier_body
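The main.go rows implement Twitter's chunked media upload: an INIT call declares the file size and yields a media ID, APPEND calls push numbered segments (the debug log shows the chunk size as 30% of the file), and FINALIZE asks the server to assemble them. The control flow reduced to a Python sketch with placeholder callables (call_init, call_append, and call_finalize are hypothetical, not a real client):

    import math, os

    def upload_chunked(path, call_init, call_append, call_finalize):
        """Read a file in ~30%-of-size chunks and hand each to APPEND."""
        size = os.path.getsize(path)
        media_id = call_init(size)           # INIT: declare total size, get an ID
        chunk_size = max(1, math.ceil(size * 0.30))
        with open(path, "rb") as f:
            segment_id = 0
            while True:
                buf = f.read(chunk_size)
                if not buf:                  # EOF: every segment has been sent
                    break
                call_append(media_id, segment_id, buf)
                segment_id += 1
        return call_finalize(media_id)       # FINALIZE: server assembles the chunks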
file_name: main.go
list of media files following the format 1,2,3,4. Needs to be an image, video, or GIF. Note that you can either: use 4 images OR 1 video OR 1 GIF per tweet. ") debug := flag.Bool("debug", false, "Specify this to view detailed logs") placeIDPtr := flag.String("placeid", "", "If you have places.json in your folder, specify the ID to use.") chunkIntervalPtr := flag.Int64("chunk-interval", NewChunkDefaultWaitTime, "Change the interval (in ms) at which a buffer is read from media and sent as an asynchronous POST request.") flag.Parse() if *debug { color.Blue("debug mode -- more logs will be shown") } tweetText := flag.Args() tweetTextJoined := strings.Join(tweetText, " ") color.HiBlue("---- thighs: Custom Twitter source messages version %s ----\nhttps://github.com/vysiondev/thighs", Version) if *debug { color.HiBlack("[debug] tweet text: %s", tweetTextJoined) } if *chunkIntervalPtr < 0 { color.HiRed("[!] Chunk interval should not be less than 0ms.") return } if *placeIDPtr != "" { places, err := ParsePlacesFile() if err != nil { color.HiRed("Could not parse your places.json file. %s", err.Error()) return } if places == nil { color.HiYellow("[!] You specified -placeid, but the places.json file was not parsed because either the program lacks permissions, or it doesn't exist. It will be ignored.") } else { match := false for _, p := range *places { if strings.EqualFold(p.ID, *placeIDPtr) { *latPtr = p.Lat *longPtr = p.Long match = true color.White("Using location %s with lat: %f, long: %f", p.Name, p.Lat, p.Long) break } } if !match { color.HiYellow("[!] You specified -placeid, but none of your places matched the ID you specified. It will be ignored.") } } } if *latPtr != 0.0 && math.Abs(*latPtr) > MaxLatitude { color.HiRed("[!] Latitude must be within %f and %f.", MaxLatitude, -MaxLatitude) return } if *longPtr != 0.0 && math.Abs(*longPtr) > MaxLongitude { color.HiRed("[!] Longitude must be within %f and %f.", MaxLongitude, -MaxLongitude) return } var replyToUsername string if *replyToPtr != 0 { if *debug { color.HiBlack("[debug] user will reply to tweet with id %d", *replyToPtr) color.HiBlack("[debug] fetching tweet to make sure it exists...") } t, _, err := client.Statuses.Show(*replyToPtr, nil) if err != nil { color.HiRed("[!] Encountered an error while trying to fetch tweet to reply to: %s", err.Error()) return } if t == nil { color.HiRed("[!] Reply tweet ID doesn't exist or it's not viewable to the public.") return } if *debug { color.HiBlack("[debug] tweet found; setting reply to mention @%s", t.User.ScreenName) } *replyToPtr = t.ID replyToUsername = t.User.ScreenName } if *debug { color.HiBlack("[debug] completed reply check") } if *replyToPtr != 0 { tweetTextJoined = fmt.Sprintf("@%s %s", replyToUsername, tweetTextJoined) } if len(tweetText) >= TwitterMaxCharacters { color.HiRed("[!] Your proposed Tweet would be too long (%d characters >= 280). Try making it shorter.", len(tweetText)) return } if *debug { color.HiBlack("[debug] completed message length check") } mediaIds := make([]int64, 0) if len(*mediaPtr) > 0 { specialFileUploaded := false mediaSplit := strings.Split(*mediaPtr, ",") if len(mediaSplit) == 0 { color.HiRed("[!] You didn't specify any media to upload.") return } if *debug { color.HiBlack("[debug] %d media files to upload", len(mediaSplit)) } for _, f := range mediaSplit { if specialFileUploaded { color.HiYellow("[!] 
An image/video was already processed; skipping all other files.") break } color.White("Uploading %s", f) file, err := os.Open(f) if err != nil { color.HiRed("[!] Failed to open file %s: %s.", f, err.Error()) return } defer file.Close() fstat, err := file.Stat() if err != nil { color.HiRed("[!] Failed to stat file %s: %s.", f, err.Error()) return } if fstat.Size() >= 15*1024*1024 { color.HiRed("[!] File is too big (>=15MB).") return } // get file content type contentType, err := utils.GetFileContentType(file) if err != nil { color.HiYellow("[!] Failed to detect content type for %s: %s. Will use application/octet-stream instead.", f, err.Error()) contentType = "application/octet-stream" } if *debug { color.HiBlack("[debug] this file is of type %s", contentType) color.HiBlack("[debug] target size reported by stat: %d (all chunks should add up to this number)", fstat.Size()) color.HiBlack("[debug] resetting reader to 0") } _, err = file.Seek(0, io.SeekStart) if err != nil { color.HiRed("[!] Failed to seek file reader to 0: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] making INIT request") } mediaID, err := CallInit(httpClient, fstat.Size(), contentType) if err != nil { color.HiRed("[!] Failed on INIT request: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] INIT successful; got media ID %d", mediaID) } var segmentID int64 reader := bufio.NewReader(file) chunkSize := utils.CalculateChunkSize(fstat.Size()) if *debug { color.HiBlack("[debug] chunk size (%d * 0.30) is %d", fstat.Size(), chunkSize) } buf := make([]byte, 0, chunkSize) needToWaitSecs := 0 appendResponseChan := make(chan *AppendResponse) var wg sync.WaitGroup if *debug { color.HiBlack("[debug] upload starting") } for { n, err := io.ReadFull(reader, buf[:cap(buf)]) buf = buf[:n] if *debug { color.HiBlack("[debug] [async:%d] %d bytes to be uploaded", segmentID, len(buf)) } if err != nil { if err != io.EOF && err != io.ErrUnexpectedEOF { color.HiRed("[!] Failed to read the file into a buffer because of something other than EOF: %s.", err.Error()) return } wg.Add(1) go CallAppend(mediaID, segmentID, &buf, httpClient, appendResponseChan, &wg) break } wg.Add(1) go CallAppend(mediaID, segmentID, &buf, httpClient, appendResponseChan, &wg) segmentID++ // not sure what's causing chunks to write the wrong # of bytes unless there's a short delay... time.Sleep(time.Millisecond * time.Duration(*chunkIntervalPtr)) } go func() { wg.Wait() close(appendResponseChan) }() for a := range appendResponseChan { if a.StatusCode == 0 { color.HiRed("[!] Async upload has failed.") return } if a.StatusCode < 200 || a.StatusCode > 299 { color.HiRed("[!] Async upload hit a response that reported status code %d (not 2xx).", a.StatusCode) return } if *debug { color.HiBlack("[debug] [async:%d] done; status %d", a.SegmentID, a.StatusCode) } } if *debug { color.HiBlack("[debug] upload finished") } wait, e := CallFinalize(mediaID, httpClient) if e != nil { color.HiRed("[!] Error while calling FINALIZE: %s.", e.Error()) return }
random_line_split
main.go
return } var replyToUsername string if *replyToPtr != 0 { if *debug { color.HiBlack("[debug] user will reply to tweet with id %d", *replyToPtr) color.HiBlack("[debug] fetching tweet to make sure it exists...") } t, _, err := client.Statuses.Show(*replyToPtr, nil) if err != nil { color.HiRed("[!] Encountered an error while trying to fetch tweet to reply to: %s", err.Error()) return } if t == nil { color.HiRed("[!] Reply tweet ID doesn't exist or it's not viewable to the public.") return } if *debug { color.HiBlack("[debug] tweet found; setting reply to mention @%s", t.User.ScreenName) } *replyToPtr = t.ID replyToUsername = t.User.ScreenName } if *debug { color.HiBlack("[debug] completed reply check") } if *replyToPtr != 0 { tweetTextJoined = fmt.Sprintf("@%s %s", replyToUsername, tweetTextJoined) } if len(tweetTextJoined) >= TwitterMaxCharacters { color.HiRed("[!] Your proposed Tweet would be too long (%d characters >= 280). Try making it shorter.", len(tweetTextJoined)) return } if *debug { color.HiBlack("[debug] completed message length check") } mediaIds := make([]int64, 0) if len(*mediaPtr) > 0 { specialFileUploaded := false mediaSplit := strings.Split(*mediaPtr, ",") if len(mediaSplit) == 0 { color.HiRed("[!] You didn't specify any media to upload.") return } if *debug { color.HiBlack("[debug] %d media files to upload", len(mediaSplit)) } for _, f := range mediaSplit { if specialFileUploaded { color.HiYellow("[!] An image/video was already processed; skipping all other files.") break } color.White("Uploading %s", f) file, err := os.Open(f) if err != nil { color.HiRed("[!] Failed to open file %s: %s.", f, err.Error()) return } defer file.Close() fstat, err := file.Stat() if err != nil { color.HiRed("[!] Failed to stat file %s: %s.", f, err.Error()) return } if fstat.Size() >= 15*1024*1024 { color.HiRed("[!] File is too big (>=15MB).") return } // get file content type contentType, err := utils.GetFileContentType(file) if err != nil { color.HiYellow("[!] Failed to detect content type for %s: %s. Will use application/octet-stream instead.", f, err.Error()) contentType = "application/octet-stream" } if *debug { color.HiBlack("[debug] this file is of type %s", contentType) color.HiBlack("[debug] target size reported by stat: %d (all chunks should add up to this number)", fstat.Size()) color.HiBlack("[debug] resetting reader to 0") } _, err = file.Seek(0, io.SeekStart) if err != nil { color.HiRed("[!] Failed to seek file reader to 0: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] making INIT request") } mediaID, err := CallInit(httpClient, fstat.Size(), contentType) if err != nil { color.HiRed("[!] Failed on INIT request: %s.", err.Error()) return } if *debug { color.HiBlack("[debug] INIT successful; got media ID %d", mediaID) } var segmentID int64 reader := bufio.NewReader(file) chunkSize := utils.CalculateChunkSize(fstat.Size()) if *debug { color.HiBlack("[debug] chunk size (%d * 0.30) is %d", fstat.Size(), chunkSize) } buf := make([]byte, 0, chunkSize) needToWaitSecs := 0 appendResponseChan := make(chan *AppendResponse) var wg sync.WaitGroup if *debug { color.HiBlack("[debug] upload starting") } for { n, err := io.ReadFull(reader, buf[:cap(buf)]) buf = buf[:n] if *debug { color.HiBlack("[debug] [async:%d] %d bytes to be uploaded", segmentID, len(buf)) } if err != nil { if err != io.EOF && err != io.ErrUnexpectedEOF { color.HiRed("[!] 
Failed to read the file into a buffer because of something other than EOF: %s.", err.Error()) return } wg.Add(1) go CallAppend(mediaID, segmentID, &buf, httpClient, appendResponseChan, &wg) break } wg.Add(1) go CallAppend(mediaID, segmentID, &buf, httpClient, appendResponseChan, &wg) segmentID++ // not sure what's causing chunks to write the wrong # of bytes unless there's a short delay... time.Sleep(time.Millisecond * time.Duration(*chunkIntervalPtr)) } go func() { wg.Wait() close(appendResponseChan) }() for a := range appendResponseChan { if a.StatusCode == 0 { color.HiRed("[!] Async upload has failed.") return } if a.StatusCode < 200 || a.StatusCode > 299 { color.HiRed("[!] Async upload hit a response that reported status code %d (not 2xx).", a.StatusCode) return } if *debug { color.HiBlack("[debug] [async:%d] done; status %d", a.SegmentID, a.StatusCode) } } if *debug { color.HiBlack("[debug] upload finished") } wait, e := CallFinalize(mediaID, httpClient) if e != nil { color.HiRed("[!] Error while calling FINALIZE: %s.", e.Error()) return } needToWaitSecs = wait if *debug { color.HiBlack("[debug] FINALIZE call successful") if needToWaitSecs > 0 { color.HiBlack("[debug] FINALIZE says upload is not done processing; we need to wait") } } if needToWaitSecs > 0 { statusChecks := 0 var status *MediaStatusResponse color.HiBlack("Twitter is processing the file; checking again after %d seconds", needToWaitSecs) // initial thread sleep because we get time to wait from FINALIZE time.Sleep(time.Second * time.Duration(needToWaitSecs)) for { if statusChecks > StatusCheckCap { color.HiRed("[!] Waited for too long for upload to complete. Bailing out.") return } if *debug { color.HiBlack("[debug] calling STATUS (try %d of %d)", statusChecks+1, StatusCheckCap) } statusObject, err := CallStatus(mediaID, httpClient) if err != nil { color.HiRed("[!] Error while checking for status: %s.", err.Error()) return } status = statusObject if statusObject.ProcessingInfo.State != "in_progress" { break } color.HiBlack("Processing (%d%%); checking again after %d seconds", statusObject.ProcessingInfo.ProgressPercent, statusObject.ProcessingInfo.CheckAfterSecs) statusChecks++ time.Sleep(time.Second * time.Duration(status.ProcessingInfo.CheckAfterSecs)) } if status == nil { color.HiRed("[!] No data on status object. This could indicate that it failed to upload.") return } if status.ProcessingInfo.State != "succeeded" { color.HiRed("[!] Upload of %s was not successful (status: %s): %s: %s", f, status.ProcessingInfo.State, status.ProcessingInfo.Error.Name, status.ProcessingInfo.Error.Message) return } } if strings.Contains(contentType, "video") || strings.Contains(contentType, "gif") { if *debug { color.HiBlack("[debug] special media type, so this is the only one that will be used in the tweet (overriding everything else)") } mediaIds = make([]int64, 0) specialFileUploaded = true } color.HiGreen("%s uploaded & processed successfully", f) mediaIds = append(mediaIds, mediaID) } } if *debug
{ color.HiBlack("[debug] completed upload of all files") }
conditional_block
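The two main.go excerpts above drive Twitter's chunked media upload: INIT reserves a media ID, the file is read in chunks of roughly 30% of its size and APPENDed asynchronously, then FINALIZE and STATUS polling complete the job. A minimal sketch of just the chunking loop, in Python for brevity — upload_append here is a hypothetical stand-in for the Go CallAppend, not a real Twitter client call:

import os

def calculate_chunk_size(total_size: int) -> int:
    # Mirrors utils.CalculateChunkSize in the Go code: ~30% of the file size.
    return max(1, int(total_size * 0.30))

def chunked_upload(path: str, upload_append) -> int:
    """Read `path` in ~30% chunks, handing each to upload_append(segment_id, data).

    Returns the number of segments sent. A plain f.read() already gives a short
    read only at EOF, which is what the io.ReadFull/ErrUnexpectedEOF dance in
    the Go loop is handling.
    """
    chunk = calculate_chunk_size(os.path.getsize(path))
    segment_id = 0
    with open(path, "rb") as f:
        while data := f.read(chunk):
            upload_append(segment_id, data)
            segment_id += 1
    return segment_id

# Usage: chunked_upload("clip.mp4", lambda i, d: print(i, len(d)))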
train.py
f) # img_value=cv2.imread(self.data_path + f) # x[i,] = cv2.resize(img_value,\ # (self.resize_shape[1], self.resize_shape[0]),\ # interpolation=cv2.INTER_AREA) # # x[i,] = cv2.imread(self.data_path + f)(img_value, (256,256),interpolation=cv2.INTER_AREA) # if self.subset =='train': # # print(self.df.columns[2:].to_list()) # for j,label in enumerate(self.df.columns[2:].to_list()): # rle = self.df[label].iloc[indexes[i]] # # print(rle) # shape = self.df['size'].iloc[indexes[i]] # y[i,:,:,j] = rle2mask(rle, shape, resize_shape = self.resize_shape ) # if not self.augmentation is None: # x[i,], y[i,] = self.__random_transform__(x[i,], y[i,]) # if self.preprocess !=None : x= self.preprocess(x) # if self.subset == 'train' : return x,y # else: return x # def augumentation(): # augument = albu.Compose([albu.VerticalFlip(), # albu.HorizontalFlip(), # albu.Rotate(limit= 20), # albu.GridDistortion(), # # albu.RandomSizedCrop((128,174), 256, 384,interpolation =cv2.INTER_LINEAR ) # ],p=1.0) # return augument def label_concat(df,labels): df = df[labels] label_list = df[df!=''].keys().values.tolist() return label_list def train_split(df): df_copy = df.copy() df_copy['Class'] = df_copy.apply(lambda x : label_concat(x, df.columns[3:].tolist() ), axis=1) train_idx, valid_idx = train_test_split(df_copy.index.values, test_size =0.2, stratify = df_copy['Class'].map(lambda x : str(sorted(x))), random_state=42) return train_idx,valid_idx def compile_and_train(model,train_batches,valid_batches, epochs, pretrained_weights = None, model_path ='./model_test/', graph_path = './graph_test/', log_path = './log/' ): print(model_path) print(graph_path) print(log_path) if (pretrained_weights): model.load_weights(pretrained_weights) model.compile(loss=bce_dice_loss, optimizer='adam', metrics=[dice_coef,bce_dice_loss]) early_stopping = EarlyStopping(monitor='val_loss', mode = 'min',patience=20, verbose=1) ###============= MODEL_SAVE_FOLDER_PATH = model_path if not os.path.exists(MODEL_SAVE_FOLDER_PATH): os.mkdir(MODEL_SAVE_FOLDER_PATH) model_path = MODEL_SAVE_FOLDER_PATH + 'effinet_b4_unet_{epoch:02d}-{val_loss:.4f}.h5' ###============= model_checkpoint = ModelCheckpoint(model_path, monitor='val_loss', mode = 'min', save_best_only=True, verbose=1) #model_checkpoint = ModelCheckpoint("./" + model_name + "_best.h5",monitor='val_acc', # mode = 'max', save_weights_only=True, save_best_only=True, period=1, verbose=1) reduce_lr = ReduceLROnPlateau(monitor='val_loss', mode = 'min',factor=0.2, patience=5, verbose=1) #checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=0, save_weights_only=True, save_best_only=True, mode='auto', period=1) #tensor_board = TensorBoard(log_dir='logs/', histogram_freq=0, batch_size=32) #history = model.fit(x=x_train, y=y_train, batch_size=32, epochs=num_epochs, verbose=1, callbacks=[checkpoint, tensor_board], validation_split=0.2) tb_hist = TensorBoard(log_dir= graph_path , histogram_freq=0, write_graph=True, write_images=True) # sys.quit csv_logger = CSVLogger(os.path.join(log_path, 'log.out'), append=True, separator=';') history = model.fit_generator( train_batches, validation_data = valid_batches, epochs = epochs, verbose = 1, callbacks=[ early_stopping, model_checkpoint, reduce_lr, tb_hist, csv_logger ], use_multiprocessing=True, workers=cores, ) ### deleting garbage model_lists = sorted([k for k in os.listdir(MODEL_SAVE_FOLDER_PATH) if k.split('.')[-1]=='h5']) if len(model_lists)>=2: [os.remove(os.path.join(MODEL_SAVE_FOLDER_PATH, k)) for k in model_lists[:-1]] print('remove garbage model 
{} '.format(len(model_lists))) return history def main(): print(FLAGS.json_path) print(FLAGS.img_path) print(FLAGS.model_name) # sys.exit() print(FLAGS.resize_shape) # sys.exit() df = df_gen(FLAGS.json_path) # size(2100,1400) rle(1400,2100) print(df.iloc[0]) # img_path = '/'.join(FLAGS.json_path.split('/')[:-2]) img_path = FLAGS.img_path print(df.shape) print(img_path) print(df.columns[3:].tolist()) # sys.exit() if FLAGS.augument: argument = augumentation() else: argument = None train_idx, valid_idx = train_split(df) print("Train : {}\n validataion : {}".format(len(train_idx), len(valid_idx))) # In : size(2100,1400) rle(2100,1400) # out : # Img - resize_size(512,256 ) - > resize(256,512) -> (512,256) # Mask - resize_size(512,256 ) - > resize(256,512) -> (512,256) train_batches = SegmentDataGenerator(df.iloc[train_idx], batch_size = FLAGS.batch_size, subset='train', shuffle=True, preprocess = img_preprocess, augmentation = argument, resize_shape = (FLAGS.resize_shape[0],FLAGS.resize_shape[1]), train_path = img_path) valid_batches = SegmentDataGenerator(df.iloc[valid_idx], batch_size = FLAGS.batch_size, subset='train', shuffle=False, preprocess = img_preprocess, augmentation = None, resize_shape = (FLAGS.resize_shape[0],FLAGS.resize_shape[1]), train_path = img_path) print(valid_batches[0][0].shape) print(valid_batches[0][1].shape) # sys.exit() # print(np.where(train_batches[0][1][0,:,:,0]==1)) # print(np.where(train_batches[0][1][0,:,:,1]==1)) # print(np.where(train_batches[0][1][0,:,:,2]==1)) # print(np.where(train_batches[0][1][0,:,:,3]==1)) # print('@@@@@@@') # print(np.where(valid_batches[0][1][0,:,:,0]==1)) # print(np.where(valid_batches[0][1][0,:,:,1]==1)) # print(np.where(valid_batches[0][1][0,:,:,2]==1)) # print(np.where(valid_batches[0][1][0,:,:,3]==1)) # sys.exit() # In : (512,256, 3, 4) # Out : (512,256, 3, 4) seg_model = get_model(label_counts = len(df.columns[3:].tolist()), input_shape = (FLAGS.resize_shape[0],FLAGS.resize_shape[1],3)) print(train_batches[0][0].shape) print(train_batches[0][1].shape) print(valid_batches[0][0].shape) print(valid_batches[0][1].shape) # sys.exit() ### path_name set img_dir_name = img_path.split('/')[-1] task_name = 'segment' model_name = FLAGS.model_name main_path = root_path + '/result/{}/{}/{}/'.format(img_dir_name, task_name, model_name) print(img_path) print(os.path.dirname(os.path.abspath(img_path))) print('!!!!') print(img_path.split('/')) print(img_dir_name) print(main_path) # sys.exit() model_path = os.path.join(main_path, 'model/') graph_path = os.path.join(main_path, 'graph/') log_path = os.path.join(main_path, 'log/') pred_path = os.path.join(main_path, 'pred/') valid_path = os.path.join(main_path, 'valid/') for path in [model_path, graph_path, log_path, pred_path ,valid_path]: if not os.path.exists(path):
os.makedirs(path)
conditional_block
train.py
50) # parser.add_argument('--batch_size', default = 16) # parser.add_argument('--shape', default = (256,256)) # parser.add_argument('--augument',default = True) # # parser.add_argument('--') # return parser # class DataGenerator(keras.utils.Sequence): # def __init__(self, df, batch_size = 16 ,subset ='train', shuffle = False, preprocess = None, info={}, augmentation = None, resize_shape = (256,256), train_path=None, test_path=None, ): # super().__init__() # self.df = df # self.shuffle = shuffle # self.subset = subset # self.batch_size = batch_size # self.preprocess = preprocess # self.info = info # self.augmentation = augmentation # self.resize_shape = resize_shape # if self.subset =='train': # # self.data_path = path +'train_images/' # self.data_path = train_path + '/' # elif self.subset =='test': # # self.data_path = path + 'test_images/' # self.data_path = test_path + '/' # self.on_epoch_end() # def __len__(self): # return int(np.floor(len(self.df) / self.batch_size)) # def on_epoch_end(self): # self.indexes = np.arange(len(self.df)) # if self.shuffle == True: # np.random.shuffle(self.indexes) # def __random_transform__(self, img, masks): # composed = self.augmentation(image = img, mask = masks) # aug_img = composed['image'] # aug_masks = composed['mask'] # return aug_img, aug_masks # def __getitem__(self,index): # x = np.empty((self.batch_size, self.resize_shape[0], self.resize_shape[1], 3), dtype=np.float32) # y = np.empty((self.batch_size, self.resize_shape[0], self.resize_shape[1], 4), dtype=np.int8) # indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # for i,f in enumerate(self.df['ImageId'].iloc[indexes]): # self.info[index*self.batch_size + i] =f # # x[i,]=Image.open(self.data_path + f).resize((256,256), resample = Image.) # # print (self.data_path + f) # img_value=cv2.imread(self.data_path + f) # x[i,] = cv2.resize(img_value,\ # (self.resize_shape[1], self.resize_shape[0]),\ # interpolation=cv2.INTER_AREA) # # x[i,] = cv2.imread(self.data_path + f)(img_value, (256,256),interpolation=cv2.INTER_AREA) # if self.subset =='train': # # print(self.df.columns[2:].to_list()) # for j,label in enumerate(self.df.columns[2:].to_list()): # rle = self.df[label].iloc[indexes[i]] # # print(rle) # shape = self.df['size'].iloc[indexes[i]] # y[i,:,:,j] = rle2mask(rle, shape, resize_shape = self.resize_shape ) # if not self.augmentation is None: # x[i,], y[i,] = self.__random_transform__(x[i,], y[i,]) # if self.preprocess !=None : x= self.preprocess(x) # if self.subset == 'train' : return x,y # else: return x # def augumentation(): # augument = albu.Compose([albu.VerticalFlip(), # albu.HorizontalFlip(), # albu.Rotate(limit= 20), # albu.GridDistortion(), # # albu.RandomSizedCrop((128,174), 256, 384,interpolation =cv2.INTER_LINEAR ) # ],p=1.0) # return augument def label_concat(df,labels): df = df[labels] label_list = df[df!=''].keys().values.tolist() return label_list def
(df): df_copy = df.copy() df_copy['Class'] = df_copy.apply(lambda x : label_concat(x, df.columns[3:].tolist() ), axis=1) train_idx, valid_idx = train_test_split(df_copy.index.values, test_size =0.2, stratify = df_copy['Class'].map(lambda x : str(sorted(x))), random_state=42) return train_idx,valid_idx def compile_and_train(model,train_batches,valid_batches, epochs, pretrained_weights = None, model_path ='./model_test/', graph_path = './graph_test/', log_path = './log/' ): print(model_path) print(graph_path) print(log_path) if (pretrained_weights): model.load_weights(pretrained_weights) model.compile(loss=bce_dice_loss, optimizer='adam', metrics=[dice_coef,bce_dice_loss]) early_stopping = EarlyStopping(monitor='val_loss', mode = 'min',patience=20, verbose=1) ###============= MODEL_SAVE_FOLDER_PATH = model_path if not os.path.exists(MODEL_SAVE_FOLDER_PATH): os.mkdir(MODEL_SAVE_FOLDER_PATH) model_path = MODEL_SAVE_FOLDER_PATH + 'effinet_b4_unet_{epoch:02d}-{val_loss:.4f}.h5' ###============= model_checkpoint = ModelCheckpoint(model_path, monitor='val_loss', mode = 'min', save_best_only=True, verbose=1) #model_checkpoint = ModelCheckpoint("./" + model_name + "_best.h5",monitor='val_acc', # mode = 'max', save_weights_only=True, save_best_only=True, period=1, verbose=1) reduce_lr = ReduceLROnPlateau(monitor='val_loss', mode = 'min',factor=0.2, patience=5, verbose=1) #checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=0, save_weights_only=True, save_best_only=True, mode='auto', period=1) #tensor_board = TensorBoard(log_dir='logs/', histogram_freq=0, batch_size=32) #history = model.fit(x=x_train, y=y_train, batch_size=32, epochs=num_epochs, verbose=1, callbacks=[checkpoint, tensor_board], validation_split=0.2) tb_hist = TensorBoard(log_dir= graph_path , histogram_freq=0, write_graph=True, write_images=True) # sys.quit csv_logger = CSVLogger(os.path.join(log_path, 'log.out'), append=True, separator=';') history = model.fit_generator( train_batches, validation_data = valid_batches, epochs = epochs, verbose = 1, callbacks=[ early_stopping, model_checkpoint, reduce_lr, tb_hist, csv_logger ], use_multiprocessing=True, workers=cores, ) ### deleting garbage model_lists = sorted([k for k in os.listdir(MODEL_SAVE_FOLDER_PATH) if k.split('.')[-1]=='h5']) if len(model_lists)>=2: [os.remove(os.path.join(MODEL_SAVE_FOLDER_PATH, k)) for k in model_lists[:-1]] print('remove garbage model {} '.format(len(model_lists))) return history def main(): print(FLAGS.json_path) print(FLAGS.img_path) print(FLAGS.model_name) # sys.exit() print(FLAGS.resize_shape) # sys.exit() df = df_gen(FLAGS.json_path) # size(2100,1400) rle(1400,2100) print(df.iloc[0]) # img_path = '/'.join(FLAGS.json_path.split('/')[:-2]) img_path = FLAGS.img_path print(df.shape) print(img_path) print(df.columns[3:].tolist()) # sys.exit() if FLAGS.augument: argument = augumentation() else: argument = None train_idx, valid_idx = train_split(df) print("Train : {}\n validataion : {}".format(len(train_idx), len(valid_idx))) # In : size(2100,1400) rle(2100,1400) # out : # Img - resize_size(512,256 ) - > resize(256,512) -> (512,256) # Mask - resize_size(512,256 ) - > resize(256,512) -> (512,256) train_batches = SegmentDataGenerator(df.iloc[train_idx], batch_size = FLAGS.batch_size, subset='train', shuffle=True, preprocess = img_preprocess, augmentation = argument, resize_shape = (FLAGS.resize_shape[0],FLAGS.resize_shape[1]), train_path = img_path) valid_batches = SegmentDataGenerator(df.iloc[valid_idx], batch_size = FLAGS.batch_size, subset='train', 
shuffle=False, preprocess =
train_split
identifier_name
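train_split (the masked identifier in this row) stratifies on the stringified, sorted set of labels present in each image, so rare label combinations are split proportionally between train and validation. A toy demonstration of the same idea — the column names are illustrative, not taken from the real df_gen output:

import pandas as pd
from sklearn.model_selection import train_test_split

# Toy frame: the first three columns mimic metadata, the rest are RLE label columns.
df = pd.DataFrame({
    "ImageId": [f"img{i}.jpg" for i in range(8)],
    "size": ["(2100,1400)"] * 8,
    "extra": [""] * 8,
    "Fish": ["1 2", "", "1 2", "", "1 2", "", "1 2", ""],
    "Flower": ["", "3 4", "", "3 4", "", "3 4", "", "3 4"],
})

# Same idea as label_concat + train_split: stratify on the sorted label set.
df["Class"] = df[df.columns[3:]].apply(
    lambda row: str(sorted(row[row != ""].index.tolist())), axis=1)
train_idx, valid_idx = train_test_split(
    df.index.values, test_size=0.25, stratify=df["Class"], random_state=42)
print(len(train_idx), len(valid_idx))  # 6 2, with each label set split 3/1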
train.py
50) # parser.add_argument('--batch_size', default = 16) # parser.add_argument('--shape', default = (256,256)) # parser.add_argument('--augument',default = True) # # parser.add_argument('--') # return parser # class DataGenerator(keras.utils.Sequence): # def __init__(self, df, batch_size = 16 ,subset ='train', shuffle = False, preprocess = None, info={}, augmentation = None, resize_shape = (256,256), train_path=None, test_path=None, ): # super().__init__() # self.df = df # self.shuffle = shuffle # self.subset = subset # self.batch_size = batch_size # self.preprocess = preprocess # self.info = info # self.augmentation = augmentation # self.resize_shape = resize_shape # if self.subset =='train': # # self.data_path = path +'train_images/' # self.data_path = train_path + '/' # elif self.subset =='test': # # self.data_path = path + 'test_images/' # self.data_path = test_path + '/' # self.on_epoch_end() # def __len__(self): # return int(np.floor(len(self.df) / self.batch_size)) # def on_epoch_end(self): # self.indexes = np.arange(len(self.df)) # if self.shuffle == True: # np.random.shuffle(self.indexes) # def __random_transform__(self, img, masks): # composed = self.augmentation(image = img, mask = masks) # aug_img = composed['image'] # aug_masks = composed['mask'] # return aug_img, aug_masks # def __getitem__(self,index): # x = np.empty((self.batch_size, self.resize_shape[0], self.resize_shape[1], 3), dtype=np.float32) # y = np.empty((self.batch_size, self.resize_shape[0], self.resize_shape[1], 4), dtype=np.int8) # indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # for i,f in enumerate(self.df['ImageId'].iloc[indexes]): # self.info[index*self.batch_size + i] =f # # x[i,]=Image.open(self.data_path + f).resize((256,256), resample = Image.) # # print (self.data_path + f) # img_value=cv2.imread(self.data_path + f) # x[i,] = cv2.resize(img_value,\ # (self.resize_shape[1], self.resize_shape[0]),\ # interpolation=cv2.INTER_AREA) # # x[i,] = cv2.imread(self.data_path + f)(img_value, (256,256),interpolation=cv2.INTER_AREA) # if self.subset =='train': # # print(self.df.columns[2:].to_list()) # for j,label in enumerate(self.df.columns[2:].to_list()): # rle = self.df[label].iloc[indexes[i]] # # print(rle) # shape = self.df['size'].iloc[indexes[i]] # y[i,:,:,j] = rle2mask(rle, shape, resize_shape = self.resize_shape ) # if not self.augmentation is None: # x[i,], y[i,] = self.__random_transform__(x[i,], y[i,]) # if self.preprocess !=None : x= self.preprocess(x) # if self.subset == 'train' : return x,y # else: return x # def augumentation(): # augument = albu.Compose([albu.VerticalFlip(), # albu.HorizontalFlip(), # albu.Rotate(limit= 20), # albu.GridDistortion(), # # albu.RandomSizedCrop((128,174), 256, 384,interpolation =cv2.INTER_LINEAR ) # ],p=1.0) # return augument def label_concat(df,labels):
def train_split(df): df_copy = df.copy() df_copy['Class'] = df_copy.apply(lambda x : label_concat(x, df.columns[3:].tolist() ), axis=1) train_idx, valid_idx = train_test_split(df_copy.index.values, test_size =0.2, stratify = df_copy['Class'].map(lambda x : str(sorted(x))), random_state=42) return train_idx,valid_idx def compile_and_train(model,train_batches,valid_batches, epochs, pretrained_weights = None, model_path ='./model_test/', graph_path = './graph_test/', log_path = './log/' ): print(model_path) print(graph_path) print(log_path) if (pretrained_weights): model.load_weights(pretrained_weights) model.compile(loss=bce_dice_loss, optimizer='adam', metrics=[dice_coef,bce_dice_loss]) early_stopping = EarlyStopping(monitor='val_loss', mode = 'min',patience=20, verbose=1) ###============= MODEL_SAVE_FOLDER_PATH = model_path if not os.path.exists(MODEL_SAVE_FOLDER_PATH): os.mkdir(MODEL_SAVE_FOLDER_PATH) model_path = MODEL_SAVE_FOLDER_PATH + 'effinet_b4_unet_{epoch:02d}-{val_loss:.4f}.h5' ###============= model_checkpoint = ModelCheckpoint(model_path, monitor='val_loss', mode = 'min', save_best_only=True, verbose=1) #model_checkpoint = ModelCheckpoint("./" + model_name + "_best.h5",monitor='val_acc', # mode = 'max', save_weights_only=True, save_best_only=True, period=1, verbose=1) reduce_lr = ReduceLROnPlateau(monitor='val_loss', mode = 'min',factor=0.2, patience=5, verbose=1) #checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=0, save_weights_only=True, save_best_only=True, mode='auto', period=1) #tensor_board = TensorBoard(log_dir='logs/', histogram_freq=0, batch_size=32) #history = model.fit(x=x_train, y=y_train, batch_size=32, epochs=num_epochs, verbose=1, callbacks=[checkpoint, tensor_board], validation_split=0.2) tb_hist = TensorBoard(log_dir= graph_path , histogram_freq=0, write_graph=True, write_images=True) # sys.quit csv_logger = CSVLogger(os.path.join(log_path, 'log.out'), append=True, separator=';') history = model.fit_generator( train_batches, validation_data = valid_batches, epochs = epochs, verbose = 1, callbacks=[ early_stopping, model_checkpoint, reduce_lr, tb_hist, csv_logger ], use_multiprocessing=True, workers=cores, ) ### deleting garbage model_lists = sorted([k for k in os.listdir(MODEL_SAVE_FOLDER_PATH) if k.split('.')[-1]=='h5']) if len(model_lists)>=2: [os.remove(os.path.join(MODEL_SAVE_FOLDER_PATH, k)) for k in model_lists[:-1]] print('remove garbage model {} '.format(len(model_lists))) return history def main(): print(FLAGS.json_path) print(FLAGS.img_path) print(FLAGS.model_name) # sys.exit() print(FLAGS.resize_shape) # sys.exit() df = df_gen(FLAGS.json_path) # size(2100,1400) rle(1400,2100) print(df.iloc[0]) # img_path = '/'.join(FLAGS.json_path.split('/')[:-2]) img_path = FLAGS.img_path print(df.shape) print(img_path) print(df.columns[3:].tolist()) # sys.exit() if FLAGS.augument: argument = augumentation() else: argument = None train_idx, valid_idx = train_split(df) print("Train : {}\n validataion : {}".format(len(train_idx), len(valid_idx))) # In : size(2100,1400) rle(2100,1400) # out : # Img - resize_size(512,256 ) - > resize(256,512) -> (512,256) # Mask - resize_size(512,256 ) - > resize(256,512) -> (512,256) train_batches = SegmentDataGenerator(df.iloc[train_idx], batch_size = FLAGS.batch_size, subset='train', shuffle=True, preprocess = img_preprocess, augmentation = argument, resize_shape = (FLAGS.resize_shape[0],FLAGS.resize_shape[1]), train_path = img_path) valid_batches = SegmentDataGenerator(df.iloc[valid_idx], batch_size = FLAGS.batch_size, 
subset='train', shuffle=False, preprocess = img
df = df[labels] label_list = df[df!=''].keys().values.tolist() return label_list
identifier_body
train.py
50) # parser.add_argument('--batch_size', default = 16) # parser.add_argument('--shape', default = (256,256)) # parser.add_argument('--augument',default = True) # # parser.add_argument('--') # return parser # class DataGenerator(keras.utils.Sequence): # def __init__(self, df, batch_size = 16 ,subset ='train', shuffle = False, preprocess = None, info={}, augmentation = None, resize_shape = (256,256), train_path=None, test_path=None, ): # super().__init__() # self.df = df # self.shuffle = shuffle # self.subset = subset # self.batch_size = batch_size # self.preprocess = preprocess # self.info = info # self.augmentation = augmentation # self.resize_shape = resize_shape
# # self.data_path = path +'train_images/' # self.data_path = train_path + '/' # elif self.subset =='test': # # self.data_path = path + 'test_images/' # self.data_path = test_path + '/' # self.on_epoch_end() # def __len__(self): # return int(np.floor(len(self.df) / self.batch_size)) # def on_epoch_end(self): # self.indexes = np.arange(len(self.df)) # if self.shuffle == True: # np.random.shuffle(self.indexes) # def __random_transform__(self, img, masks): # composed = self.augmentation(image = img, mask = masks) # aug_img = composed['image'] # aug_masks = composed['mask'] # return aug_img, aug_masks # def __getitem__(self,index): # x = np.empty((self.batch_size, self.resize_shape[0], self.resize_shape[1], 3), dtype=np.float32) # y = np.empty((self.batch_size, self.resize_shape[0], self.resize_shape[1], 4), dtype=np.int8) # indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # for i,f in enumerate(self.df['ImageId'].iloc[indexes]): # self.info[index*self.batch_size + i] =f # # x[i,]=Image.open(self.data_path + f).resize((256,256), resample = Image.) # # print (self.data_path + f) # img_value=cv2.imread(self.data_path + f) # x[i,] = cv2.resize(img_value,\ # (self.resize_shape[1], self.resize_shape[0]),\ # interpolation=cv2.INTER_AREA) # # x[i,] = cv2.imread(self.data_path + f)(img_value, (256,256),interpolation=cv2.INTER_AREA) # if self.subset =='train': # # print(self.df.columns[2:].to_list()) # for j,label in enumerate(self.df.columns[2:].to_list()): # rle = self.df[label].iloc[indexes[i]] # # print(rle) # shape = self.df['size'].iloc[indexes[i]] # y[i,:,:,j] = rle2mask(rle, shape, resize_shape = self.resize_shape ) # if not self.augmentation is None: # x[i,], y[i,] = self.__random_transform__(x[i,], y[i,]) # if self.preprocess !=None : x= self.preprocess(x) # if self.subset == 'train' : return x,y # else: return x # def augumentation(): # augument = albu.Compose([albu.VerticalFlip(), # albu.HorizontalFlip(), # albu.Rotate(limit= 20), # albu.GridDistortion(), # # albu.RandomSizedCrop((128,174), 256, 384,interpolation =cv2.INTER_LINEAR ) # ],p=1.0) # return augument def label_concat(df,labels): df = df[labels] label_list = df[df!=''].keys().values.tolist() return label_list def train_split(df): df_copy = df.copy() df_copy['Class'] = df_copy.apply(lambda x : label_concat(x, df.columns[3:].tolist() ), axis=1) train_idx, valid_idx = train_test_split(df_copy.index.values, test_size =0.2, stratify = df_copy['Class'].map(lambda x : str(sorted(x))), random_state=42) return train_idx,valid_idx def compile_and_train(model,train_batches,valid_batches, epochs, pretrained_weights = None, model_path ='./model_test/', graph_path = './graph_test/', log_path = './log/' ): print(model_path) print(graph_path) print(log_path) if (pretrained_weights): model.load_weights(pretrained_weights) model.compile(loss=bce_dice_loss, optimizer='adam', metrics=[dice_coef,bce_dice_loss]) early_stopping = EarlyStopping(monitor='val_loss', mode = 'min',patience=20, verbose=1) ###============= MODEL_SAVE_FOLDER_PATH = model_path if not os.path.exists(MODEL_SAVE_FOLDER_PATH): os.mkdir(MODEL_SAVE_FOLDER_PATH) model_path = MODEL_SAVE_FOLDER_PATH + 'effinet_b4_unet_{epoch:02d}-{val_loss:.4f}.h5' ###============= model_checkpoint = ModelCheckpoint(model_path, monitor='val_loss', mode = 'min', save_best_only=True, verbose=1) #model_checkpoint = ModelCheckpoint("./" + model_name + "_best.h5",monitor='val_acc', # mode = 'max', save_weights_only=True, save_best_only=True, period=1, verbose=1) reduce_lr = 
ReduceLROnPlateau(monitor='val_loss', mode = 'min',factor=0.2, patience=5, verbose=1) #checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=0, save_weights_only=True, save_best_only=True, mode='auto', period=1) #tensor_board = TensorBoard(log_dir='logs/', histogram_freq=0, batch_size=32) #history = model.fit(x=x_train, y=y_train, batch_size=32, epochs=num_epochs, verbose=1, callbacks=[checkpoint, tensor_board], validation_split=0.2) tb_hist = TensorBoard(log_dir= graph_path , histogram_freq=0, write_graph=True, write_images=True) # sys.quit csv_logger = CSVLogger(os.path.join(log_path, 'log.out'), append=True, separator=';') history = model.fit_generator( train_batches, validation_data = valid_batches, epochs = epochs, verbose = 1, callbacks=[ early_stopping, model_checkpoint, reduce_lr, tb_hist, csv_logger ], use_multiprocessing=True, workers=cores, ) ### deleting garbage model_lists = sorted([k for k in os.listdir(MODEL_SAVE_FOLDER_PATH) if k.split('.')[-1]=='h5']) if len(model_lists)>=2: [os.remove(os.path.join(MODEL_SAVE_FOLDER_PATH, k)) for k in model_lists[:-1]] print('remove garbage model {} '.format(len(model_lists))) return history def main(): print(FLAGS.json_path) print(FLAGS.img_path) print(FLAGS.model_name) # sys.exit() print(FLAGS.resize_shape) # sys.exit() df = df_gen(FLAGS.json_path) # size(2100,1400) rle(1400,2100) print(df.iloc[0]) # img_path = '/'.join(FLAGS.json_path.split('/')[:-2]) img_path = FLAGS.img_path print(df.shape) print(img_path) print(df.columns[3:].tolist()) # sys.exit() if FLAGS.augument: argument = augumentation() else: argument = None train_idx, valid_idx = train_split(df) print("Train : {}\n validataion : {}".format(len(train_idx), len(valid_idx))) # In : size(2100,1400) rle(2100,1400) # out : # Img - resize_size(512,256 ) - > resize(256,512) -> (512,256) # Mask - resize_size(512,256 ) - > resize(256,512) -> (512,256) train_batches = SegmentDataGenerator(df.iloc[train_idx], batch_size = FLAGS.batch_size, subset='train', shuffle=True, preprocess = img_preprocess, augmentation = argument, resize_shape = (FLAGS.resize_shape[0],FLAGS.resize_shape[1]), train_path = img_path) valid_batches = SegmentDataGenerator(df.iloc[valid_idx], batch_size = FLAGS.batch_size, subset='train', shuffle=False, preprocess
# if self.subset =='train':
random_line_split
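Both train.py excerpts call rle2mask inside the commented-out generator, but the helper itself never appears in these snippets. A minimal sketch under the usual Kaggle convention (space-separated start/length pairs, 1-based offsets, column-major pixel order); the real helper also resizes the decoded mask, which is omitted here:

import numpy as np

def rle2mask(rle: str, shape: tuple) -> np.ndarray:
    """Decode a run-length string into a binary (height, width) mask.

    Assumes "start length start length ..." with 1-based offsets into the
    image flattened in column-major (Fortran) order.
    """
    mask = np.zeros(shape[0] * shape[1], dtype=np.uint8)
    if rle:
        nums = [int(x) for x in rle.split()]
        for start, length in zip(nums[0::2], nums[1::2]):
            mask[start - 1:start - 1 + length] = 1
    return mask.reshape(shape, order="F")

print(rle2mask("1 2", (2, 2)))  # first column set: [[1 0], [1 0]]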
lmerge_ins.py
_i_R CIPOS95=str(ninefive_i_L_start) + ',' + str(ninefive_i_L_end) CIEND95=str(ninefive_i_R_start) + ',' + str(ninefive_i_R_end) return [CIPOS95, CIEND95] def combine_pdfs(BP, c, use_product, weighting_scheme): L = [] R = [] for b_i in c: b = BP[b_i] L.append([b.left.start, b.left.end, b.left.p]) R.append([b.right.start, b.right.end, b.right.p]) [start_R, end_R, a_R] = l_bp.align_intervals(R) [start_L, end_L, a_L] = l_bp.align_intervals(L) p_L = [0] * len(a_L[0]) p_R = [0] * len(a_R[0]) wts = [1] * len(c) for c_i in range(len(c)): if weighting_scheme == 'evidence_wt': A = BP[c[c_i]].l.rstrip().split('\t', 10) m = l_bp.to_map(A[7]) wt=int(m['SU']) #sys.stderr.write("wt\t0\t"+str(wt)+"\n") a_L[c_i]=[wt*ali for ali in a_L[c_i]] a_R[c_i]=[wt*ari for ari in a_R[c_i]] elif weighting_scheme == 'carrier_wt': A = BP[c[c_i]].l.rstrip().split('\t', 10) m = l_bp.to_map(A[7]) wt = 1 if 'SNAME' in m: wt=len(m['SNAME'].split(',')) a_L[c_i]=[wt*ali for ali in a_L[c_i]] a_R[c_i]=[wt*ari for ari in a_R[c_i]] for i in range(len(a_L[c_i])): #sys.stderr.write("L\t"+str(i)+"\t"+str(c_i)+"\t"+str(a_L[c_i][i])+"\n") p_L[i] += a_L[c_i][i] for i in range(len(a_R[c_i])): #sys.stderr.write("R\t"+str(i)+"\t"+str(c_i)+"\t"+str(a_R[c_i][i])+"\n") p_R[i] += a_R[c_i][i] ALG = 'SUM' if use_product: pmax_i_L = p_L.index(max(p_L)) pmax_i_R = p_R.index(max(p_R)) miss = 0 for c_i in range(len(c)): if (a_L[c_i][pmax_i_L] == 0) or (a_R[c_i][pmax_i_R] == 0): miss += 1 if miss == 0: ALG = "PROD" ls_p_L = [ls.get_ls(1)] * len(a_L[0]) ls_p_R = [ls.get_ls(1)] * len(a_R[0]) for c_i in range(len(c)): for i in range(len(a_L[c_i])): ls_p_L[i] = ls.ls_multiply(ls_p_L[i], ls.get_ls(a_L[c_i][i])) for i in range(len(a_R[c_i])): ls_p_R[i] = ls.ls_multiply(ls_p_R[i], ls.get_ls(a_R[c_i][i])) ls_sum_L = ls.get_ls(0) ls_sum_R = ls.get_ls(0) for ls_p in ls_p_L: ls_sum_L = ls.ls_add(ls_sum_L, ls_p) for ls_p in ls_p_R: ls_sum_R = ls.ls_add(ls_sum_R, ls_p) p_L = [] for ls_p in ls_p_L: p_L.append(ls.get_p(ls.ls_divide(ls_p, ls_sum_L))) p_R = [] for ls_p in ls_p_R:
sum_L = sum(p_L) sum_R = sum(p_R) p_L = [x/sum_L for x in p_L] p_R = [x/sum_R for x in p_R] [clip_start_L, clip_end_L] = l_bp.trim(p_L) [clip_start_R, clip_end_R] = l_bp.trim(p_R) [ new_start_L, new_end_L ] = [ start_L + clip_start_L, end_L - clip_end_L ] [ new_start_R, new_end_R ] = [ start_R + clip_start_R, end_R - clip_end_R ] p_L = p_L[clip_start_L:len(p_L)-clip_end_L] p_R = p_R[clip_start_R:len(p_R)-clip_end_R] s_p_L = sum(p_L) s_p_R = sum(p_R) p_L = [x/s_p_L for x in p_L] p_R = [x/s_p_R for x in p_R] #sys.exit(1) return new_start_L, new_start_R, p_L, p_R, ALG def create_merged_variant(BP, c, v_id, vcf, use_product, weighting_scheme='unweighted'): new_start_L, new_start_R, p_L , p_R, ALG = combine_pdfs(BP, c, use_product, weighting_scheme) max_i_L = p_L.index(max(p_L)) max_i_R = p_R.index(max(p_R)) [cipos95, ciend95]=getCI95( p_L, p_R, max_i_L, max_i_R) new_pos_L = new_start_L + max_i_L new_pos_R = new_start_R + max_i_R BP0=BP[c[0]] A=BP0.l.rstrip().split('\t', 10) ALT = '' if BP0.sv_type == 'BND': if BP0.strands[:2] == '++': ALT = 'N]' + BP0.right.chrom + ':' + str(new_pos_R) + ']' elif BP0.strands[:2] == '-+': ALT = ']' + BP0.right.chrom + ':' + str(new_pos_R) + ']N' elif BP0.strands[:2] == '+-': ALT = 'N[' + BP0.right.chrom + ':' + str(new_pos_R) + '[' elif BP0.strands[:2] == '--': ALT = '[' + BP0.right.chrom + ':' + str(new_pos_R) + '[N' else: ALT = '<' + BP0.sv_type + '>' var_list=[ BP0.left.chrom, new_pos_L, str(v_id), 'N', ALT, 0.0, '.', ''] + A[8:] var=Variant(var_list, vcf) var.set_info('SVTYPE', BP0.sv_type) var.set_info('ALG', ALG) if var.get_info('SVTYPE')=='DEL': var.set_info('SVLEN', new_pos_L - new_pos_R) elif BP0.left.chrom == BP0.right.chrom: var.set_info('SVLEN', new_pos_R - new_pos_L) else: SVLEN = None if var.get_info('SVTYPE') == 'BND': var.set_info('EVENT', str(v_id)) else: var.set_info('END', new_pos_R ) var.set_info('CIPOS95', cipos95) var.set_info('CIEND95', ciend95) var.set_info('CIPOS', ','.join([str(x) for x in [-1*max_i_L, len(p_L) - max_i_L - 1]])) var.set_info('CIEND', ','.join([str(x) for x in [-1*max_i_R, len(p_R) - max_i_R - 1]])) var.set_info('PRPOS', ','.join([str(x) for x in p_L])) var.set_info('PREND', ','.join([str(x) for x in p_R])) return var def combine_var_support(var, BP, c, include_genotypes, sample_order): strand_map = {} qual = 0.0 [ SU, PE, SR ] = [0,0,0] s_name_list = [] s1_name_list = [] format_string = var.get_format_string() gt_dict = dict() for b_i in c: A = BP[b_i].l.rstrip().split('\t') if A[5].isdigit(): qual += float(A[5]) m = l_bp.to_map(A[7]) for strand_entry in m['STRANDS'].split(','): s_type,s_count = strand_entry.split(':') if s_type not in strand_map: strand_map[s_type] = 0 strand_map[s_type] += int(s_count) SU += int(m['SU']) PE += int(m['PE']) SR += int(m['SR']) if 'SNAME' in m: s_name_list.append(m['SNAME'] + ':' + A[2]) if include_genotypes: if format
p_R.append(ls.get_p(ls.ls_divide(ls_p, ls_sum_R)))
conditional_block
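getCI95, completed in this row, grows a window symmetrically outward from the most likely breakpoint position until it covers at least 95% of the probability mass, then reports the window bounds as offsets relative to that mode. The same expansion on a toy distribution:

def ci95_offsets(p):
    """Return (lo, hi) offsets around argmax(p) covering >= 95% of sum(p)."""
    m = p.index(max(p))
    lo = hi = m
    while sum(p[lo:hi + 1]) < 0.95 * sum(p):
        if lo <= 0 and hi >= len(p) - 1:
            break  # the whole vector is the interval
        lo = max(0, lo - 1)
        hi = min(len(p) - 1, hi + 1)
    return lo - m, hi - m

print(ci95_offsets([0.01, 0.02, 0.90, 0.05, 0.02]))  # (-1, 1)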
lmerge_ins.py
(BP, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes): A = BP[0].l.rstrip().split('\t') var = Variant(A,vcf) try: sname = var.get_info('SNAME') var.set_info('SNAME', sname + ':' + var.var_id) except KeyError: pass var.var_id=str(v_id) if use_product: var.set_info('ALG', 'PROD') else: var.set_info('ALG', 'SUM') GTS = None if include_genotypes: null_string = null_format_string(A[8]) gt_dict = { sname: A[9] } GTS = '\t'.join([gt_dict.get(x, null_string) for x in sample_order]) var.gts = None var.gts_string = GTS return var def order_cliques(BP, C): #Sweep the set. Find the largest intersecting set. Remove it. Continue. BP_i = range(len(BP)) # index set of each node in the graph while len(BP_i) > 0: h_l = [] #heap of left breakpoint end coordinates and node id (index). heapq is a min heap and the end coord is what will be used for the sorting. max_c = [] max_c_len = 0 for i in BP_i: # remove anything in the heap that doesn't intersect with the current breakpoint while (len(h_l) > 0) and (h_l[0][0] < BP[i].left.start): heapq.heappop(h_l) heapq.heappush(h_l, (BP[i].left.end, i)) # add to the heap # at this point everything in h_l intersects on the left # but we need to take into account what is going on on the right h_r = [] # heap with rightmost starts h_l_i = [x[1] for x in h_l] # this is all of the node ids on the heap currently h_l_i.sort(key=lambda x:BP[x].right.start) # sort them by their right start for j in h_l_i: # remove anything in the heap that doesn't intersect with the current breakpoint on the right end while (len(h_r) > 0) and (h_r[0][0] < BP[j].right.start): heapq.heappop(h_r) # add something to the right heap heapq.heappush(h_r, (BP[j].right.end, j)) if max_c_len < len(h_r): # max clique! Register what nodes we have max_c_len = len(h_r) max_c = [y[1] for y in h_r] C.append(max_c) for c in max_c: BP_i.remove(c) def getCI95( p_L, p_R, max_i_L, max_i_R): ninefive_i_L_start = max_i_L ninefive_i_L_end = max_i_L ninefive_i_L_total = p_L[max_i_L] while (ninefive_i_L_total < 0.95): if (ninefive_i_L_start <= 0) and (ninefive_i_L_end >= (len(p_L)-1)): break ninefive_i_L_start = max(0, ninefive_i_L_start - 1) ninefive_i_L_end = min(len(p_L)-1, ninefive_i_L_end +1) ninefive_i_L_total = sum(p_L[ninefive_i_L_start:ninefive_i_L_end+1]) ninefive_i_L_start = ninefive_i_L_start - max_i_L ninefive_i_L_end = ninefive_i_L_end - max_i_L ninefive_i_R_start = max_i_R ninefive_i_R_end = max_i_R ninefive_i_R_total = p_R[max_i_R] while (ninefive_i_R_total < 0.95): if (ninefive_i_R_start <= 0) and (ninefive_i_R_end >= len(p_R)-1): break ninefive_i_R_start = max(0, ninefive_i_R_start - 1) ninefive_i_R_end = min(len(p_R)-1, ninefive_i_R_end +1) ninefive_i_R_total = sum(p_R[ninefive_i_R_start:ninefive_i_R_end+1]) ninefive_i_R_end = ninefive_i_R_end - max_i_R ninefive_i_R_start = ninefive_i_R_start - max_i_R CIPOS95=str(ninefive_i_L_start) + ',' + str(ninefive_i_L_end) CIEND95=str(ninefive_i_R_start) + ',' + str(ninefive_i_R_end) return [CIPOS95, CIEND95] def combine_pdfs(BP, c, use_product, weighting_scheme): L = [] R = [] for b_i in c: b = BP[b_i] L.append([b.left.start, b.left.end, b.left.p]) R.append([b.right.start, b.right.end, b.right.p]) [start_R, end_R, a_R] = l_bp.align_intervals(R) [start_L, end_L, a_L] = l_bp.align_intervals(L) p_L = [0] * len(a_L[0]) p_R = [0] * len(a_R[0]) wts = [1] * len(c) for c_i in range(len(c)): if weighting_scheme == 'evidence_wt': A = BP[c[c_i]].l.rstrip().split('\t', 10) m = l_bp.to_map(A[7]) wt=int(m['SU']) #sys.stderr.write("wt\t0\t"+str(wt)+"\n") 
a_L[c_i]=[wt*ali for ali in a_L[c_i]] a_R[c_i]=[wt*ari for ari in a_R[c_i]] elif weighting_scheme == 'carrier_wt': A = BP[c[c_i]].l.rstrip().split('\t', 10) m = l_bp.to_map(A[7]) wt = 1 if 'SNAME' in m: wt=len(m['SNAME'].split(',')) a_L[c_i]=[wt*ali for ali in a_L[c_i]] a_R[c_i]=[wt*ari for ari in a_R[c_i]] for i in range(len(a_L[c_i])): #sys.stderr.write("L\t"+str(i)+"\t"+str(c_i)+"\t"+str(a_L[c_i][i])+"\n") p_L[i] += a_L[c_i][i] for i in range(len(a_R[c_i])): #sys.stderr.write("R\t"+str(i)+"\t"+str(c_i)+"\t"+str(a_R[c_i][i])+"\n") p_R[i] += a_R[c_i][i] ALG = 'SUM' if use_product: pmax_i_L = p_L.index(max(p_L)) pmax_i_R = p_R.index(max(p_R)) miss = 0 for c_i in range(len(c)): if (a_L[c_i][pmax_i_L] == 0) or (a_R[c_i][pmax_i_R] == 0): miss += 1 if miss == 0: ALG = "PROD" ls_p_L = [ls.get_ls(1)] * len(a_L[0]) ls_p_R = [ls.get_ls(1)] * len(a_R[0]) for c_i in range(len(c)): for i in range(len(a_L[c_i])): ls_p_L[i] = ls.ls_multiply(ls_p_L[i], ls.get_ls(a_L[c_i][i])) for i in range(len(a_R[c_i])): ls_p_R[i] = ls.ls_multiply(ls_p_R[i], ls.get_ls(a_R[c_i][i])) ls_sum_L = ls.get_ls(0) ls_sum_R = ls.get_ls(0) for ls_p in ls_p_L: ls_sum_L = ls.ls_add(ls_sum_L, ls_p) for ls_p in ls_p_R: ls_sum_R = ls.ls_add(ls_sum_R, ls_p) p_L = [] for ls_p in ls_p_L: p_L.append(ls.get_p(ls.ls_divide(ls_p, ls_sum_L))) p_R = [] for ls_p in ls_p_R: p_R.append(ls.get_p(ls.ls_divide(ls_p, ls_sum_R))) sum_L = sum(p_L) sum_R = sum(p_R) p_L = [x/sum_L for x in p_L] p_R = [x/sum_R for x in p_R] [clip_start_L, clip_end_L] = l_bp.trim(p_L) [clip_start_R, clip_end_R] = l_bp.trim(p_R) [ new_start_L, new_end_L ] = [ start_L + clip_start_L, end_L - clip_end_L ] [ new_start_R, new_end_R ] = [ start_R + clip_start_R, end_R - clip_end_R
merge_single_bp
identifier_name
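order_cliques, shown in full in this row, finds groups of mutually overlapping breakpoints with a sweep: nodes are visited by left-interval start, a min-heap keyed on interval end evicts anything that stops overlapping, and the largest heap seen is taken as the next clique (a second pass applies the same idea to the right intervals). The left-side sweep on its own, reduced to plain intervals:

import heapq

def max_overlap_set(intervals):
    """Largest set of pairwise-overlapping (start, end) intervals, 1-D sweep."""
    order = sorted(range(len(intervals)), key=lambda i: intervals[i][0])
    heap, best = [], []
    for i in order:
        start, end = intervals[i]
        while heap and heap[0][0] < start:  # evict intervals ending before `start`
            heapq.heappop(heap)
        heapq.heappush(heap, (end, i))
        if len(heap) > len(best):
            best = [j for _, j in heap]
    return best

print(max_overlap_set([(0, 5), (3, 9), (4, 7), (10, 12)]))  # [0, 1, 2]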
lmerge_ins.py
str(new_pos_R) + '[' elif BP0.strands[:2] == '--': ALT = '[' + BP0.right.chrom + ':' + str(new_pos_R) + '[N' else: ALT = '<' + BP0.sv_type + '>' var_list=[ BP0.left.chrom, new_pos_L, str(v_id), 'N', ALT, 0.0, '.', ''] + A[8:] var=Variant(var_list, vcf) var.set_info('SVTYPE', BP0.sv_type) var.set_info('ALG', ALG) if var.get_info('SVTYPE')=='DEL': var.set_info('SVLEN', new_pos_L - new_pos_R) elif BP0.left.chrom == BP0.right.chrom: var.set_info('SVLEN', new_pos_R - new_pos_L) else: SVLEN = None if var.get_info('SVTYPE') == 'BND': var.set_info('EVENT', str(v_id)) else: var.set_info('END', new_pos_R ) var.set_info('CIPOS95', cipos95) var.set_info('CIEND95', ciend95) var.set_info('CIPOS', ','.join([str(x) for x in [-1*max_i_L, len(p_L) - max_i_L - 1]])) var.set_info('CIEND', ','.join([str(x) for x in [-1*max_i_R, len(p_R) - max_i_R - 1]])) var.set_info('PRPOS', ','.join([str(x) for x in p_L])) var.set_info('PREND', ','.join([str(x) for x in p_R])) return var def combine_var_support(var, BP, c, include_genotypes, sample_order): strand_map = {} qual = 0.0 [ SU, PE, SR ] = [0,0,0] s_name_list = [] s1_name_list = [] format_string = var.get_format_string() gt_dict = dict() for b_i in c: A = BP[b_i].l.rstrip().split('\t') if A[5].isdigit(): qual += float(A[5]) m = l_bp.to_map(A[7]) for strand_entry in m['STRANDS'].split(','): s_type,s_count = strand_entry.split(':') if s_type not in strand_map: strand_map[s_type] = 0 strand_map[s_type] += int(s_count) SU += int(m['SU']) PE += int(m['PE']) SR += int(m['SR']) if 'SNAME' in m: s_name_list.append(m['SNAME'] + ':' + A[2]) if include_genotypes: if format_string == A[8]: gt_dict[m['SNAME']] = A[9] else: format_dict = dict(zip(A[8].split(':'), A[9].split(':'))) geno = ':'.join([format_dict.get(i, '.') for i in var.format_list]) gt_dict[m['SNAME']] = geno else: var.format_dict=None if s_name_list: var.set_info('SNAME', ','.join(s_name_list)) GTS = None if include_genotypes: null_string = null_format_string(format_string) GTS = '\t'.join([gt_dict.get(x, null_string) for x in sample_order]) var.gts=None var.gts_string=GTS strand_types_counts = [] for strand in strand_map: strand_types_counts.append(strand + ':' + str(strand_map[strand])) var.set_info('STRANDS', ','.join(strand_types_counts)) var.qual = qual var.set_info('PE', str(PE)) var.set_info('SU', str(SU)) var.set_info('SR', str(SR)) def invtobnd(var): strands=var.get_info('STRANDS') strand_dict = dict(x.split(':') for x in strands.split(',')) for o in strand_dict.keys(): if strand_dict[o] == '0': del(strand_dict[o]) strands=','.join(['%s:%s' % (o,strand_dict[o]) for o in strand_dict]) var.set_info('STRANDS', strands) if strands[:2] == '++': ALT = 'N]' + var.chrom + ':' + str(var.get_info('END')) + ']' elif strands[:2] == '--': ALT = '[' + var.chrom + ':' + str(var.get_info('END')) + '[N' var.set_info('SVTYPE', 'BND') var.alt = ALT [ tempci, temp95 ] = [var.get_info('CIPOS'), var.get_info('CIPOS95')] try: temppr = var.get_info('PRPOS') except KeyError: raise MissingProbabilitiesException('Required tag PRPOS not found.') var.set_info('CIPOS', var.get_info('CIEND')) var.set_info('CIEND', tempci) var.set_info('CIPOS95', var.get_info('CIEND95')) var.set_info('CIEND95', temp95 ) try: var.set_info('PRPOS', var.get_info('PREND')) except KeyError: raise MissingProbabilitiesException('Required tag PREND not found.') var.set_info('PREND', temppr ) def write_var(var, vcf_out, include_genotypes=False): v_id=var.var_id if var.get_info('CIPOS95') != '0,0' or var.get_info('CIEND95') != '0,0': var.set_info('IMPRECISE', 
True) else: var.set_info('IMPRECISE', False) if var.get_info('SVTYPE') == 'INV' and ('--:0' in var.get_info('STRANDS') or '++:0' in var.get_info('STRANDS')): invtobnd(var) if var.alt not in ['<DEL>', '<DUP>', '<INV>']: var.var_id=str(v_id)+'_1' var.set_info('EVENT', v_id) var.set_info('MATEID', str(v_id)+'_2') var.info.pop('END', None) var.info.pop('SVLEN', None) varstring=var.get_var_string(use_cached_gt_string=True) if not include_genotypes: varstring='\t'.join(varstring.split('\t', 10)[:8]) vcf_out.write(varstring+'\n') new_alt = '' if var.alt[0] == '[': new_alt = '[' + var.chrom + ':' + str(var.pos) + '[N' elif var.alt[0] == ']': new_alt = 'N[' + var.chrom + ':' + str(var.pos) + '[' elif var.alt[-1] == '[': new_alt = ']' + var.chrom + ':' + str(var.pos) + ']N' elif var.alt[-1] == ']': new_alt = 'N]' + var.chrom + ':' + str(var.pos) + ']' sep, chrom, pos = parse_bnd_alt_string(var.alt) var.chrom = chrom var.pos = int(pos) var.var_id = str(v_id)+'_2' var.set_info('MATEID', str(v_id)+'_1') var.set_info('SECONDARY', True) var.alt = new_alt [ tempci, temp95 ] = [var.get_info('CIPOS'), var.get_info('CIPOS95')] try: temppr = var.get_info('PRPOS') except KeyError: raise MissingProbabilitiesException('Required tag PRPOS not found.') var.set_info('CIPOS', var.get_info('CIEND')) var.set_info('CIEND', tempci) var.set_info('CIPOS95', var.get_info('CIEND95')) var.set_info('CIEND95', temp95 ) try: var.set_info('PRPOS', var.get_info('PREND')) except KeyError: raise MissingProbabilitiesException('Required tag PREND not found.') var.set_info('PREND', temppr ) varstring=var.get_var_string(use_cached_gt_string=True) if not include_genotypes: varstring='\t'.join(varstring.split('\t', 10)[:8]) vcf_out.write(varstring+'\n') else: varstring=var.get_var_string(use_cached_gt_string=True) if not include_genotypes: varstring='\t'.join(varstring.split('\t', 10)[:8]) vcf_out.write(varstring+'\n') def merge(BP, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes=False, weighting_scheme='unweighted'): if len(BP) == 1: #merge a single breakpoint
v_id+=1 var=merge_single_bp(BP, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes) write_var(var, vcf_out, include_genotypes)
random_line_split
lmerge_ins.py
1]])) var.set_info('PRPOS', ','.join([str(x) for x in p_L])) var.set_info('PREND', ','.join([str(x) for x in p_R])) return var def combine_var_support(var, BP, c, include_genotypes, sample_order): strand_map = {} qual = 0.0 [ SU, PE, SR ] = [0,0,0] s_name_list = [] s1_name_list = [] format_string = var.get_format_string() gt_dict = dict() for b_i in c: A = BP[b_i].l.rstrip().split('\t') if A[5].isdigit(): qual += float(A[5]) m = l_bp.to_map(A[7]) for strand_entry in m['STRANDS'].split(','): s_type,s_count = strand_entry.split(':') if s_type not in strand_map: strand_map[s_type] = 0 strand_map[s_type] += int(s_count) SU += int(m['SU']) PE += int(m['PE']) SR += int(m['SR']) if 'SNAME' in m: s_name_list.append(m['SNAME'] + ':' + A[2]) if include_genotypes: if format_string == A[8]: gt_dict[m['SNAME']] = A[9] else: format_dict = dict(zip(A[8].split(':'), A[9].split(':'))) geno = ':'.join([format_dict.get(i, '.') for i in var.format_list]) gt_dict[m['SNAME']] = geno else: var.format_dict=None if s_name_list: var.set_info('SNAME', ','.join(s_name_list)) GTS = None if include_genotypes: null_string = null_format_string(format_string) GTS = '\t'.join([gt_dict.get(x, null_string) for x in sample_order]) var.gts=None var.gts_string=GTS strand_types_counts = [] for strand in strand_map: strand_types_counts.append(strand + ':' + str(strand_map[strand])) var.set_info('STRANDS', ','.join(strand_types_counts)) var.qual = qual var.set_info('PE', str(PE)) var.set_info('SU', str(SU)) var.set_info('SR', str(SR)) def invtobnd(var): strands=var.get_info('STRANDS') strand_dict = dict(x.split(':') for x in strands.split(',')) for o in strand_dict.keys(): if strand_dict[o] == '0': del(strand_dict[o]) strands=','.join(['%s:%s' % (o,strand_dict[o]) for o in strand_dict]) var.set_info('STRANDS', strands) if strands[:2] == '++': ALT = 'N]' + var.chrom + ':' + str(var.get_info('END')) + ']' elif strands[:2] == '--': ALT = '[' + var.chrom + ':' + str(var.get_info('END')) + '[N' var.set_info('SVTYPE', 'BND') var.alt = ALT [ tempci, temp95 ] = [var.get_info('CIPOS'), var.get_info('CIPOS95')] try: temppr = var.get_info('PRPOS') except KeyError: raise MissingProbabilitiesException('Required tag PRPOS not found.') var.set_info('CIPOS', var.get_info('CIEND')) var.set_info('CIEND', tempci) var.set_info('CIPOS95', var.get_info('CIEND95')) var.set_info('CIEND95', temp95 ) try: var.set_info('PRPOS', var.get_info('PREND')) except KeyError: raise MissingProbabilitiesException('Required tag PREND not found.') var.set_info('PREND', temppr ) def write_var(var, vcf_out, include_genotypes=False): v_id=var.var_id if var.get_info('CIPOS95') != '0,0' or var.get_info('CIEND95') != '0,0': var.set_info('IMPRECISE', True) else: var.set_info('IMPRECISE', False) if var.get_info('SVTYPE') == 'INV' and ('--:0' in var.get_info('STRANDS') or '++:0' in var.get_info('STRANDS')): invtobnd(var) if var.alt not in ['<DEL>', '<DUP>', '<INV>']: var.var_id=str(v_id)+'_1' var.set_info('EVENT', v_id) var.set_info('MATEID', str(v_id)+'_2') var.info.pop('END', None) var.info.pop('SVLEN', None) varstring=var.get_var_string(use_cached_gt_string=True) if not include_genotypes: varstring='\t'.join(varstring.split('\t', 10)[:8]) vcf_out.write(varstring+'\n') new_alt = '' if var.alt[0] == '[': new_alt = '[' + var.chrom + ':' + str(var.pos) + '[N' elif var.alt[0] == ']': new_alt = 'N[' + var.chrom + ':' + str(var.pos) + '[' elif var.alt[-1] == '[': new_alt = ']' + var.chrom + ':' + str(var.pos) + ']N' elif var.alt[-1] == ']': new_alt = 'N]' + var.chrom + ':' + 
str(var.pos) + ']' sep, chrom, pos = parse_bnd_alt_string(var.alt) var.chrom = chrom var.pos = int(pos) var.var_id = str(v_id)+'_2' var.set_info('MATEID', str(v_id)+'_1') var.set_info('SECONDARY', True) var.alt = new_alt [ tempci, temp95 ] = [var.get_info('CIPOS'), var.get_info('CIPOS95')] try: temppr = var.get_info('PRPOS') except KeyError: raise MissingProbabilitiesException('Required tag PRPOS not found.') var.set_info('CIPOS', var.get_info('CIEND')) var.set_info('CIEND', tempci) var.set_info('CIPOS95', var.get_info('CIEND95')) var.set_info('CIEND95', temp95 ) try: var.set_info('PRPOS', var.get_info('PREND')) except KeyError: raise MissingProbabilitiesException('Required tag PREND not found.') var.set_info('PREND', temppr ) varstring=var.get_var_string(use_cached_gt_string=True) if not include_genotypes: varstring='\t'.join(varstring.split('\t', 10)[:8]) vcf_out.write(varstring+'\n') else: varstring=var.get_var_string(use_cached_gt_string=True) if not include_genotypes: varstring='\t'.join(varstring.split('\t', 10)[:8]) vcf_out.write(varstring+'\n') def merge(BP, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes=False, weighting_scheme='unweighted'): if len(BP) == 1: #merge a single breakpoint v_id+=1 var=merge_single_bp(BP, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes) write_var(var, vcf_out, include_genotypes) else: BP.sort(key=lambda x: x.left.start) ordered_cliques = [] order_cliques(BP, ordered_cliques) #merge cliques for cliq in ordered_cliques: v_id+=1 var=create_merged_variant(BP, cliq, v_id, vcf, use_product, weighting_scheme) combine_var_support(var, BP, cliq, include_genotypes, sample_order) write_var(var, vcf_out, include_genotypes) return v_id def r_cluster(BP_l, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes=False, weighting_scheme='unweighted'): # need to resort based on the right side, then extract clusters
BP_l.sort(key=lambda x: x.right.start)
BP_l.sort(key=lambda x: x.right.chrom)
BP_r = []
BP_max_end_r = -1
BP_chr_r = ''
for b in BP_l:
    if (len(BP_r) == 0) or \
       ((b.right.start <= BP_max_end_r) and \
        (b.right.chrom == BP_chr_r)):
        BP_r.append(b)
        BP_max_end_r = max(BP_max_end_r, b.right.end)
        BP_chr_r = b.right.chrom
    else:
        v_id = merge(BP_r, sample_order, v_id, use_product, vcf, vcf_out, include_genotypes, weighting_scheme)
        BP_r = [b]
        BP_max_end_r = b.right.end
        BP_chr_r = b.right.chrom
identifier_body
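The invtobnd and write_var routines above assemble VCF breakend (BND) ALT strings by string concatenation. For reference, a minimal sketch of the four ALT layouts the VCF 4.x spec defines; bnd_alt is a hypothetical helper written for illustration, not a function from this file.

# Hypothetical helper summarizing the four VCF breakend ALT layouts that
# invtobnd/write_var build by concatenation above. Not part of this file.
def bnd_alt(ref, chrom, pos, strands):
    mate = '%s:%s' % (chrom, pos)
    forms = {
        '++': '%s]%s]' % (ref, mate),  # t]p]  (both breakends on + strand)
        '+-': '%s[%s[' % (ref, mate),  # t[p[  (deletion-type orientation)
        '-+': ']%s]%s' % (mate, ref),  # ]p]t  (duplication-type orientation)
        '--': '[%s[%s' % (mate, ref),  # [p[t  (both breakends on - strand)
    }
    return forms[strands]

# e.g. bnd_alt('N', '2', 321682, '+-') == 'N[2:321682['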
Popup.ts
onselectstart = returnFalse; let btnSets = $('<div class="dg-title-buttons"></div>').appendTo(tb); if (cfg.btnMax) { this.btnMax = $("<b class='dg-btn-max'></b>"); btnSets.append(this.btnMax); this.btnMax.on('click', function () { self.toggle(); }); } if (cfg.btnClose) { this.btnClose = $("<b class='dg-btn-x'></b>"); btnSets.append(this.btnClose); this.btnClose.on('click', function () { self.close(); }); }
$('<div class="row tb-row" />').prependTo(this.boxy).append(tb); } function setDraggable(self) { let tb = self.titleBar; tb.on('mousedown', function (evt) { self.toTop(); if (evt.target.tagName === 'B') return; if (evt.button < 2 && self.state !== "max") { tb.on('mousemove.boxy', function (e) { tb.unbind("mousemove.boxy"); let boxy = self.boxy[0]; document.onselectstart = returnFalse; let size = self.getSize(); BoxyStore.dragging = [ boxy, e.pageX - boxy.offsetLeft, e.pageY - boxy.offsetTop, document.body.scrollWidth - size.width, document.body.scrollHeight - size.height ]; $(document) .bind("mousemove.boxy", BoxyStore._handleDrag) .bind("mouseup.boxy", function () { if (self.state !== "max" && BoxyStore.dragging) { $(document).unbind(".boxy"); BoxyStore.dragging = document.onselectstart = null; let pos = self.boxy.position(); self.restoreSize.top = pos.top; self.restoreSize.left = pos.left; } }); }); } tb.on("mouseup.boxy", function () { tb.unbind(".boxy"); }); }); } function setFooter(cfg) { let footer = this.footBar = $('<div class="dg-footer"></div>'); let htmlArr = []; for (let key in cfg.buttons) { let v = cfg.buttons[key], x = htmlArr.length; let cls, txt; if (typeof v === 'string') { cls = (x === 0 ? 'btn-primary' : ''); txt = v; } else { cls = v.className || ''; txt = v.text || ''; if(typeof v.onClick === 'function'){ if(!cfg.callback){ cfg.callback = function () {}; } cfg.callback[key] = v.onClick; } } htmlArr.push(`<button class="${cls}" name="${x}" data-key="${key}">${txt}</button>`); } footer.html(htmlArr.join(' ')); let self = this; footer.on('click', 'button', function (evt) { let keepOpen = false; if (cfg.callback) { let clicked = this; let btnKey = $(this).data('key'); let ifrWin = null; if (self.iframe) { ifrWin = self.iframe.contentWindow ? 
self.iframe.contentWindow : self.iframe.contentDocument.defaultView; } let i = parseInt(clicked.name, 10); if(btnKey && (typeof cfg.callback[btnKey] === 'function')){ keepOpen = cfg.callback[btnKey].call(self, i, ifrWin, clicked); } else{ keepOpen = cfg.callback.call(self, i, ifrWin, clicked); } } if (!keepOpen) self.close(); }); $('<div class="row tf-row" />').appendTo(this.boxy).append(footer); } class PopUp extends DisplayObject { cfg: any; state: string; restoreSize: any; visible: boolean; mask: JQuery; boxy: JQuery; titleBar ?: JQuery; footBar ?: JQuery; content: JQuery; btnMax ?: JQuery; btnClose ?: JQuery; iframe ?: HTMLIFrameElement; constructor(jq: JQuery, cfg: any) { cfg = $.extend({}, DEFAULTS, cfg); super(jq, cfg); BoxyStore.manager.push(this); if (cfg.popId) { if (cfg.popId in BoxyStore.managerHash) throw new Error(`Duplicated PopId "${cfg.popId}"`); else BoxyStore.managerHash[cfg.popId] = this; } } init(jq: JQuery, cfg: any) { super.init(jq, cfg); this.cfg = cfg; } create(jq: JQuery, cfg: any) : PopUp{ this.state = 'normal'; this.visible = false; this.mask = $('<div class="dg-mask"></div>'); this.boxy = $('<div class="dg-wrapper" id="' + ('dialog_' + DisplayObject.guid() ) + '"></div>'); this.content = $('<div class="dg-content"></div>'); this.content.append(jq); this.boxy.append(this.content).appendTo(document.body); let titleBarHeight = 0, footBarHeight = 0; if (cfg.title) { setTitleBar.call(this, cfg); titleBarHeight = this.titleBar.outerHeight(); this.boxy.find('.tb-row').css({height: titleBarHeight}); } if (cfg.buttons) { setFooter.call(this, cfg); this.boxy.find('.tf-row').css({height: this.footBar.outerHeight()}); footBarHeight = this.footBar.outerHeight(); } if (this.jq[0].tagName === 'IFRAME') this.iframe = this.jq[0] as HTMLIFrameElement; let contentSize = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300 }; /*//console.log(size); this.boxy.css(contentSize);*/ let doc = document.documentElement;//, win = window; let viewport = { //top: win.pageYOffset, //left: win.pageXOffset, width: doc.clientWidth, height: doc.clientHeight }; //console.log(p); let pos = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300, top: Math.max(0, (viewport.height - contentSize.height ) / 2), left: Math.max(0, (viewport.width - contentSize.width) / 2) }; this.boxy.css(pos); this.restoreSize = pos; //console.warn(this.restoreSize); this.mask.append(this.boxy.css({visibility: 'visible'})).appendTo(document.body); this.toTop(); if (navigator.userAgent.indexOf('Firefox') > -1 && this.iframe) { jq.css({height: contentSize.height - titleBarHeight - footBarHeight - 2}); } if (cfg.show) this.open(); return this; } getSize() { return { width: this.boxy.outerWidth(), height: this.boxy.outerHeight() } } getPosition(): any { let b = this.boxy[0]; return {left: b.offsetLeft, top: b.offsetTop}; } toTop() { this.mask.css({zIndex: nextZ()}); return this; } open() { this.boxy.stop(true, true); if (this.visible) { return this.toTop(); } this.mask.css({display: "block", opacity: 1}); let topPx = this.boxy.position().top; //console.warn(this.boxy[0], topPx); this.boxy.css({top: topPx - 20, opacity: 0}).animate({opacity: 1, top: topPx}, 200); this.visible = true; return this; } close(fn?: Function) { let that = this; let css = this.getPosition(); css.opacity = 0; css.top = Math.max(css.top - 40, 0); this.mask.animate({opacity: 0}, 200); this.boxy.stop(true, true).animate(css, 300, function () { if 
(typeof that.cfg.onClose === 'function') that.cfg.onClose.call(that); if (typeof fn === 'function') fn.call(that); if (that.cfg.destroy) that.destroy.call(that); else{ that.visible = false; that.boxy.css({top: css.top + 40}); that.mask.css({display: 'none'}); } }); return this; } max() { //resize window entity this.boxy.stop(true, true).css({ left: 0, top: 0, width: '100%', height: '100%' }); if (this.btnMax) this.btnMax.toggleClass('dg-btn-max dg-btn-restore'); //$(document.body).addClass('no-scroll'); this
if (cfg.drag) { setDraggable.call(this, this, cfg); }
random_line_split
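setDraggable above packs [element, pointer offsets, max left, max top] into BoxyStore.dragging and defers the actual movement to BoxyStore._handleDrag, which is not included in this excerpt. A minimal sketch of what such a handler plausibly does with that tuple, offered as an assumption rather than the library's real code:

// Assumed tuple shape, matching what setDraggable stores above.
type DragState = [HTMLElement, number, number, number, number];
declare const BoxyStore: { dragging: DragState | null };

// Hedged sketch of a mousemove handler: move the dialog with the pointer,
// clamped to the document bounds computed in setDraggable.
function handleDragSketch(e: MouseEvent): void {
    if (!BoxyStore.dragging) return;
    const [el, dx, dy, maxLeft, maxTop] = BoxyStore.dragging;
    el.style.left = Math.min(Math.max(e.pageX - dx, 0), maxLeft) + 'px';
    el.style.top = Math.min(Math.max(e.pageY - dy, 0), maxTop) + 'px';
}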
Popup.ts
class="dg-title-buttons"></div>').appendTo(tb); if (cfg.btnMax) { this.btnMax = $("<b class='dg-btn-max'></b>"); btnSets.append(this.btnMax); this.btnMax.on('click', function () { self.toggle(); }); } if (cfg.btnClose) { this.btnClose = $("<b class='dg-btn-x'></b>"); btnSets.append(this.btnClose); this.btnClose.on('click', function () { self.close(); }); } if (cfg.drag) { setDraggable.call(this, this, cfg); } $('<div class="row tb-row" />').prependTo(this.boxy).append(tb); } function setDraggable(self) { let tb = self.titleBar; tb.on('mousedown', function (evt) { self.toTop(); if (evt.target.tagName === 'B') return; if (evt.button < 2 && self.state !== "max") { tb.on('mousemove.boxy', function (e) { tb.unbind("mousemove.boxy"); let boxy = self.boxy[0]; document.onselectstart = returnFalse; let size = self.getSize(); BoxyStore.dragging = [ boxy, e.pageX - boxy.offsetLeft, e.pageY - boxy.offsetTop, document.body.scrollWidth - size.width, document.body.scrollHeight - size.height ]; $(document) .bind("mousemove.boxy", BoxyStore._handleDrag) .bind("mouseup.boxy", function () { if (self.state !== "max" && BoxyStore.dragging) { $(document).unbind(".boxy"); BoxyStore.dragging = document.onselectstart = null; let pos = self.boxy.position(); self.restoreSize.top = pos.top; self.restoreSize.left = pos.left; } }); }); } tb.on("mouseup.boxy", function () { tb.unbind(".boxy"); }); }); } function setFooter(cfg) { let footer = this.footBar = $('<div class="dg-footer"></div>'); let htmlArr = []; for (let key in cfg.buttons) { let v = cfg.buttons[key], x = htmlArr.length; let cls, txt; if (typeof v === 'string') { cls = (x === 0 ? 'btn-primary' : ''); txt = v; } else { cls = v.className || ''; txt = v.text || ''; if(typeof v.onClick === 'function'){ if(!cfg.callback){ cfg.callback = function () {}; } cfg.callback[key] = v.onClick; } } htmlArr.push(`<button class="${cls}" name="${x}" data-key="${key}">${txt}</button>`); } footer.html(htmlArr.join(' ')); let self = this; footer.on('click', 'button', function (evt) { let keepOpen = false; if (cfg.callback) { let clicked = this; let btnKey = $(this).data('key'); let ifrWin = null; if (self.iframe) { ifrWin = self.iframe.contentWindow ? 
self.iframe.contentWindow : self.iframe.contentDocument.defaultView; } let i = parseInt(clicked.name, 10); if(btnKey && (typeof cfg.callback[btnKey] === 'function')){ keepOpen = cfg.callback[btnKey].call(self, i, ifrWin, clicked); } else{ keepOpen = cfg.callback.call(self, i, ifrWin, clicked); } } if (!keepOpen) self.close(); }); $('<div class="row tf-row" />').appendTo(this.boxy).append(footer); } class PopUp extends DisplayObject { cfg: any; state: string; restoreSize: any; visible: boolean; mask: JQuery; boxy: JQuery; titleBar ?: JQuery; footBar ?: JQuery; content: JQuery; btnMax ?: JQuery; btnClose ?: JQuery; iframe ?: HTMLIFrameElement; constructor(jq: JQuery, cfg: any) { cfg = $.extend({}, DEFAULTS, cfg); super(jq, cfg); BoxyStore.manager.push(this); if (cfg.popId) { if (cfg.popId in BoxyStore.managerHash) throw new Error(`Duplicated PopId "${cfg.popId}"`); else BoxyStore.managerHash[cfg.popId] = this; } } init(jq: JQuery, cfg: any) { super.init(jq, cfg); this.cfg = cfg; } create(jq: JQuery, cfg: any) : PopUp{ this.state = 'normal'; this.visible = false; this.mask = $('<div class="dg-mask"></div>'); this.boxy = $('<div class="dg-wrapper" id="' + ('dialog_' + DisplayObject.guid() ) + '"></div>'); this.content = $('<div class="dg-content"></div>'); this.content.append(jq); this.boxy.append(this.content).appendTo(document.body); let titleBarHeight = 0, footBarHeight = 0; if (cfg.title) { setTitleBar.call(this, cfg); titleBarHeight = this.titleBar.outerHeight(); this.boxy.find('.tb-row').css({height: titleBarHeight}); } if (cfg.buttons) { setFooter.call(this, cfg); this.boxy.find('.tf-row').css({height: this.footBar.outerHeight()}); footBarHeight = this.footBar.outerHeight(); } if (this.jq[0].tagName === 'IFRAME') this.iframe = this.jq[0] as HTMLIFrameElement; let contentSize = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300 }; /*//console.log(size); this.boxy.css(contentSize);*/ let doc = document.documentElement;//, win = window; let viewport = { //top: win.pageYOffset, //left: win.pageXOffset, width: doc.clientWidth, height: doc.clientHeight }; //console.log(p); let pos = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300, top: Math.max(0, (viewport.height - contentSize.height ) / 2), left: Math.max(0, (viewport.width - contentSize.width) / 2) }; this.boxy.css(pos); this.restoreSize = pos; //console.warn(this.restoreSize); this.mask.append(this.boxy.css({visibility: 'visible'})).appendTo(document.body); this.toTop(); if (navigator.userAgent.indexOf('Firefox') > -1 && this.iframe) { jq.css({height: contentSize.height - titleBarHeight - footBarHeight - 2}); } if (cfg.show) this.open(); return this; } getSize() { return { width: this.boxy.outerWidth(), height: this.boxy.outerHeight() } } getPosition(): any { let b = this.boxy[0]; return {left: b.offsetLeft, top: b.offsetTop}; } toTop() { this.mask.css({zIndex: nextZ()}); return this; } open() { this.boxy.stop(true, true); if (this.visible) { return this.toTop(); } this.mask.css({display: "block", opacity: 1}); let topPx = this.boxy.position().top; //console.warn(this.boxy[0], topPx); this.boxy.css({top: topPx - 20, opacity: 0}).animate({opacity: 1, top: topPx}, 200); this.visible = true; return this; } close(fn?: Function) { let that = this; let css = this.getPosition(); css.opacity = 0; css.top = Math.max(css.top - 40, 0); this.mask.animate({opacity: 0}, 200); this.boxy.stop(true, true).animate(css, 300, function () { if 
(typeof that.cfg.onClose === 'function') that.cfg.onClose.call(that); if (typeof fn === 'function') fn.call(that); if (that.cfg.destroy) that.destroy.call(that); else{ that.visible = false; that.boxy.css({top: css.top + 40}); that.mask.css({display: 'none'}); } }); return this; } max() { //resize window entity this.boxy.stop(true, true).css({ left: 0, top: 0, width: '100%', height: '100%' }); if (this.btnMax) this.btnMax.toggleClass('dg-btn-max dg-btn-restore'); //$(document.body).addClass('no-scroll'); this.state = 'max'; return this; }
restore
identifier_name
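Only the identifier restore appears above; its body is elided in this excerpt. Given that max() saves the pre-maximize geometry in this.restoreSize and flips the dg-btn-max/dg-btn-restore class, a plausible reconstruction of the method pair, offered as an assumption rather than the original source (these are PopUp method bodies, to be read in the context of the class):

// Hedged reconstruction of PopUp.restore(), the counterpart to max() above.
restore() {
    // Reapply the width/height/top/left captured in create() and dragging.
    this.boxy.stop(true, true).css(this.restoreSize);
    if (this.btnMax) this.btnMax.toggleClass('dg-btn-max dg-btn-restore');
    this.state = 'normal';
    return this;
}

// The title-bar max button calls self.toggle(); a minimal version:
toggle() {
    return this.state === 'max' ? this.restore() : this.max();
}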
Popup.ts
onselectstart = returnFalse; let btnSets = $('<div class="dg-title-buttons"></div>').appendTo(tb); if (cfg.btnMax) { this.btnMax = $("<b class='dg-btn-max'></b>"); btnSets.append(this.btnMax); this.btnMax.on('click', function () { self.toggle(); }); } if (cfg.btnClose) { this.btnClose = $("<b class='dg-btn-x'></b>"); btnSets.append(this.btnClose); this.btnClose.on('click', function () { self.close(); }); } if (cfg.drag) { setDraggable.call(this, this, cfg); } $('<div class="row tb-row" />').prependTo(this.boxy).append(tb); } function setDraggable(self) { let tb = self.titleBar; tb.on('mousedown', function (evt) { self.toTop(); if (evt.target.tagName === 'B') return; if (evt.button < 2 && self.state !== "max") { tb.on('mousemove.boxy', function (e) { tb.unbind("mousemove.boxy"); let boxy = self.boxy[0]; document.onselectstart = returnFalse; let size = self.getSize(); BoxyStore.dragging = [ boxy, e.pageX - boxy.offsetLeft, e.pageY - boxy.offsetTop, document.body.scrollWidth - size.width, document.body.scrollHeight - size.height ]; $(document) .bind("mousemove.boxy", BoxyStore._handleDrag) .bind("mouseup.boxy", function () { if (self.state !== "max" && BoxyStore.dragging) { $(document).unbind(".boxy"); BoxyStore.dragging = document.onselectstart = null; let pos = self.boxy.position(); self.restoreSize.top = pos.top; self.restoreSize.left = pos.left; } }); }); } tb.on("mouseup.boxy", function () { tb.unbind(".boxy"); }); }); } function setFooter(cfg) { let footer = this.footBar = $('<div class="dg-footer"></div>'); let htmlArr = []; for (let key in cfg.buttons) { let v = cfg.buttons[key], x = htmlArr.length; let cls, txt; if (typeof v === 'string') { cls = (x === 0 ? 'btn-primary' : ''); txt = v; } else { cls = v.className || ''; txt = v.text || ''; if(typeof v.onClick === 'function'){ if(!cfg.callback){ cfg.callback = function () {}; } cfg.callback[key] = v.onClick; } } htmlArr.push(`<button class="${cls}" name="${x}" data-key="${key}">${txt}</button>`); } footer.html(htmlArr.join(' ')); let self = this; footer.on('click', 'button', function (evt) { let keepOpen = false; if (cfg.callback) { let clicked = this; let btnKey = $(this).data('key'); let ifrWin = null; if (self.iframe) { ifrWin = self.iframe.contentWindow ? 
self.iframe.contentWindow : self.iframe.contentDocument.defaultView; } let i = parseInt(clicked.name, 10); if(btnKey && (typeof cfg.callback[btnKey] === 'function')){ keepOpen = cfg.callback[btnKey].call(self, i, ifrWin, clicked); } else{ keepOpen = cfg.callback.call(self, i, ifrWin, clicked); } } if (!keepOpen) self.close(); }); $('<div class="row tf-row" />').appendTo(this.boxy).append(footer); } class PopUp extends DisplayObject { cfg: any; state: string; restoreSize: any; visible: boolean; mask: JQuery; boxy: JQuery; titleBar ?: JQuery; footBar ?: JQuery; content: JQuery; btnMax ?: JQuery; btnClose ?: JQuery; iframe ?: HTMLIFrameElement; constructor(jq: JQuery, cfg: any) { cfg = $.extend({}, DEFAULTS, cfg); super(jq, cfg); BoxyStore.manager.push(this); if (cfg.popId) { if (cfg.popId in BoxyStore.managerHash) throw new Error(`Duplicated PopId "${cfg.popId}"`); else BoxyStore.managerHash[cfg.popId] = this; } } init(jq: JQuery, cfg: any) { super.init(jq, cfg); this.cfg = cfg; } create(jq: JQuery, cfg: any) : PopUp{ this.state = 'normal'; this.visible = false; this.mask = $('<div class="dg-mask"></div>'); this.boxy = $('<div class="dg-wrapper" id="' + ('dialog_' + DisplayObject.guid() ) + '"></div>'); this.content = $('<div class="dg-content"></div>'); this.content.append(jq); this.boxy.append(this.content).appendTo(document.body); let titleBarHeight = 0, footBarHeight = 0; if (cfg.title) { setTitleBar.call(this, cfg); titleBarHeight = this.titleBar.outerHeight(); this.boxy.find('.tb-row').css({height: titleBarHeight}); } if (cfg.buttons) { setFooter.call(this, cfg); this.boxy.find('.tf-row').css({height: this.footBar.outerHeight()}); footBarHeight = this.footBar.outerHeight(); } if (this.jq[0].tagName === 'IFRAME') this.iframe = this.jq[0] as HTMLIFrameElement; let contentSize = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300 }; /*//console.log(size); this.boxy.css(contentSize);*/ let doc = document.documentElement;//, win = window; let viewport = { //top: win.pageYOffset, //left: win.pageXOffset, width: doc.clientWidth, height: doc.clientHeight }; //console.log(p); let pos = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300, top: Math.max(0, (viewport.height - contentSize.height ) / 2), left: Math.max(0, (viewport.width - contentSize.width) / 2) }; this.boxy.css(pos); this.restoreSize = pos; //console.warn(this.restoreSize); this.mask.append(this.boxy.css({visibility: 'visible'})).appendTo(document.body); this.toTop(); if (navigator.userAgent.indexOf('Firefox') > -1 && this.iframe) { jq.css({height: contentSize.height - titleBarHeight - footBarHeight - 2}); } if (cfg.show) this.open(); return this; } getSize() { return { width: this.boxy.outerWidth(), height: this.boxy.outerHeight() } } getPosition(): any { let b = this.boxy[0]; return {left: b.offsetLeft, top: b.offsetTop}; } toTop() { this.mask.css({zIndex: nextZ()}); return this; } open() { this.boxy.stop(true, true); if (this.visible) { return this.toTop(); } this.mask.css({display: "block", opacity: 1}); let topPx = this.boxy.position().top; //console.warn(this.boxy[0], topPx); this.boxy.css({top: topPx - 20, opacity: 0}).animate({opacity: 1, top: topPx}, 200); this.visible = true; return this; } close(fn?: Function)
that.destroy.call(that); else{ that.visible = false; that.boxy.css({top: css.top + 40}); that.mask.css({display: 'none'}); } }); return this; } max() { //resize window entity this.boxy.stop(true, true).css({ left: 0, top: 0, width: '100%', height: '100%' }); if (this.btnMax) this.btnMax.toggleClass('dg-btn-max dg-btn-restore'); //$(document.body).addClass('no-scroll');
{ let that = this; let css = this.getPosition(); css.opacity = 0; css.top = Math.max(css.top - 40, 0); this.mask.animate({opacity: 0}, 200); this.boxy.stop(true, true).animate(css, 300, function () { if (typeof that.cfg.onClose === 'function') that.cfg.onClose.call(that); if (typeof fn === 'function') fn.call(that); if (that.cfg.destroy)
identifier_body
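toTop() above assigns the mask a z-index from nextZ(), a helper that does not appear in this excerpt. A common implementation is a module-level counter; the base value below is purely illustrative.

// Assumed implementation of the nextZ() helper used by toTop() above: a
// monotonically increasing z-index so the most recently raised dialog
// always stacks on top. The starting value 1337 is illustrative.
let currentZ = 1337;
function nextZ(): number {
    return ++currentZ;
}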
Popup.ts
onselectstart = returnFalse; let btnSets = $('<div class="dg-title-buttons"></div>').appendTo(tb); if (cfg.btnMax) { this.btnMax = $("<b class='dg-btn-max'></b>"); btnSets.append(this.btnMax); this.btnMax.on('click', function () { self.toggle(); }); } if (cfg.btnClose) { this.btnClose = $("<b class='dg-btn-x'></b>"); btnSets.append(this.btnClose); this.btnClose.on('click', function () { self.close(); }); } if (cfg.drag) { setDraggable.call(this, this, cfg); } $('<div class="row tb-row" />').prependTo(this.boxy).append(tb); } function setDraggable(self) { let tb = self.titleBar; tb.on('mousedown', function (evt) { self.toTop(); if (evt.target.tagName === 'B') return; if (evt.button < 2 && self.state !== "max") { tb.on('mousemove.boxy', function (e) { tb.unbind("mousemove.boxy"); let boxy = self.boxy[0]; document.onselectstart = returnFalse; let size = self.getSize(); BoxyStore.dragging = [ boxy, e.pageX - boxy.offsetLeft, e.pageY - boxy.offsetTop, document.body.scrollWidth - size.width, document.body.scrollHeight - size.height ]; $(document) .bind("mousemove.boxy", BoxyStore._handleDrag) .bind("mouseup.boxy", function () { if (self.state !== "max" && BoxyStore.dragging) { $(document).unbind(".boxy"); BoxyStore.dragging = document.onselectstart = null; let pos = self.boxy.position(); self.restoreSize.top = pos.top; self.restoreSize.left = pos.left; } }); }); } tb.on("mouseup.boxy", function () { tb.unbind(".boxy"); }); }); } function setFooter(cfg) { let footer = this.footBar = $('<div class="dg-footer"></div>'); let htmlArr = []; for (let key in cfg.buttons) { let v = cfg.buttons[key], x = htmlArr.length; let cls, txt; if (typeof v === 'string') { cls = (x === 0 ? 'btn-primary' : ''); txt = v; } else { cls = v.className || ''; txt = v.text || ''; if(typeof v.onClick === 'function'){ if(!cfg.callback){ cfg.callback = function () {}; } cfg.callback[key] = v.onClick; } } htmlArr.push(`<button class="${cls}" name="${x}" data-key="${key}">${txt}</button>`); } footer.html(htmlArr.join(' ')); let self = this; footer.on('click', 'button', function (evt) { let keepOpen = false; if (cfg.callback) { let clicked = this; let btnKey = $(this).data('key'); let ifrWin = null; if (self.iframe) { ifrWin = self.iframe.contentWindow ? 
self.iframe.contentWindow : self.iframe.contentDocument.defaultView; } let i = parseInt(clicked.name, 10); if(btnKey && (typeof cfg.callback[btnKey] === 'function')){ keepOpen = cfg.callback[btnKey].call(self, i, ifrWin, clicked); } else{ keepOpen = cfg.callback.call(self, i, ifrWin, clicked); } } if (!keepOpen) self.close(); }); $('<div class="row tf-row" />').appendTo(this.boxy).append(footer); } class PopUp extends DisplayObject { cfg: any; state: string; restoreSize: any; visible: boolean; mask: JQuery; boxy: JQuery; titleBar ?: JQuery; footBar ?: JQuery; content: JQuery; btnMax ?: JQuery; btnClose ?: JQuery; iframe ?: HTMLIFrameElement; constructor(jq: JQuery, cfg: any) { cfg = $.extend({}, DEFAULTS, cfg); super(jq, cfg); BoxyStore.manager.push(this); if (cfg.popId) { if (cfg.popId in BoxyStore.managerHash) throw new Error(`Duplicated PopId "${cfg.popId}"`); else BoxyStore.managerHash[cfg.popId] = this; } } init(jq: JQuery, cfg: any) { super.init(jq, cfg); this.cfg = cfg; } create(jq: JQuery, cfg: any) : PopUp{ this.state = 'normal'; this.visible = false; this.mask = $('<div class="dg-mask"></div>'); this.boxy = $('<div class="dg-wrapper" id="' + ('dialog_' + DisplayObject.guid() ) + '"></div>'); this.content = $('<div class="dg-content"></div>'); this.content.append(jq); this.boxy.append(this.content).appendTo(document.body); let titleBarHeight = 0, footBarHeight = 0; if (cfg.title) { setTitleBar.call(this, cfg); titleBarHeight = this.titleBar.outerHeight(); this.boxy.find('.tb-row').css({height: titleBarHeight}); } if (cfg.buttons) { setFooter.call(this, cfg); this.boxy.find('.tf-row').css({height: this.footBar.outerHeight()}); footBarHeight = this.footBar.outerHeight(); } if (this.jq[0].tagName === 'IFRAME') this.iframe = this.jq[0] as HTMLIFrameElement; let contentSize = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300 }; /*//console.log(size); this.boxy.css(contentSize);*/ let doc = document.documentElement;//, win = window; let viewport = { //top: win.pageYOffset, //left: win.pageXOffset, width: doc.clientWidth, height: doc.clientHeight }; //console.log(p); let pos = { width: cfg.width || this.boxy.outerWidth() || 500, height: cfg.height || this.boxy.outerHeight() || 300, top: Math.max(0, (viewport.height - contentSize.height ) / 2), left: Math.max(0, (viewport.width - contentSize.width) / 2) }; this.boxy.css(pos); this.restoreSize = pos; //console.warn(this.restoreSize); this.mask.append(this.boxy.css({visibility: 'visible'})).appendTo(document.body); this.toTop(); if (navigator.userAgent.indexOf('Firefox') > -1 && this.iframe) { jq.css({height: contentSize.height - titleBarHeight - footBarHeight - 2}); } if (cfg.show) this.open(); return this; } getSize() { return { width: this.boxy.outerWidth(), height: this.boxy.outerHeight() } } getPosition(): any { let b = this.boxy[0]; return {left: b.offsetLeft, top: b.offsetTop}; } toTop() { this.mask.css({zIndex: nextZ()}); return this; } open() { this.boxy.stop(true, true); if (this.visible)
this.mask.css({display: "block", opacity: 1}); let topPx = this.boxy.position().top; //console.warn(this.boxy[0], topPx); this.boxy.css({top: topPx - 20, opacity: 0}).animate({opacity: 1, top: topPx}, 200); this.visible = true; return this; } close(fn?: Function) { let that = this; let css = this.getPosition(); css.opacity = 0; css.top = Math.max(css.top - 40, 0); this.mask.animate({opacity: 0}, 200); this.boxy.stop(true, true).animate(css, 300, function () { if (typeof that.cfg.onClose === 'function') that.cfg.onClose.call(that); if (typeof fn === 'function') fn.call(that); if (that.cfg.destroy) that.destroy.call(that); else{ that.visible = false; that.boxy.css({top: css.top + 40}); that.mask.css({display: 'none'}); } }); return this; } max() { //resize window entity this.boxy.stop(true, true).css({ left: 0, top: 0, width: '100%', height: '100%' }); if (this.btnMax) this.btnMax.toggleClass('dg-btn-max dg-btn-restore'); //$(document.body).addClass('no-scroll');
{ return this.toTop(); }
conditional_block
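Pulling the configuration knobs above together, a usage sketch (assuming the DisplayObject base class routes construction through init()/create(), as the fields here suggest): setFooter gives the first string button the btn-primary class, and a handler that returns a truthy value keeps the dialog open.

// Usage sketch; the selector content and class names are illustrative.
const dlg = new PopUp($('<p>Save your changes?</p>'), {
    title: 'Confirm',
    btnClose: true,
    drag: true,
    show: true,
    buttons: {
        save: 'Save', // first button: receives the btn-primary class
        later: {
            text: 'Remind me later',
            className: 'btn-plain', // illustrative class name
            onClick: function () {
                return true; // truthy return keeps the dialog open
            },
        },
    },
});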
quasar.ts
receive a publication we are interested in this._groups[topic] = callback; } // Update our ABF with our subscription information, add our negative // information, then update our neighbors with our bloom filters if (Array.isArray(topic)) { topic.forEach(_addTopicToBloomFilter); } else { _addTopicToBloomFilter(topic); } this._sendUpdatesToNeighbors(); } /** * Implements the Quasar join protocol * @private */ private _sendUpdatesToNeighbors():void { var nodeID = this._router._self.nodeID; var limit = kad.constants.ALPHA; // Get our nearest overlay neighbors var neighbors = this._router.getNearestContacts(nodeID, limit, nodeID); this._log.debug('requesting neighbors\' bloom filters'); // Get neighbors bloom filters and merge them with our own this._updateAttenuatedBloomFilter(neighbors).then(() => { // Send our neighbors our merged bloom filters for (var n = 0; n < neighbors.length; n++) { this._updateNeighbor(neighbors[n]); } }); } /** * Iteratitvely update our local bloom filters with our neighbors' * @private * @param {Array} neighbors * @param {Function} callback */ private _updateAttenuatedBloomFilter(neighbors):Promise<any> { let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._getBloomFilterFromNeighbor(n)).then((atbf: BloomFilter) => { this._log.info('merging neighbor\'s bloom filter with our own'); // Merge the remote copy of the bloom filter with our own this._applyAttenuatedBloomFilterUpdate(atbf); }, (err: Error) => { this._log.warn('failed to get neighbor\'s bloom filter, reason: %s', err.message); }); } return p; } /** * Merges the attenuated bloom filter with our own * @private * @param {BloomFilter} atbf */ private _applyAttenuatedBloomFilterUpdate(atbf) { // Iterate for the depth of our bitfield minus our view of neighbors for (var f = 1; f < this._bf.depth; f++) { // Then for each bloom filter in our neighbor's response, merge their // bloom filter with ours for the given "hop" in our attenuated filter for (var b = 0; b < atbf.filters[f].bitfield.buffer.length; b++) { var local = this._bf.filters[f].bitfield.buffer; var remote = atbf.filters[f].bitfield.buffer; local[b] += remote[b]; } } return this._bf; } /** * Sends a PUBLISH message after verifying it has not expired * @private * @param {kad.Contact} contact * @param {Object} params * @param {Function} callback */ _sendPublish(contact, params):Promise<any> { return new Promise((resolve, reject) => { // check to make sure the message we are sending hasn't expired: if (params.ttl < 1) { reject(new Error('outgoing PUBLISH message has expired')); } else { this._router._rpc.send(contact, kad.Message({ params: Object.assign({}, params, { contact: this._router._self, ttl: --params.ttl }), method: Quasar.PUBLISH_METHOD }), (err, data) => { if (err) { reject(err); } else { resolve(data); } }); } }); } /** * Inspects the message and routes it accordingly * @private * @param {Object} params * @param {Function} callback */ _handlePublish(params, callback) { /* jshint maxstatements:false */ var neighbors = this._router.getNearestContacts( this._router._self.nodeID, kad.constants.K, this._router._self.nodeID ); // Check to make sure that we have not already seen this message if (this._seen.get(params.uuid)) { return callback(new Error('Message previously routed')); } if (params.ttl > this._options.maxRelayHops || params.ttl < 0) { return callback(new Error('Refusing to relay message due to invalid TTL')); } // Filter the neighbor list of previous publishers neighbors = shuffle(neighbors.filter(function 
(contact) { return params.publishers.indexOf(contact.nodeID) === -1; })).splice(0, 3); // Add ourselves to the publishers (negative information) params.publishers.push(this._router._self.nodeID); this._seen.set(params.uuid, Date.now()); // Check if we are subscribed to this topic if (this._bf.filters[0].has(params.topic) && this._groups[params.topic]) { // If we are, then execute our subscription handler this._groups[params.topic](params.contents, params.topic); let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._sendPublish(n, params)); } p.then(() => callback()); } else { // We are not interested in this message, so let's forward it on to our // neighbors to see if any of them are interested this._relayPublication(neighbors, params).then(data => callback(null, data), err => callback(err)); } } /** * Relays the message to the given neighbors * @private * @param {Array} neighbors * @param {Object} params * @param {Function} callback */ private _relayPublication(neighbors, params):Promise<any> { var nodeID = this._router._self.nodeID; let _relayToRandomNeighbor = () => { var randNeighbor = this._getRandomOverlayNeighbor(nodeID, params.topic); this._sendPublish(randNeighbor, params); } if (this._options.randomRelay) { _relayToRandomNeighbor(); } else { var p = []; for (let n of neighbors) { p.push(this._getBloomFilterFromNeighbor(n).then(atbf => { // We iterate over the total number of hops in our bloom filter for (var i = 0; i < this._bf.depth; i++) { // Check if their bloom filter for the given hop contains the topic if (atbf.filters[i].has(params.topic)) { var negativeRT = false; // Check if their bloom filter contains any of the negative // information for the previous message publishers for (var p = 0; p < params.publishers.length; p++) { if (atbf.filters[i].has(params.publishers[p])) { negativeRT = true; } } // If there is isn't any negative information, then let's relay the // message to the contact if (!negativeRT) { return this._sendPublish(n, params).then(() => true, () => true); } } } // Nothing to do, all done return false; }, () => false)); } Promise.all(p).then(results => results.filter(i => !!i)).then(results => { if (!results || !results.length) { // If none of the neighbors in the above loop should get the message // then we must pick a random overlay neighbor and send it to them _relayToRandomNeighbor(); } }); } // Ack the original sender, so they do not drop us from routing table return Promise.resolve({}); } /** * Inspects the message and routes it accordingly * @private * @param {Object} params * @param {Function} callback */ _handleSubscribe(params, callback) { callback(null, {filters: this._bf.serialize()}); } /** * Inspects the message and routes it accordingly * @private * @param {Object} params * @param {Function} callback */ _handleUpdate(params, callback) { this._applyAttenuatedBloomFilterUpdate( BloomFilter.deserialize(params.filters) ); callback(null, {}); } /** * Request a copy of the contact's attenuated bloom filter (SUBSCRIBE) * @private * @param {kad.Contact} contact * @param {Function} callback */ private _getBloomFilterFromNeighbor(contact):Promise<BloomFilter> { return new Promise((resolve, reject) => { // Construct our SUBSCRIBE message let message = kad.Message({ method: Quasar.SUBSCRIBE_METHOD, params: {contact: this._router._self} }); this._router._rpc.send(contact, message, function (err, message) { if (err) { reject(err); } else if (!message.result.filters) { reject(new Error('Invalid response received')); } else if 
(!Array.isArray(message.result.filters))
{ reject(new Error('Invalid response received')); }
conditional_block
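_applyAttenuatedBloomFilterUpdate above merges a neighbor's attenuated bloom filter into ours hop by hop, leaving hop 0 (our own subscriptions) untouched. A tiny standalone illustration, with plain number arrays standing in for BloomFilter.filters[f].bitfield.buffer:

const local: number[][] = [[1, 0, 0], [0, 1, 0], [0, 0, 0]]; // hops 0..2
const remote: number[][] = [[0, 1, 0], [1, 0, 0], [0, 1, 1]];

// Same loop shape as _applyAttenuatedBloomFilterUpdate: start at hop 1.
for (let f = 1; f < local.length; f++) {
    for (let b = 0; b < local[f].length; b++) {
        local[f][b] += remote[f][b];
    }
}
// local is now [[1,0,0], [1,1,0], [0,1,1]] - hop 0 is untouched.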
quasar.ts
log:any; private _protocol:{ [method:string]: (params:any, callback:(...args:any[])=>void)=>void; }; constructor(router, options = {}) { if (!(router instanceof kad.Router)) throw new Error('Invalid router supplied'); this._router = router; this._options = Object.create(Quasar.DEFAULTS); Object.assign(this._options, options); this._protocol = {}; this._seen = new LRUCache(this._options.lruCacheSize); this._log = this._options.logger || this._router._log; this._protocol[Quasar.PUBLISH_METHOD] = this._handlePublish.bind(this); this._protocol[Quasar.SUBSCRIBE_METHOD] = this._handleSubscribe.bind(this); this._protocol[Quasar.UPDATE_METHOD] = this._handleUpdate.bind(this); this._router._rpc.before('receive', kad.hooks.protocol(this._protocol)); this._bf.filters[0].add(this._router._self.nodeID); } /** * Publish some data for the given topic * @param {String} topic - The publication identifier * @param {Object} data - Arbitrary publication contents * @param {Object} options * @param {String} options.key - Use neighbors close to this key (optional) */ publish(topic:string, data:any, options?:{ key?:string }):Promise<any> { let nodeID = this._router._self.nodeID; let limit = kad.constants.ALPHA; let key = options ? (options.key || nodeID) : nodeID; let neighbors = this._router.getNearestContacts(key, limit, nodeID); this._log.info('publishing message on topic "%s"', topic); // Dispatch message to our closest neighbors let p = []; for (let n of neighbors) { p.push(this._sendPublish(n, { uuid: uuid.v4(), topic: topic, contents: data, publishers: [nodeID], ttl: this._options.maxRelayHops, contact: this._router._self })); } return Promise.all(p); } /** * Subscribe to the given topic and handle events * @param {String|Array} topic - The publication identifier(s) * @param {Function} callback - Function to call when publication is received */ subscribe(topic:string|string[], callback:(data:any, topic?:string)=>void):void { let _addTopicToBloomFilter = topic => { this._log.info('subscribing to topic "%s"', topic); this._bf.filters[0].add(topic); // Set a handler for when we receive a publication we are interested in this._groups[topic] = callback; } // Update our ABF with our subscription information, add our negative // information, then update our neighbors with our bloom filters if (Array.isArray(topic)) { topic.forEach(_addTopicToBloomFilter); } else { _addTopicToBloomFilter(topic); } this._sendUpdatesToNeighbors(); } /** * Implements the Quasar join protocol * @private */ private _sendUpdatesToNeighbors():void { var nodeID = this._router._self.nodeID; var limit = kad.constants.ALPHA; // Get our nearest overlay neighbors var neighbors = this._router.getNearestContacts(nodeID, limit, nodeID); this._log.debug('requesting neighbors\' bloom filters'); // Get neighbors bloom filters and merge them with our own this._updateAttenuatedBloomFilter(neighbors).then(() => { // Send our neighbors our merged bloom filters for (var n = 0; n < neighbors.length; n++) { this._updateNeighbor(neighbors[n]); } }); } /** * Iteratitvely update our local bloom filters with our neighbors' * @private * @param {Array} neighbors * @param {Function} callback */ private _updateAttenuatedBloomFilter(neighbors):Promise<any> { let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._getBloomFilterFromNeighbor(n)).then((atbf: BloomFilter) => { this._log.info('merging neighbor\'s bloom filter with our own'); // Merge the remote copy of the bloom filter with our own this._applyAttenuatedBloomFilterUpdate(atbf); 
}, (err: Error) => { this._log.warn('failed to get neighbor\'s bloom filter, reason: %s', err.message); }); } return p; } /** * Merges the attenuated bloom filter with our own * @private * @param {BloomFilter} atbf */ private _applyAttenuatedBloomFilterUpdate(atbf) { // Iterate for the depth of our bitfield minus our view of neighbors for (var f = 1; f < this._bf.depth; f++) { // Then for each bloom filter in our neighbor's response, merge their // bloom filter with ours for the given "hop" in our attenuated filter for (var b = 0; b < atbf.filters[f].bitfield.buffer.length; b++) { var local = this._bf.filters[f].bitfield.buffer; var remote = atbf.filters[f].bitfield.buffer; local[b] += remote[b]; } } return this._bf; } /** * Sends a PUBLISH message after verifying it has not expired * @private * @param {kad.Contact} contact * @param {Object} params * @param {Function} callback */ _sendPublish(contact, params):Promise<any> { return new Promise((resolve, reject) => { // check to make sure the message we are sending hasn't expired: if (params.ttl < 1) { reject(new Error('outgoing PUBLISH message has expired')); } else { this._router._rpc.send(contact, kad.Message({ params: Object.assign({}, params, { contact: this._router._self, ttl: --params.ttl }), method: Quasar.PUBLISH_METHOD }), (err, data) => { if (err) { reject(err); } else { resolve(data); } }); } }); } /** * Inspects the message and routes it accordingly * @private * @param {Object} params * @param {Function} callback */ _handlePublish(params, callback) { /* jshint maxstatements:false */ var neighbors = this._router.getNearestContacts( this._router._self.nodeID, kad.constants.K, this._router._self.nodeID ); // Check to make sure that we have not already seen this message if (this._seen.get(params.uuid)) { return callback(new Error('Message previously routed')); } if (params.ttl > this._options.maxRelayHops || params.ttl < 0) { return callback(new Error('Refusing to relay message due to invalid TTL')); } // Filter the neighbor list of previous publishers neighbors = shuffle(neighbors.filter(function (contact) { return params.publishers.indexOf(contact.nodeID) === -1; })).splice(0, 3); // Add ourselves to the publishers (negative information) params.publishers.push(this._router._self.nodeID); this._seen.set(params.uuid, Date.now()); // Check if we are subscribed to this topic if (this._bf.filters[0].has(params.topic) && this._groups[params.topic]) { // If we are, then execute our subscription handler this._groups[params.topic](params.contents, params.topic); let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._sendPublish(n, params)); } p.then(() => callback()); } else { // We are not interested in this message, so let's forward it on to our // neighbors to see if any of them are interested this._relayPublication(neighbors, params).then(data => callback(null, data), err => callback(err)); } } /** * Relays the message to the given neighbors * @private * @param {Array} neighbors * @param {Object} params * @param {Function} callback */ private
(neighbors, params):Promise<any> { var nodeID = this._router._self.nodeID; let _relayToRandomNeighbor = () => { var randNeighbor = this._getRandomOverlayNeighbor(nodeID, params.topic); this._sendPublish(randNeighbor, params); } if (this._options.randomRelay) { _relayToRandomNeighbor(); } else { var p = []; for (let n of neighbors) { p.push(this._getBloomFilterFromNeighbor(n).then(atbf => { // We iterate over the total number of hops in our bloom filter for (var i = 0; i < this._bf.depth; i++) { // Check if their bloom filter for the given hop contains the
_relayPublication
identifier_name
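The per-neighbor decision inside _relayPublication above reads as a small predicate: relay only if some hop of the neighbor's attenuated filter contains the topic and that hop carries none of the negative information (previous publishers). A condensed sketch, with a minimal assumed filter interface:

// Minimal shapes assumed for the sketch; the real BloomFilter has more API.
interface HopFilter { has(key: string): boolean; }
interface Atbf { filters: HopFilter[]; }

function shouldRelay(atbf: Atbf, depth: number, topic: string, publishers: string[]): boolean {
    for (let i = 0; i < depth; i++) {
        if (!atbf.filters[i].has(topic)) continue;
        // "Negative information": a hop that already saw one of the
        // publishers would route the message back where it came from.
        if (!publishers.some(p => atbf.filters[i].has(p))) return true;
    }
    return false;
}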
quasar.ts
log:any; private _protocol:{ [method:string]: (params:any, callback:(...args:any[])=>void)=>void; }; constructor(router, options = {}) { if (!(router instanceof kad.Router)) throw new Error('Invalid router supplied'); this._router = router; this._options = Object.create(Quasar.DEFAULTS); Object.assign(this._options, options); this._protocol = {}; this._seen = new LRUCache(this._options.lruCacheSize); this._log = this._options.logger || this._router._log; this._protocol[Quasar.PUBLISH_METHOD] = this._handlePublish.bind(this); this._protocol[Quasar.SUBSCRIBE_METHOD] = this._handleSubscribe.bind(this); this._protocol[Quasar.UPDATE_METHOD] = this._handleUpdate.bind(this); this._router._rpc.before('receive', kad.hooks.protocol(this._protocol)); this._bf.filters[0].add(this._router._self.nodeID); }
* @param {Object} options * @param {String} options.key - Use neighbors close to this key (optional) */ publish(topic:string, data:any, options?:{ key?:string }):Promise<any> { let nodeID = this._router._self.nodeID; let limit = kad.constants.ALPHA; let key = options ? (options.key || nodeID) : nodeID; let neighbors = this._router.getNearestContacts(key, limit, nodeID); this._log.info('publishing message on topic "%s"', topic); // Dispatch message to our closest neighbors let p = []; for (let n of neighbors) { p.push(this._sendPublish(n, { uuid: uuid.v4(), topic: topic, contents: data, publishers: [nodeID], ttl: this._options.maxRelayHops, contact: this._router._self })); } return Promise.all(p); } /** * Subscribe to the given topic and handle events * @param {String|Array} topic - The publication identifier(s) * @param {Function} callback - Function to call when publication is received */ subscribe(topic:string|string[], callback:(data:any, topic?:string)=>void):void { let _addTopicToBloomFilter = topic => { this._log.info('subscribing to topic "%s"', topic); this._bf.filters[0].add(topic); // Set a handler for when we receive a publication we are interested in this._groups[topic] = callback; } // Update our ABF with our subscription information, add our negative // information, then update our neighbors with our bloom filters if (Array.isArray(topic)) { topic.forEach(_addTopicToBloomFilter); } else { _addTopicToBloomFilter(topic); } this._sendUpdatesToNeighbors(); } /** * Implements the Quasar join protocol * @private */ private _sendUpdatesToNeighbors():void { var nodeID = this._router._self.nodeID; var limit = kad.constants.ALPHA; // Get our nearest overlay neighbors var neighbors = this._router.getNearestContacts(nodeID, limit, nodeID); this._log.debug('requesting neighbors\' bloom filters'); // Get neighbors bloom filters and merge them with our own this._updateAttenuatedBloomFilter(neighbors).then(() => { // Send our neighbors our merged bloom filters for (var n = 0; n < neighbors.length; n++) { this._updateNeighbor(neighbors[n]); } }); } /** * Iteratitvely update our local bloom filters with our neighbors' * @private * @param {Array} neighbors * @param {Function} callback */ private _updateAttenuatedBloomFilter(neighbors):Promise<any> { let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._getBloomFilterFromNeighbor(n)).then((atbf: BloomFilter) => { this._log.info('merging neighbor\'s bloom filter with our own'); // Merge the remote copy of the bloom filter with our own this._applyAttenuatedBloomFilterUpdate(atbf); }, (err: Error) => { this._log.warn('failed to get neighbor\'s bloom filter, reason: %s', err.message); }); } return p; } /** * Merges the attenuated bloom filter with our own * @private * @param {BloomFilter} atbf */ private _applyAttenuatedBloomFilterUpdate(atbf) { // Iterate for the depth of our bitfield minus our view of neighbors for (var f = 1; f < this._bf.depth; f++) { // Then for each bloom filter in our neighbor's response, merge their // bloom filter with ours for the given "hop" in our attenuated filter for (var b = 0; b < atbf.filters[f].bitfield.buffer.length; b++) { var local = this._bf.filters[f].bitfield.buffer; var remote = atbf.filters[f].bitfield.buffer; local[b] += remote[b]; } } return this._bf; } /** * Sends a PUBLISH message after verifying it has not expired * @private * @param {kad.Contact} contact * @param {Object} params * @param {Function} callback */ _sendPublish(contact, params):Promise<any> { return new 
Promise((resolve, reject) => { // check to make sure the message we are sending hasn't expired: if (params.ttl < 1) { reject(new Error('outgoing PUBLISH message has expired')); } else { this._router._rpc.send(contact, kad.Message({ params: Object.assign({}, params, { contact: this._router._self, ttl: --params.ttl }), method: Quasar.PUBLISH_METHOD }), (err, data) => { if (err) { reject(err); } else { resolve(data); } }); } }); } /** * Inspects the message and routes it accordingly * @private * @param {Object} params * @param {Function} callback */ _handlePublish(params, callback) { /* jshint maxstatements:false */ var neighbors = this._router.getNearestContacts( this._router._self.nodeID, kad.constants.K, this._router._self.nodeID ); // Check to make sure that we have not already seen this message if (this._seen.get(params.uuid)) { return callback(new Error('Message previously routed')); } if (params.ttl > this._options.maxRelayHops || params.ttl < 0) { return callback(new Error('Refusing to relay message due to invalid TTL')); } // Filter the neighbor list of previous publishers neighbors = shuffle(neighbors.filter(function (contact) { return params.publishers.indexOf(contact.nodeID) === -1; })).splice(0, 3); // Add ourselves to the publishers (negative information) params.publishers.push(this._router._self.nodeID); this._seen.set(params.uuid, Date.now()); // Check if we are subscribed to this topic if (this._bf.filters[0].has(params.topic) && this._groups[params.topic]) { // If we are, then execute our subscription handler this._groups[params.topic](params.contents, params.topic); let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._sendPublish(n, params)); } p.then(() => callback()); } else { // We are not interested in this message, so let's forward it on to our // neighbors to see if any of them are interested this._relayPublication(neighbors, params).then(data => callback(null, data), err => callback(err)); } } /** * Relays the message to the given neighbors * @private * @param {Array} neighbors * @param {Object} params * @param {Function} callback */ private _relayPublication(neighbors, params):Promise<any> { var nodeID = this._router._self.nodeID; let _relayToRandomNeighbor = () => { var randNeighbor = this._getRandomOverlayNeighbor(nodeID, params.topic); this._sendPublish(randNeighbor, params); } if (this._options.randomRelay) { _relayToRandomNeighbor(); } else { var p = []; for (let n of neighbors) { p.push(this._getBloomFilterFromNeighbor(n).then(atbf => { // We iterate over the total number of hops in our bloom filter for (var i = 0; i < this._bf.depth; i++) { // Check if their bloom filter for the given hop contains the topic
/** * Publish some data for the given topic * @param {String} topic - The publication identifier * @param {Object} data - Arbitrary publication contents
random_line_split
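A usage sketch for the publish/subscribe API shown above; router and someKey are stand-ins for a configured kad.Router instance and an optional routing key.

// Illustrative wiring; `router` is assumed to be a live kad.Router.
const quasar = new Quasar(router, { maxRelayHops: 3 });

quasar.subscribe('beacon', (data, topic) => {
    console.log('publication on "%s":', topic, data);
});

// publish() sends to the ALPHA nearest neighbors and resolves when all
// sends settle; options.key steers which neighbors are considered.
quasar.publish('beacon', { seq: 1 }, { key: someKey })
    .then(() => console.log('published'))
    .catch(err => console.error('publish failed:', err));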
quasar.ts
:any; private _protocol:{ [method:string]: (params:any, callback:(...args:any[])=>void)=>void; }; constructor(router, options = {})
/** * Publish some data for the given topic * @param {String} topic - The publication identifier * @param {Object} data - Arbitrary publication contents * @param {Object} options * @param {String} options.key - Use neighbors close to this key (optional) */ publish(topic:string, data:any, options?:{ key?:string }):Promise<any> { let nodeID = this._router._self.nodeID; let limit = kad.constants.ALPHA; let key = options ? (options.key || nodeID) : nodeID; let neighbors = this._router.getNearestContacts(key, limit, nodeID); this._log.info('publishing message on topic "%s"', topic); // Dispatch message to our closest neighbors let p = []; for (let n of neighbors) { p.push(this._sendPublish(n, { uuid: uuid.v4(), topic: topic, contents: data, publishers: [nodeID], ttl: this._options.maxRelayHops, contact: this._router._self })); } return Promise.all(p); } /** * Subscribe to the given topic and handle events * @param {String|Array} topic - The publication identifier(s) * @param {Function} callback - Function to call when publication is received */ subscribe(topic:string|string[], callback:(data:any, topic?:string)=>void):void { let _addTopicToBloomFilter = topic => { this._log.info('subscribing to topic "%s"', topic); this._bf.filters[0].add(topic); // Set a handler for when we receive a publication we are interested in this._groups[topic] = callback; } // Update our ABF with our subscription information, add our negative // information, then update our neighbors with our bloom filters if (Array.isArray(topic)) { topic.forEach(_addTopicToBloomFilter); } else { _addTopicToBloomFilter(topic); } this._sendUpdatesToNeighbors(); } /** * Implements the Quasar join protocol * @private */ private _sendUpdatesToNeighbors():void { var nodeID = this._router._self.nodeID; var limit = kad.constants.ALPHA; // Get our nearest overlay neighbors var neighbors = this._router.getNearestContacts(nodeID, limit, nodeID); this._log.debug('requesting neighbors\' bloom filters'); // Get neighbors bloom filters and merge them with our own this._updateAttenuatedBloomFilter(neighbors).then(() => { // Send our neighbors our merged bloom filters for (var n = 0; n < neighbors.length; n++) { this._updateNeighbor(neighbors[n]); } }); } /** * Iteratitvely update our local bloom filters with our neighbors' * @private * @param {Array} neighbors * @param {Function} callback */ private _updateAttenuatedBloomFilter(neighbors):Promise<any> { let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._getBloomFilterFromNeighbor(n)).then((atbf: BloomFilter) => { this._log.info('merging neighbor\'s bloom filter with our own'); // Merge the remote copy of the bloom filter with our own this._applyAttenuatedBloomFilterUpdate(atbf); }, (err: Error) => { this._log.warn('failed to get neighbor\'s bloom filter, reason: %s', err.message); }); } return p; } /** * Merges the attenuated bloom filter with our own * @private * @param {BloomFilter} atbf */ private _applyAttenuatedBloomFilterUpdate(atbf) { // Iterate for the depth of our bitfield minus our view of neighbors for (var f = 1; f < this._bf.depth; f++) { // Then for each bloom filter in our neighbor's response, merge their // bloom filter with ours for the given "hop" in our attenuated filter for (var b = 0; b < atbf.filters[f].bitfield.buffer.length; b++) { var local = this._bf.filters[f].bitfield.buffer; var remote = atbf.filters[f].bitfield.buffer; local[b] += remote[b]; } } return this._bf; } /** * Sends a PUBLISH message after verifying it has not expired * @private 
* @param {kad.Contact} contact * @param {Object} params * @param {Function} callback */ _sendPublish(contact, params):Promise<any> { return new Promise((resolve, reject) => { // check to make sure the message we are sending hasn't expired: if (params.ttl < 1) { reject(new Error('outgoing PUBLISH message has expired')); } else { this._router._rpc.send(contact, kad.Message({ params: Object.assign({}, params, { contact: this._router._self, ttl: --params.ttl }), method: Quasar.PUBLISH_METHOD }), (err, data) => { if (err) { reject(err); } else { resolve(data); } }); } }); } /** * Inspects the message and routes it accordingly * @private * @param {Object} params * @param {Function} callback */ _handlePublish(params, callback) { /* jshint maxstatements:false */ var neighbors = this._router.getNearestContacts( this._router._self.nodeID, kad.constants.K, this._router._self.nodeID ); // Check to make sure that we have not already seen this message if (this._seen.get(params.uuid)) { return callback(new Error('Message previously routed')); } if (params.ttl > this._options.maxRelayHops || params.ttl < 0) { return callback(new Error('Refusing to relay message due to invalid TTL')); } // Filter the neighbor list of previous publishers neighbors = shuffle(neighbors.filter(function (contact) { return params.publishers.indexOf(contact.nodeID) === -1; })).splice(0, 3); // Add ourselves to the publishers (negative information) params.publishers.push(this._router._self.nodeID); this._seen.set(params.uuid, Date.now()); // Check if we are subscribed to this topic if (this._bf.filters[0].has(params.topic) && this._groups[params.topic]) { // If we are, then execute our subscription handler this._groups[params.topic](params.contents, params.topic); let p = Promise.resolve(true); for (let n of neighbors) { p = p.then(() => this._sendPublish(n, params)); } p.then(() => callback()); } else { // We are not interested in this message, so let's forward it on to our // neighbors to see if any of them are interested this._relayPublication(neighbors, params).then(data => callback(null, data), err => callback(err)); } } /** * Relays the message to the given neighbors * @private * @param {Array} neighbors * @param {Object} params * @param {Function} callback */ private _relayPublication(neighbors, params):Promise<any> { var nodeID = this._router._self.nodeID; let _relayToRandomNeighbor = () => { var randNeighbor = this._getRandomOverlayNeighbor(nodeID, params.topic); this._sendPublish(randNeighbor, params); } if (this._options.randomRelay) { _relayToRandomNeighbor(); } else { var p = []; for (let n of neighbors) { p.push(this._getBloomFilterFromNeighbor(n).then(atbf => { // We iterate over the total number of hops in our bloom filter for (var i = 0; i < this._bf.depth; i++) { // Check if their bloom filter for the given hop contains the
{
    if (!(router instanceof kad.Router)) throw new Error('Invalid router supplied');
    this._router = router;
    this._options = Object.create(Quasar.DEFAULTS);
    Object.assign(this._options, options);
    this._protocol = {};
    this._seen = new LRUCache(this._options.lruCacheSize);
    this._log = this._options.logger || this._router._log;
    this._protocol[Quasar.PUBLISH_METHOD] = this._handlePublish.bind(this);
    this._protocol[Quasar.SUBSCRIBE_METHOD] = this._handleSubscribe.bind(this);
    this._protocol[Quasar.UPDATE_METHOD] = this._handleUpdate.bind(this);
    this._router._rpc.before('receive', kad.hooks.protocol(this._protocol));
    this._bf.filters[0].add(this._router._self.nodeID);
}
identifier_body
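One detail worth isolating from _handlePublish above: duplicate suppression via the LRU cache the constructor creates with new LRUCache(options.lruCacheSize). A standalone sketch of the pattern; a Map stands in for the cache so the sketch runs on its own (LRU eviction omitted):

const seen = new Map<string, number>();

function alreadyRouted(uuid: string): boolean {
    if (seen.has(uuid)) return true; // previously routed: drop the message
    seen.set(uuid, Date.now());      // remember when we first saw it
    return false;
}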
state.rs
pub image_data: web_sys::ImageData, pub config: shared::Config, pub history: Vec<shared::Config>, pub history_index: usize, pub last_rendered_config: Option<shared::Config>, pub buffer: Vec<u32>, pub ui: crate::ui::UiState, pub hist_canvas: Option<web_sys::HtmlCanvasElement>, pub on_change: js_sys::Function, pub workers: Vec<(web_sys::Worker, bool, Option<shared::messaging::Message>)>, } // umm I dunno if this is cheating or something // I mean bad things could happen if I accessed the ctx // from different threads // but given that wasm doesn't yet have threads, it's probably fine. unsafe impl Send for State {} impl State { pub fn new(config: shared::Config, on_change: js_sys::Function) -> Self { State { hide_timeout: None, render_id: 0, hist_canvas: None, last_rendered: 0, ctx: crate::ui::init(&config).expect("Unable to setup canvas"), image_data: web_sys::ImageData::new_with_sw( config.rendering.width as u32, config.rendering.height as u32, ) .expect("Can't make an imagedata"), buffer: vec![0_u32; config.rendering.width * config.rendering.height], workers: vec![], ui: Default::default(), history: vec![config.clone()], history_index: 0, last_rendered_config: None, on_change, config, } } } pub fn make_image_data( config: &shared::Config, bright: &[u32], ) -> Result<web_sys::ImageData, JsValue> { let colored = shared::colorize(config, bright); let mut clamped = wasm_bindgen::Clamped(colored.clone()); // let mut clamped = Clamped(state.buffer.clone()); let data = web_sys::ImageData::new_with_u8_clamped_array_and_sh( wasm_bindgen::Clamped(clamped.as_mut_slice()), config.rendering.width as u32, config.rendering.height as u32, )?; Ok(data) } impl State { pub fn reset_buffer(&mut self) { self.buffer = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; self.invalidate_past_renders(); } pub fn add_worker(&mut self, worker: web_sys::Worker) { self.workers.push((worker, false, None)) } pub fn invalidate_past_renders(&mut self) { self.render_id += 1; self.last_rendered = self.render_id; } pub fn undo(&mut self) -> Result<(), JsValue> { log!("Undo {} {}", self.history.len(), self.history_index); if self.history_index == 0 { if Some(&self.config) != self.history.last() { self.history.push(self.config.clone()); } } self.history_index = (self.history_index + 1).min(self.history.len() - 1); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn redo(&mut self) -> Result<(), JsValue> { if self.history_index == 0 { log!("nothing to redo"); return Ok(()); } log!("redo"); self.history_index = (self.history_index - 1).max(0); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn maybe_save_history(&mut self) { log!("saving history"); // If the lastest is the same if self.history_index == 0 && self .history .last() .map_or(false, |last| *last == self.config) { return; } if self.history_index != 0 && self .history .get(self.history.len() - self.history_index - 1) .map_or(false, |last| *last == self.config) { return; } // snip undone stuff if self.history_index != 0 { self.history = self.history[0..self.history.len() - self.history_index].to_vec(); self.history_index = 0; } // if self.history.last().map_or(true, |last| *last != self.config) { self.history.push(self.config.clone()); if self.history.len() > 500 { // trim to 500 len self.history = self.history[self.history.len() - 
500..].to_vec(); } // } } pub fn handle_render( &mut self, worker: usize, id: usize, array: js_sys::Uint32Array, ) -> Result<(), JsValue> { if id < self.last_rendered
if id > self.last_rendered { self.reset_buffer(); self.last_rendered = id; } let mut bright = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; array.copy_to(&mut bright); for i in 0..bright.len() { self.buffer[i] += bright[i]; } self.image_data = make_image_data(&self.config, &self.buffer)?; // crate::ui::use_ui(|ui| { // crate::ui::draw(ui, &self) // }); // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } Ok(()) } pub fn debug_render(&mut self) -> Result<(), JsValue> { let brightness = shared::calculate::deterministic_calc(&self.config); self.image_data = make_image_data(&self.config, &brightness)?; self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; Ok(()) } pub fn clear(&mut self) { self.ctx.clear_rect( 0.0, 0.0, self.config.rendering.width as f64, self.config.rendering.height as f64, ) } pub fn reexpose(&mut self) -> Result<(), JsValue> { self.image_data = make_image_data(&self.config, &self.buffer)?; // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; // crate::ui::use_ui(|ui| { crate::ui::draw(&self); // }); Ok(()) } pub fn send_on_change(&self) { let _res = self.on_change.call2( &JsValue::null(), &JsValue::from_serde(&self.config).unwrap(), &JsValue::from_serde(&self.ui).unwrap(), ); } pub fn async_render(&mut self, small: bool) -> Result<(), JsValue> { // log!("Async nreder folks"); match &self.last_rendered_config { Some(config) => { if *config == self.config { return Ok(()); } let mut old_config_with_new_exposure = config.clone(); old_config_with_new_exposure.rendering.exposure = self.config.rendering.exposure.clone(); old_config_with_new_exposure.rendering.coloration = self.config.rendering.coloration.clone(); // We've only changed settings that don't require recalculation if old_config_with_new_exposure == self.config { self.last_rendered_config = Some(self.config.clone()); self.send_on_change(); self.reexpose(); return Ok(()); } else { log!("Not the same") // log!("Not the same! {} vs {}", old_json, json) } } _ => (), } // log!("Render new config"); // web_sys::console::log_1(&JsValue::from_serde(&self.config).unwrap()); self.send_on_change(); self.last_rendered_config = Some(self.config.clone()); self.render_id += 1; let message = shared::messaging::Message { config: self.config.clone(), id: self.render_id, // count: if
{
    let (worker, busy, queued) = &mut self.workers[worker];
    match queued {
        None => {
            // log!("Finished a thread");
            *busy = false
        }
        Some(message) => {
            // log!("Sending a new config to render");
            worker.post_message(&JsValue::from_serde(message).unwrap())?;
            *queued = None
        }
    }
    // this is old data, disregard
    return Ok(());
}
conditional_block
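The `conditional_block` above is the stale-result guard from `handle_render`: a worker may deliver pixels for an out-of-date `render_id`, in which case the data is dropped but the worker is still released, or immediately handed the config that queued up while it was busy. A minimal self-contained sketch of that pattern, with plain data standing in for `web_sys::Worker` and `shared::messaging::Message` (names here are illustrative, not from the original crate):

```rust
/// Toy stand-in for the worker tuple `(web_sys::Worker, bool, Option<Message>)`.
struct Slot {
    busy: bool,
    queued: Option<u64>, // stands in for shared::messaging::Message
}

/// Returns true if the result should be accumulated, false if it was stale.
fn on_result(slots: &mut [Slot], worker: usize, id: u64, last_rendered: u64) -> bool {
    if id < last_rendered {
        let slot = &mut slots[worker];
        match slot.queued.take() {
            // Nothing waiting: mark the worker idle.
            None => slot.busy = false,
            // A newer config queued up while it rendered: hand it over
            // (the real code does worker.post_message here, leaving busy = true).
            Some(_next_config) => {}
        }
        return false; // this is old data, disregard
    }
    true
}

fn main() {
    let mut slots = vec![Slot { busy: true, queued: None }];
    assert!(!on_result(&mut slots, 0, 3, 7)); // stale result: discarded
    assert!(!slots[0].busy);                  // but the worker is free again
    assert!(on_result(&mut slots, 0, 7, 7)); // current result: accumulate
}
```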
state.rs
_rendered: 0, ctx: crate::ui::init(&config).expect("Unable to setup canvas"), image_data: web_sys::ImageData::new_with_sw( config.rendering.width as u32, config.rendering.height as u32, ) .expect("Can't make an imagedata"), buffer: vec![0_u32; config.rendering.width * config.rendering.height], workers: vec![], ui: Default::default(), history: vec![config.clone()], history_index: 0, last_rendered_config: None, on_change, config, } } } pub fn make_image_data( config: &shared::Config, bright: &[u32], ) -> Result<web_sys::ImageData, JsValue> { let colored = shared::colorize(config, bright); let mut clamped = wasm_bindgen::Clamped(colored.clone()); // let mut clamped = Clamped(state.buffer.clone()); let data = web_sys::ImageData::new_with_u8_clamped_array_and_sh( wasm_bindgen::Clamped(clamped.as_mut_slice()), config.rendering.width as u32, config.rendering.height as u32, )?; Ok(data) } impl State { pub fn reset_buffer(&mut self) { self.buffer = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; self.invalidate_past_renders(); } pub fn add_worker(&mut self, worker: web_sys::Worker) { self.workers.push((worker, false, None)) } pub fn invalidate_past_renders(&mut self) { self.render_id += 1; self.last_rendered = self.render_id; } pub fn undo(&mut self) -> Result<(), JsValue> { log!("Undo {} {}", self.history.len(), self.history_index); if self.history_index == 0 { if Some(&self.config) != self.history.last() { self.history.push(self.config.clone()); } } self.history_index = (self.history_index + 1).min(self.history.len() - 1); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn redo(&mut self) -> Result<(), JsValue> { if self.history_index == 0 { log!("nothing to redo"); return Ok(()); } log!("redo"); self.history_index = (self.history_index - 1).max(0); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn maybe_save_history(&mut self) { log!("saving history"); // If the lastest is the same if self.history_index == 0 && self .history .last() .map_or(false, |last| *last == self.config) { return; } if self.history_index != 0 && self .history .get(self.history.len() - self.history_index - 1) .map_or(false, |last| *last == self.config) { return; } // snip undone stuff if self.history_index != 0 { self.history = self.history[0..self.history.len() - self.history_index].to_vec(); self.history_index = 0; } // if self.history.last().map_or(true, |last| *last != self.config) { self.history.push(self.config.clone()); if self.history.len() > 500 { // trim to 500 len self.history = self.history[self.history.len() - 500..].to_vec(); } // } } pub fn handle_render( &mut self, worker: usize, id: usize, array: js_sys::Uint32Array, ) -> Result<(), JsValue> { if id < self.last_rendered { let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } // this is old data, disregard return Ok(()); } if id > self.last_rendered { self.reset_buffer(); self.last_rendered = id; } let mut bright = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; array.copy_to(&mut bright); for i in 0..bright.len() { self.buffer[i] += bright[i]; } self.image_data = 
make_image_data(&self.config, &self.buffer)?; // crate::ui::use_ui(|ui| { // crate::ui::draw(ui, &self) // }); // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } Ok(()) } pub fn debug_render(&mut self) -> Result<(), JsValue> { let brightness = shared::calculate::deterministic_calc(&self.config); self.image_data = make_image_data(&self.config, &brightness)?; self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; Ok(()) } pub fn clear(&mut self) { self.ctx.clear_rect( 0.0, 0.0, self.config.rendering.width as f64, self.config.rendering.height as f64, ) } pub fn reexpose(&mut self) -> Result<(), JsValue> { self.image_data = make_image_data(&self.config, &self.buffer)?; // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; // crate::ui::use_ui(|ui| { crate::ui::draw(&self); // }); Ok(()) } pub fn send_on_change(&self) { let _res = self.on_change.call2( &JsValue::null(), &JsValue::from_serde(&self.config).unwrap(), &JsValue::from_serde(&self.ui).unwrap(), ); } pub fn async_render(&mut self, small: bool) -> Result<(), JsValue> { // log!("Async nreder folks"); match &self.last_rendered_config { Some(config) => { if *config == self.config { return Ok(()); } let mut old_config_with_new_exposure = config.clone(); old_config_with_new_exposure.rendering.exposure = self.config.rendering.exposure.clone(); old_config_with_new_exposure.rendering.coloration = self.config.rendering.coloration.clone(); // We've only changed settings that don't require recalculation if old_config_with_new_exposure == self.config { self.last_rendered_config = Some(self.config.clone()); self.send_on_change(); self.reexpose(); return Ok(()); } else { log!("Not the same") // log!("Not the same! {} vs {}", old_json, json) } } _ => (), } // log!("Render new config"); // web_sys::console::log_1(&JsValue::from_serde(&self.config).unwrap()); self.send_on_change(); self.last_rendered_config = Some(self.config.clone()); self.render_id += 1; let message = shared::messaging::Message { config: self.config.clone(), id: self.render_id, // count: if small { 10_000 } else { 500_000 }, count: 200_000, }; if self.workers.is_empty() { return self.debug_render(); } for (worker, busy, queued) in self.workers.iter_mut() { if *busy { // log!("Queueing up for a worker"); *queued = Some(message.clone()) } else { *busy = true; // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(&message).unwrap())?; } } Ok(()) } } lazy_static! { static ref STATE: Mutex<Option<State>> = Mutex::new(None); } pub fn with_opt_state<F: FnOnce(&mut Option<State>)>(f: F) { f(&mut STATE.lock().unwrap()) } pub fn set_state(state: State) { with_opt_state(|wrapper| *wrapper = Some(state)) } pub fn
has_state
identifier_name
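The `identifier_name` row resolves to `has_state`, the next global-state helper left open after `set_state` in the prefix. Worth spelling out from the same excerpt is the undo/redo arithmetic: `history_index` counts backwards from the end of `history`, so the visible entry is `history[len - index - 1]`; `undo` clamps the increment and `redo` early-returns at zero. A standalone toy version, assuming nothing beyond what the source shows:

```rust
// Toy model of undo()/redo(): history_index counts back from the end,
// so index 0 is the newest snapshot and the active entry is
// history[history.len() - history_index - 1].
fn active<'a>(history: &'a [&'a str], index: usize) -> &'a str {
    history[history.len() - index - 1]
}

fn main() {
    let history = ["v1", "v2", "v3"];
    let mut index = 0usize;
    assert_eq!(active(&history, index), "v3");

    // undo: step back, clamped so we never run off the front
    index = (index + 1).min(history.len() - 1);
    assert_eq!(active(&history, index), "v2");

    // redo: step forward again (the source early-returns at index == 0)
    index = index.saturating_sub(1);
    assert_eq!(active(&history, index), "v3");
}
```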
state.rs
pub image_data: web_sys::ImageData, pub config: shared::Config, pub history: Vec<shared::Config>, pub history_index: usize, pub last_rendered_config: Option<shared::Config>, pub buffer: Vec<u32>, pub ui: crate::ui::UiState, pub hist_canvas: Option<web_sys::HtmlCanvasElement>, pub on_change: js_sys::Function, pub workers: Vec<(web_sys::Worker, bool, Option<shared::messaging::Message>)>, } // umm I dunno if this is cheating or something // I mean bad things could happen if I accessed the ctx // from different threads // but given that wasm doesn't yet have threads, it's probably fine. unsafe impl Send for State {} impl State { pub fn new(config: shared::Config, on_change: js_sys::Function) -> Self { State { hide_timeout: None, render_id: 0, hist_canvas: None, last_rendered: 0, ctx: crate::ui::init(&config).expect("Unable to setup canvas"), image_data: web_sys::ImageData::new_with_sw( config.rendering.width as u32, config.rendering.height as u32, ) .expect("Can't make an imagedata"), buffer: vec![0_u32; config.rendering.width * config.rendering.height], workers: vec![], ui: Default::default(), history: vec![config.clone()], history_index: 0, last_rendered_config: None, on_change, config, } } } pub fn make_image_data( config: &shared::Config, bright: &[u32], ) -> Result<web_sys::ImageData, JsValue> { let colored = shared::colorize(config, bright); let mut clamped = wasm_bindgen::Clamped(colored.clone()); // let mut clamped = Clamped(state.buffer.clone()); let data = web_sys::ImageData::new_with_u8_clamped_array_and_sh( wasm_bindgen::Clamped(clamped.as_mut_slice()), config.rendering.width as u32, config.rendering.height as u32, )?; Ok(data) } impl State { pub fn reset_buffer(&mut self)
pub fn add_worker(&mut self, worker: web_sys::Worker) { self.workers.push((worker, false, None)) } pub fn invalidate_past_renders(&mut self) { self.render_id += 1; self.last_rendered = self.render_id; } pub fn undo(&mut self) -> Result<(), JsValue> { log!("Undo {} {}", self.history.len(), self.history_index); if self.history_index == 0 { if Some(&self.config) != self.history.last() { self.history.push(self.config.clone()); } } self.history_index = (self.history_index + 1).min(self.history.len() - 1); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn redo(&mut self) -> Result<(), JsValue> { if self.history_index == 0 { log!("nothing to redo"); return Ok(()); } log!("redo"); self.history_index = (self.history_index - 1).max(0); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn maybe_save_history(&mut self) { log!("saving history"); // If the lastest is the same if self.history_index == 0 && self .history .last() .map_or(false, |last| *last == self.config) { return; } if self.history_index != 0 && self .history .get(self.history.len() - self.history_index - 1) .map_or(false, |last| *last == self.config) { return; } // snip undone stuff if self.history_index != 0 { self.history = self.history[0..self.history.len() - self.history_index].to_vec(); self.history_index = 0; } // if self.history.last().map_or(true, |last| *last != self.config) { self.history.push(self.config.clone()); if self.history.len() > 500 { // trim to 500 len self.history = self.history[self.history.len() - 500..].to_vec(); } // } } pub fn handle_render( &mut self, worker: usize, id: usize, array: js_sys::Uint32Array, ) -> Result<(), JsValue> { if id < self.last_rendered { let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } // this is old data, disregard return Ok(()); } if id > self.last_rendered { self.reset_buffer(); self.last_rendered = id; } let mut bright = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; array.copy_to(&mut bright); for i in 0..bright.len() { self.buffer[i] += bright[i]; } self.image_data = make_image_data(&self.config, &self.buffer)?; // crate::ui::use_ui(|ui| { // crate::ui::draw(ui, &self) // }); // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } Ok(()) } pub fn debug_render(&mut self) -> Result<(), JsValue> { let brightness = shared::calculate::deterministic_calc(&self.config); self.image_data = make_image_data(&self.config, &brightness)?; self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; Ok(()) } pub fn clear(&mut self) { self.ctx.clear_rect( 0.0, 0.0, self.config.rendering.width as f64, self.config.rendering.height as f64, ) } pub fn reexpose(&mut self) -> Result<(), JsValue> { self.image_data = make_image_data(&self.config, &self.buffer)?; // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; // crate::ui::use_ui(|ui| { crate::ui::draw(&self); // }); Ok(()) } 
pub fn send_on_change(&self) { let _res = self.on_change.call2( &JsValue::null(), &JsValue::from_serde(&self.config).unwrap(), &JsValue::from_serde(&self.ui).unwrap(), ); } pub fn async_render(&mut self, small: bool) -> Result<(), JsValue> { // log!("Async nreder folks"); match &self.last_rendered_config { Some(config) => { if *config == self.config { return Ok(()); } let mut old_config_with_new_exposure = config.clone(); old_config_with_new_exposure.rendering.exposure = self.config.rendering.exposure.clone(); old_config_with_new_exposure.rendering.coloration = self.config.rendering.coloration.clone(); // We've only changed settings that don't require recalculation if old_config_with_new_exposure == self.config { self.last_rendered_config = Some(self.config.clone()); self.send_on_change(); self.reexpose(); return Ok(()); } else { log!("Not the same") // log!("Not the same! {} vs {}", old_json, json) } } _ => (), } // log!("Render new config"); // web_sys::console::log_1(&JsValue::from_serde(&self.config).unwrap()); self.send_on_change(); self.last_rendered_config = Some(self.config.clone()); self.render_id += 1; let message = shared::messaging::Message { config: self.config.clone(), id: self.render_id, // count: if
{
    self.buffer = vec![0_u32; self.config.rendering.width * self.config.rendering.height];
    self.invalidate_past_renders();
}
identifier_body
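The `identifier_body` row recovers `reset_buffer`'s body. A subtler piece of the same file is `async_render`'s short-circuit: it patches the previously rendered config with the current exposure and coloration, and if the result now equals the current config, only post-processing changed, so `reexpose()` suffices and no worker message is sent. A hedged sketch with a toy `Config` (field names other than `exposure`/`coloration` are invented):

```rust
// Toy config: `scene` stands in for everything that forces a recompute.
#[derive(Clone, PartialEq)]
struct Config {
    scene: u32,
    exposure: f32,
    coloration: u8,
}

/// Mirrors the async_render test: copy the post-processing fields from
/// the new config onto the last-rendered one and compare.
fn needs_recalc(last_rendered: &Config, current: &Config) -> bool {
    let mut patched = last_rendered.clone();
    patched.exposure = current.exposure;
    patched.coloration = current.coloration;
    patched != *current // any remaining difference touches the scene itself
}

fn main() {
    let old = Config { scene: 1, exposure: 0.5, coloration: 0 };
    let brighter = Config { exposure: 0.9, ..old.clone() };
    assert!(!needs_recalc(&old, &brighter)); // reexpose() is enough
    let moved = Config { scene: 2, ..old.clone() };
    assert!(needs_recalc(&old, &moved)); // full async render
}
```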
state.rs
pub image_data: web_sys::ImageData, pub config: shared::Config, pub history: Vec<shared::Config>, pub history_index: usize, pub last_rendered_config: Option<shared::Config>, pub buffer: Vec<u32>, pub ui: crate::ui::UiState, pub hist_canvas: Option<web_sys::HtmlCanvasElement>, pub on_change: js_sys::Function, pub workers: Vec<(web_sys::Worker, bool, Option<shared::messaging::Message>)>, } // umm I dunno if this is cheating or something // I mean bad things could happen if I accessed the ctx // from different threads // but given that wasm doesn't yet have threads, it's probably fine. unsafe impl Send for State {} impl State { pub fn new(config: shared::Config, on_change: js_sys::Function) -> Self { State { hide_timeout: None, render_id: 0, hist_canvas: None, last_rendered: 0, ctx: crate::ui::init(&config).expect("Unable to setup canvas"), image_data: web_sys::ImageData::new_with_sw( config.rendering.width as u32, config.rendering.height as u32, ) .expect("Can't make an imagedata"), buffer: vec![0_u32; config.rendering.width * config.rendering.height], workers: vec![], ui: Default::default(), history: vec![config.clone()], history_index: 0, last_rendered_config: None, on_change, config, } } } pub fn make_image_data( config: &shared::Config, bright: &[u32], ) -> Result<web_sys::ImageData, JsValue> { let colored = shared::colorize(config, bright); let mut clamped = wasm_bindgen::Clamped(colored.clone()); // let mut clamped = Clamped(state.buffer.clone()); let data = web_sys::ImageData::new_with_u8_clamped_array_and_sh( wasm_bindgen::Clamped(clamped.as_mut_slice()), config.rendering.width as u32, config.rendering.height as u32, )?; Ok(data) } impl State { pub fn reset_buffer(&mut self) { self.buffer = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; self.invalidate_past_renders(); } pub fn add_worker(&mut self, worker: web_sys::Worker) { self.workers.push((worker, false, None)) } pub fn invalidate_past_renders(&mut self) { self.render_id += 1; self.last_rendered = self.render_id; } pub fn undo(&mut self) -> Result<(), JsValue> { log!("Undo {} {}", self.history.len(), self.history_index); if self.history_index == 0 { if Some(&self.config) != self.history.last() { self.history.push(self.config.clone()); } } self.history_index = (self.history_index + 1).min(self.history.len() - 1); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn redo(&mut self) -> Result<(), JsValue> { if self.history_index == 0 { log!("nothing to redo"); return Ok(()); } log!("redo"); self.history_index = (self.history_index - 1).max(0); if let Some(config) = self .history .get(self.history.len() - self.history_index - 1) { self.config = config.clone(); self.async_render(false)?; } Ok(()) } pub fn maybe_save_history(&mut self) { log!("saving history"); // If the lastest is the same if self.history_index == 0 && self .history .last() .map_or(false, |last| *last == self.config) { return; } if self.history_index != 0 && self .history .get(self.history.len() - self.history_index - 1) .map_or(false, |last| *last == self.config) { return; } // snip undone stuff if self.history_index != 0 { self.history = self.history[0..self.history.len() - self.history_index].to_vec(); self.history_index = 0; } // if self.history.last().map_or(true, |last| *last != self.config) { self.history.push(self.config.clone()); if self.history.len() > 500 { // trim to 500 len self.history = self.history[self.history.len() - 
500..].to_vec(); }
pub fn handle_render( &mut self, worker: usize, id: usize, array: js_sys::Uint32Array, ) -> Result<(), JsValue> { if id < self.last_rendered { let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } // this is old data, disregard return Ok(()); } if id > self.last_rendered { self.reset_buffer(); self.last_rendered = id; } let mut bright = vec![0_u32; self.config.rendering.width * self.config.rendering.height]; array.copy_to(&mut bright); for i in 0..bright.len() { self.buffer[i] += bright[i]; } self.image_data = make_image_data(&self.config, &self.buffer)?; // crate::ui::use_ui(|ui| { // crate::ui::draw(ui, &self) // }); // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; let (worker, busy, queued) = &mut self.workers[worker]; match queued { None => { // log!("Finished a thread"); *busy = false } Some(message) => { // log!("Sending a new config to render"); worker.post_message(&JsValue::from_serde(message).unwrap())?; *queued = None } } Ok(()) } pub fn debug_render(&mut self) -> Result<(), JsValue> { let brightness = shared::calculate::deterministic_calc(&self.config); self.image_data = make_image_data(&self.config, &brightness)?; self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; Ok(()) } pub fn clear(&mut self) { self.ctx.clear_rect( 0.0, 0.0, self.config.rendering.width as f64, self.config.rendering.height as f64, ) } pub fn reexpose(&mut self) -> Result<(), JsValue> { self.image_data = make_image_data(&self.config, &self.buffer)?; // self.ctx.put_image_data(&self.image_data, 0.0, 0.0)?; // crate::ui::use_ui(|ui| { crate::ui::draw(&self); // }); Ok(()) } pub fn send_on_change(&self) { let _res = self.on_change.call2( &JsValue::null(), &JsValue::from_serde(&self.config).unwrap(), &JsValue::from_serde(&self.ui).unwrap(), ); } pub fn async_render(&mut self, small: bool) -> Result<(), JsValue> { // log!("Async nreder folks"); match &self.last_rendered_config { Some(config) => { if *config == self.config { return Ok(()); } let mut old_config_with_new_exposure = config.clone(); old_config_with_new_exposure.rendering.exposure = self.config.rendering.exposure.clone(); old_config_with_new_exposure.rendering.coloration = self.config.rendering.coloration.clone(); // We've only changed settings that don't require recalculation if old_config_with_new_exposure == self.config { self.last_rendered_config = Some(self.config.clone()); self.send_on_change(); self.reexpose(); return Ok(()); } else { log!("Not the same") // log!("Not the same! {} vs {}", old_json, json) } } _ => (), } // log!("Render new config"); // web_sys::console::log_1(&JsValue::from_serde(&self.config).unwrap()); self.send_on_change(); self.last_rendered_config = Some(self.config.clone()); self.render_id += 1; let message = shared::messaging::Message { config: self.config.clone(), id: self.render_id, // count: if small
// } }
random_line_split
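This `random_line_split` row cuts inside `maybe_save_history`, whose logic is easy to misread in flattened form: skip if the active snapshot already equals the config, drop the redo tail if we are mid-undo, push the new snapshot, then cap the log at 500 entries. A compact standalone restatement (same constants as the source):

```rust
fn save_history<T: Clone + PartialEq>(history: &mut Vec<T>, index: &mut usize, current: &T) {
    // Skip if the currently visible snapshot already equals `current`.
    let active = history
        .len()
        .checked_sub(*index + 1)
        .and_then(|i| history.get(i));
    if active == Some(current) {
        return;
    }
    // Editing after an undo discards everything that could be redone.
    if *index != 0 {
        let keep = history.len() - *index;
        history.truncate(keep);
        *index = 0;
    }
    history.push(current.clone());
    // Trim to the newest 500 entries, as the source does.
    if history.len() > 500 {
        let excess = history.len() - 500;
        history.drain(..excess);
    }
}

fn main() {
    let mut h = vec![1, 2, 3];
    let mut idx = 1; // one step undone: "2" is the visible snapshot
    save_history(&mut h, &mut idx, &9);
    assert_eq!(h, vec![1, 2, 9]); // the redo tail ("3") was snipped
}
```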
health.pb.go
HealthCheckRequest) Reset() { *m = HealthCheckRequest{} } func (m *HealthCheckRequest) String() string { return proto.CompactTextString(m) } func (*HealthCheckRequest) ProtoMessage() {} func (*HealthCheckRequest) Descriptor() ([]byte, []int) { return fileDescriptor_65380b3b807a73ad, []int{0} } func (m *HealthCheckRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HealthCheckRequest.Unmarshal(m, b) } func (m *HealthCheckRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HealthCheckRequest.Marshal(b, m, deterministic) } func (m *HealthCheckRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_HealthCheckRequest.Merge(m, src) } func (m *HealthCheckRequest) XXX_Size() int
func (m *HealthCheckRequest) XXX_DiscardUnknown() { xxx_messageInfo_HealthCheckRequest.DiscardUnknown(m) } var xxx_messageInfo_HealthCheckRequest proto.InternalMessageInfo func (m *HealthCheckRequest) GetService() string { if m != nil { return m.Service } return "" } type HealthCheckResponse struct { Status HealthCheckResponse_ServingStatus `protobuf:"varint,1,opt,name=status,proto3,enum=health.HealthCheckResponse_ServingStatus" json:"status,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *HealthCheckResponse) Reset() { *m = HealthCheckResponse{} } func (m *HealthCheckResponse) String() string { return proto.CompactTextString(m) } func (*HealthCheckResponse) ProtoMessage() {} func (*HealthCheckResponse) Descriptor() ([]byte, []int) { return fileDescriptor_65380b3b807a73ad, []int{1} } func (m *HealthCheckResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HealthCheckResponse.Unmarshal(m, b) } func (m *HealthCheckResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HealthCheckResponse.Marshal(b, m, deterministic) } func (m *HealthCheckResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_HealthCheckResponse.Merge(m, src) } func (m *HealthCheckResponse) XXX_Size() int { return xxx_messageInfo_HealthCheckResponse.Size(m) } func (m *HealthCheckResponse) XXX_DiscardUnknown() { xxx_messageInfo_HealthCheckResponse.DiscardUnknown(m) } var xxx_messageInfo_HealthCheckResponse proto.InternalMessageInfo func (m *HealthCheckResponse) GetStatus() HealthCheckResponse_ServingStatus { if m != nil { return m.Status } return HealthCheckResponse_UNKNOWN } func init() { proto.RegisterEnum("health.HealthCheckResponse_ServingStatus", HealthCheckResponse_ServingStatus_name, HealthCheckResponse_ServingStatus_value) proto.RegisterType((*HealthCheckRequest)(nil), "health.HealthCheckRequest") proto.RegisterType((*HealthCheckResponse)(nil), "health.HealthCheckResponse") } func init() { proto.RegisterFile("proto/health.proto", fileDescriptor_65380b3b807a73ad) } var fileDescriptor_65380b3b807a73ad = []byte{ // 282 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2a, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0xcf, 0x48, 0x4d, 0xcc, 0x29, 0xc9, 0xd0, 0x03, 0x73, 0x84, 0xd8, 0x20, 0x3c, 0x29, 0x99, 0xf4, 0xfc, 0xfc, 0xf4, 0x9c, 0x54, 0xfd, 0xc4, 0x82, 0x4c, 0xfd, 0xc4, 0xbc, 0xbc, 0xfc, 0x92, 0xc4, 0x92, 0xcc, 0xfc, 0xbc, 0x62, 0x88, 0x2a, 0x25, 0x3d, 0x2e, 0x21, 0x0f, 0xb0, 0x3a, 0xe7, 0x8c, 0xd4, 0xe4, 0xec, 0xa0, 0xd4, 0xc2, 0xd2, 0xd4, 0xe2, 0x12, 0x21, 0x09, 0x2e, 0xf6, 0xe2, 0xd4, 0xa2, 0xb2, 0xcc, 0xe4, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x18, 0x57, 0x69, 0x25, 0x23, 0x97, 0x30, 0x8a, 0x86, 0xe2, 0x82, 0xfc, 0xbc, 0xe2, 0x54, 0x21, 0x47, 0x2e, 0xb6, 0xe2, 0x92, 0xc4, 0x92, 0xd2, 0x62, 0xb0, 0x06, 0x3e, 0x23, 0x4d, 0x3d, 0xa8, 0x63, 0xb0, 0x28, 0xd6, 0x0b, 0x06, 0x19, 0x96, 0x97, 0x1e, 0x0c, 0xd6, 0x10, 0x04, 0xd5, 0xa8, 0xe4, 0xcf, 0xc5, 0x8b, 0x22, 0x21, 0xc4, 0xcd, 0xc5, 0x1e, 0xea, 0xe7, 0xed, 0xe7, 0x1f, 0xee, 0x27, 0xc0, 0x00, 0xe2, 0x04, 0xbb, 0x06, 0x85, 0x79, 0xfa, 0xb9, 0x0b, 0x30, 0x0a, 0xf1, 0x73, 0x71, 0xfb, 0xf9, 0x87, 0xc4, 0xc3, 0x04, 0x98, 0x84, 0x84, 0xb9, 0xf8, 0xc1, 0x1c, 0x67, 0xd7, 0x78, 0x98, 0x16, 0x66, 0xa3, 0xf5, 0x8c, 0x5c, 0x6c, 0x10, 0xeb, 0x85, 0x82, 0xb8, 0x58, 0xc1, 0x4e, 0x10, 0x92, 0xc2, 0xea, 0x2e, 0xb0, 0xaf, 0xa5, 0xa4, 0xf1, 0xb8, 0x59, 0x49, 0xa0, 0xe9, 0xf2, 0x93, 0xc9, 0x4c, 
0x5c, 0x42, 0x1c, 0xd0,
{ return xxx_messageInfo_HealthCheckRequest.Size(m) }
identifier_body
health.pb.go
HealthCheckRequest) Reset() { *m = HealthCheckRequest{} } func (m *HealthCheckRequest) String() string { return proto.CompactTextString(m) } func (*HealthCheckRequest) ProtoMessage() {} func (*HealthCheckRequest) Descriptor() ([]byte, []int) { return fileDescriptor_65380b3b807a73ad, []int{0} } func (m *HealthCheckRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HealthCheckRequest.Unmarshal(m, b) } func (m *HealthCheckRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HealthCheckRequest.Marshal(b, m, deterministic) } func (m *HealthCheckRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_HealthCheckRequest.Merge(m, src) } func (m *HealthCheckRequest) XXX_Size() int { return xxx_messageInfo_HealthCheckRequest.Size(m) } func (m *HealthCheckRequest) XXX_DiscardUnknown() { xxx_messageInfo_HealthCheckRequest.DiscardUnknown(m) } var xxx_messageInfo_HealthCheckRequest proto.InternalMessageInfo func (m *HealthCheckRequest)
() string { if m != nil { return m.Service } return "" } type HealthCheckResponse struct { Status HealthCheckResponse_ServingStatus `protobuf:"varint,1,opt,name=status,proto3,enum=health.HealthCheckResponse_ServingStatus" json:"status,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *HealthCheckResponse) Reset() { *m = HealthCheckResponse{} } func (m *HealthCheckResponse) String() string { return proto.CompactTextString(m) } func (*HealthCheckResponse) ProtoMessage() {} func (*HealthCheckResponse) Descriptor() ([]byte, []int) { return fileDescriptor_65380b3b807a73ad, []int{1} } func (m *HealthCheckResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HealthCheckResponse.Unmarshal(m, b) } func (m *HealthCheckResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HealthCheckResponse.Marshal(b, m, deterministic) } func (m *HealthCheckResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_HealthCheckResponse.Merge(m, src) } func (m *HealthCheckResponse) XXX_Size() int { return xxx_messageInfo_HealthCheckResponse.Size(m) } func (m *HealthCheckResponse) XXX_DiscardUnknown() { xxx_messageInfo_HealthCheckResponse.DiscardUnknown(m) } var xxx_messageInfo_HealthCheckResponse proto.InternalMessageInfo func (m *HealthCheckResponse) GetStatus() HealthCheckResponse_ServingStatus { if m != nil { return m.Status } return HealthCheckResponse_UNKNOWN } func init() { proto.RegisterEnum("health.HealthCheckResponse_ServingStatus", HealthCheckResponse_ServingStatus_name, HealthCheckResponse_ServingStatus_value) proto.RegisterType((*HealthCheckRequest)(nil), "health.HealthCheckRequest") proto.RegisterType((*HealthCheckResponse)(nil), "health.HealthCheckResponse") } func init() { proto.RegisterFile("proto/health.proto", fileDescriptor_65380b3b807a73ad) } var fileDescriptor_65380b3b807a73ad = []byte{ // 282 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2a, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0xcf, 0x48, 0x4d, 0xcc, 0x29, 0xc9, 0xd0, 0x03, 0x73, 0x84, 0xd8, 0x20, 0x3c, 0x29, 0x99, 0xf4, 0xfc, 0xfc, 0xf4, 0x9c, 0x54, 0xfd, 0xc4, 0x82, 0x4c, 0xfd, 0xc4, 0xbc, 0xbc, 0xfc, 0x92, 0xc4, 0x92, 0xcc, 0xfc, 0xbc, 0x62, 0x88, 0x2a, 0x25, 0x3d, 0x2e, 0x21, 0x0f, 0xb0, 0x3a, 0xe7, 0x8c, 0xd4, 0xe4, 0xec, 0xa0, 0xd4, 0xc2, 0xd2, 0xd4, 0xe2, 0x12, 0x21, 0x09, 0x2e, 0xf6, 0xe2, 0xd4, 0xa2, 0xb2, 0xcc, 0xe4, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x18, 0x57, 0x69, 0x25, 0x23, 0x97, 0x30, 0x8a, 0x86, 0xe2, 0x82, 0xfc, 0xbc, 0xe2, 0x54, 0x21, 0x47, 0x2e, 0xb6, 0xe2, 0x92, 0xc4, 0x92, 0xd2, 0x62, 0xb0, 0x06, 0x3e, 0x23, 0x4d, 0x3d, 0xa8, 0x63, 0xb0, 0x28, 0xd6, 0x0b, 0x06, 0x19, 0x96, 0x97, 0x1e, 0x0c, 0xd6, 0x10, 0x04, 0xd5, 0xa8, 0xe4, 0xcf, 0xc5, 0x8b, 0x22, 0x21, 0xc4, 0xcd, 0xc5, 0x1e, 0xea, 0xe7, 0xed, 0xe7, 0x1f, 0xee, 0x27, 0xc0, 0x00, 0xe2, 0x04, 0xbb, 0x06, 0x85, 0x79, 0xfa, 0xb9, 0x0b, 0x30, 0x0a, 0xf1, 0x73, 0x71, 0xfb, 0xf9, 0x87, 0xc4, 0xc3, 0x04, 0x98, 0x84, 0x84, 0xb9, 0xf8, 0xc1, 0x1c, 0x67, 0xd7, 0x78, 0x98, 0x16, 0x66, 0xa3, 0xf5, 0x8c, 0x5c, 0x6c, 0x10, 0xeb, 0x85, 0x82, 0xb8, 0x58, 0xc1, 0x4e, 0x10, 0x92, 0xc2, 0xea, 0x2e, 0xb0, 0xaf, 0xa5, 0xa4, 0xf1, 0xb8, 0x59, 0x49, 0xa0, 0xe9, 0xf2, 0x93, 0xc9, 0x4c, 0x5c, 0x42, 0x1c, 0xd0,
GetService
identifier_name
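The `identifier_name` row resolves to `GetService`, one of protobuf-go's generated getters. Their `if m != nil` guard is what makes them nil-receiver safe, which is the idiom the rest of the generated code relies on. A two-line illustration in Go, matching the surrounding excerpt and assuming it sits in the generated file's package:

```go
package health

import "fmt"

// The generated getters guard against nil receivers, so a caller can
// read through a possibly-nil message without checking it first.
func ExampleNilSafeGetter() {
	var req *HealthCheckRequest        // nil pointer, never allocated
	fmt.Println(req.GetService() == "") // true — GetService checks m != nil
}
```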
health.pb.go
*HealthCheckRequest) Reset() { *m = HealthCheckRequest{} } func (m *HealthCheckRequest) String() string { return proto.CompactTextString(m) } func (*HealthCheckRequest) ProtoMessage() {} func (*HealthCheckRequest) Descriptor() ([]byte, []int) { return fileDescriptor_65380b3b807a73ad, []int{0} } func (m *HealthCheckRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HealthCheckRequest.Unmarshal(m, b) } func (m *HealthCheckRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HealthCheckRequest.Marshal(b, m, deterministic) } func (m *HealthCheckRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_HealthCheckRequest.Merge(m, src) } func (m *HealthCheckRequest) XXX_Size() int { return xxx_messageInfo_HealthCheckRequest.Size(m) }
func (m *HealthCheckRequest) GetService() string { if m != nil { return m.Service } return "" } type HealthCheckResponse struct { Status HealthCheckResponse_ServingStatus `protobuf:"varint,1,opt,name=status,proto3,enum=health.HealthCheckResponse_ServingStatus" json:"status,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *HealthCheckResponse) Reset() { *m = HealthCheckResponse{} } func (m *HealthCheckResponse) String() string { return proto.CompactTextString(m) } func (*HealthCheckResponse) ProtoMessage() {} func (*HealthCheckResponse) Descriptor() ([]byte, []int) { return fileDescriptor_65380b3b807a73ad, []int{1} } func (m *HealthCheckResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HealthCheckResponse.Unmarshal(m, b) } func (m *HealthCheckResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HealthCheckResponse.Marshal(b, m, deterministic) } func (m *HealthCheckResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_HealthCheckResponse.Merge(m, src) } func (m *HealthCheckResponse) XXX_Size() int { return xxx_messageInfo_HealthCheckResponse.Size(m) } func (m *HealthCheckResponse) XXX_DiscardUnknown() { xxx_messageInfo_HealthCheckResponse.DiscardUnknown(m) } var xxx_messageInfo_HealthCheckResponse proto.InternalMessageInfo func (m *HealthCheckResponse) GetStatus() HealthCheckResponse_ServingStatus { if m != nil { return m.Status } return HealthCheckResponse_UNKNOWN } func init() { proto.RegisterEnum("health.HealthCheckResponse_ServingStatus", HealthCheckResponse_ServingStatus_name, HealthCheckResponse_ServingStatus_value) proto.RegisterType((*HealthCheckRequest)(nil), "health.HealthCheckRequest") proto.RegisterType((*HealthCheckResponse)(nil), "health.HealthCheckResponse") } func init() { proto.RegisterFile("proto/health.proto", fileDescriptor_65380b3b807a73ad) } var fileDescriptor_65380b3b807a73ad = []byte{ // 282 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2a, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0xcf, 0x48, 0x4d, 0xcc, 0x29, 0xc9, 0xd0, 0x03, 0x73, 0x84, 0xd8, 0x20, 0x3c, 0x29, 0x99, 0xf4, 0xfc, 0xfc, 0xf4, 0x9c, 0x54, 0xfd, 0xc4, 0x82, 0x4c, 0xfd, 0xc4, 0xbc, 0xbc, 0xfc, 0x92, 0xc4, 0x92, 0xcc, 0xfc, 0xbc, 0x62, 0x88, 0x2a, 0x25, 0x3d, 0x2e, 0x21, 0x0f, 0xb0, 0x3a, 0xe7, 0x8c, 0xd4, 0xe4, 0xec, 0xa0, 0xd4, 0xc2, 0xd2, 0xd4, 0xe2, 0x12, 0x21, 0x09, 0x2e, 0xf6, 0xe2, 0xd4, 0xa2, 0xb2, 0xcc, 0xe4, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x18, 0x57, 0x69, 0x25, 0x23, 0x97, 0x30, 0x8a, 0x86, 0xe2, 0x82, 0xfc, 0xbc, 0xe2, 0x54, 0x21, 0x47, 0x2e, 0xb6, 0xe2, 0x92, 0xc4, 0x92, 0xd2, 0x62, 0xb0, 0x06, 0x3e, 0x23, 0x4d, 0x3d, 0xa8, 0x63, 0xb0, 0x28, 0xd6, 0x0b, 0x06, 0x19, 0x96, 0x97, 0x1e, 0x0c, 0xd6, 0x10, 0x04, 0xd5, 0xa8, 0xe4, 0xcf, 0xc5, 0x8b, 0x22, 0x21, 0xc4, 0xcd, 0xc5, 0x1e, 0xea, 0xe7, 0xed, 0xe7, 0x1f, 0xee, 0x27, 0xc0, 0x00, 0xe2, 0x04, 0xbb, 0x06, 0x85, 0x79, 0xfa, 0xb9, 0x0b, 0x30, 0x0a, 0xf1, 0x73, 0x71, 0xfb, 0xf9, 0x87, 0xc4, 0xc3, 0x04, 0x98, 0x84, 0x84, 0xb9, 0xf8, 0xc1, 0x1c, 0x67, 0xd7, 0x78, 0x98, 0x16, 0x66, 0xa3, 0xf5, 0x8c, 0x5c, 0x6c, 0x10, 0xeb, 0x85, 0x82, 0xb8, 0x58, 0xc1, 0x4e, 0x10, 0x92, 0xc2, 0xea, 0x2e, 0xb0, 0xaf, 0xa5, 0xa4, 0xf1, 0xb8, 0x59, 0x49, 0xa0, 0xe9, 0xf2, 0x93, 0xc9, 0x4c, 0x5c, 0x42, 0x1c, 0xd0,
func (m *HealthCheckRequest) XXX_DiscardUnknown() { xxx_messageInfo_HealthCheckRequest.DiscardUnknown(m) } var xxx_messageInfo_HealthCheckRequest proto.InternalMessageInfo
random_line_split
health.pb.go
0xbc, 0xfc, 0x92, 0xc4, 0x92, 0xcc, 0xfc, 0xbc, 0x62, 0x88, 0x2a, 0x25, 0x3d, 0x2e, 0x21, 0x0f, 0xb0, 0x3a, 0xe7, 0x8c, 0xd4, 0xe4, 0xec, 0xa0, 0xd4, 0xc2, 0xd2, 0xd4, 0xe2, 0x12, 0x21, 0x09, 0x2e, 0xf6, 0xe2, 0xd4, 0xa2, 0xb2, 0xcc, 0xe4, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x18, 0x57, 0x69, 0x25, 0x23, 0x97, 0x30, 0x8a, 0x86, 0xe2, 0x82, 0xfc, 0xbc, 0xe2, 0x54, 0x21, 0x47, 0x2e, 0xb6, 0xe2, 0x92, 0xc4, 0x92, 0xd2, 0x62, 0xb0, 0x06, 0x3e, 0x23, 0x4d, 0x3d, 0xa8, 0x63, 0xb0, 0x28, 0xd6, 0x0b, 0x06, 0x19, 0x96, 0x97, 0x1e, 0x0c, 0xd6, 0x10, 0x04, 0xd5, 0xa8, 0xe4, 0xcf, 0xc5, 0x8b, 0x22, 0x21, 0xc4, 0xcd, 0xc5, 0x1e, 0xea, 0xe7, 0xed, 0xe7, 0x1f, 0xee, 0x27, 0xc0, 0x00, 0xe2, 0x04, 0xbb, 0x06, 0x85, 0x79, 0xfa, 0xb9, 0x0b, 0x30, 0x0a, 0xf1, 0x73, 0x71, 0xfb, 0xf9, 0x87, 0xc4, 0xc3, 0x04, 0x98, 0x84, 0x84, 0xb9, 0xf8, 0xc1, 0x1c, 0x67, 0xd7, 0x78, 0x98, 0x16, 0x66, 0xa3, 0xf5, 0x8c, 0x5c, 0x6c, 0x10, 0xeb, 0x85, 0x82, 0xb8, 0x58, 0xc1, 0x4e, 0x10, 0x92, 0xc2, 0xea, 0x2e, 0xb0, 0xaf, 0xa5, 0xa4, 0xf1, 0xb8, 0x59, 0x49, 0xa0, 0xe9, 0xf2, 0x93, 0xc9, 0x4c, 0x5c, 0x42, 0x1c, 0xd0, 0x50, 0xae, 0x12, 0x0a, 0xe4, 0x62, 0x0d, 0x4a, 0x4d, 0x4c, 0xa9, 0x24, 0xdf, 0x4c, 0x7e, 0xb0, 0x99, 0x9c, 0x42, 0xec, 0xfa, 0x45, 0x20, 0x83, 0xaa, 0x9c, 0xb8, 0xa2, 0x38, 0x20, 0xca, 0x0b, 0x92, 0x92, 0xd8, 0xc0, 0x11, 0x64, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x2c, 0x3d, 0xe3, 0xe7, 0xdc, 0x01, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConnInterface // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion6 // HealthClient is the client API for Health service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type HealthClient interface { Check(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (*HealthCheckResponse, error) Ready(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (*HealthCheckResponse, error) } type healthClient struct { cc grpc.ClientConnInterface } func NewHealthClient(cc grpc.ClientConnInterface) HealthClient { return &healthClient{cc} } func (c *healthClient) Check(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (*HealthCheckResponse, error) { out := new(HealthCheckResponse) err := c.cc.Invoke(ctx, "/health.Health/Check", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *healthClient) Ready(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (*HealthCheckResponse, error) { out := new(HealthCheckResponse) err := c.cc.Invoke(ctx, "/health.Health/Ready", in, out, opts...) if err != nil { return nil, err } return out, nil } // HealthServer is the server API for Health service. type HealthServer interface { Check(context.Context, *HealthCheckRequest) (*HealthCheckResponse, error) Ready(context.Context, *HealthCheckRequest) (*HealthCheckResponse, error) } // UnimplementedHealthServer can be embedded to have forward compatible implementations. 
type UnimplementedHealthServer struct { } func (*UnimplementedHealthServer) Check(ctx context.Context, req *HealthCheckRequest) (*HealthCheckResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method Check not implemented") } func (*UnimplementedHealthServer) Ready(ctx context.Context, req *HealthCheckRequest) (*HealthCheckResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method Ready not implemented") } func RegisterHealthServer(s *grpc.Server, srv HealthServer) { s.RegisterService(&_Health_serviceDesc, srv) } func _Health_Check_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(HealthCheckRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(HealthServer).Check(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/health.Health/Check", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(HealthServer).Check(ctx, req.(*HealthCheckRequest)) } return interceptor(ctx, in, info, handler) } func _Health_Ready_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(HealthCheckRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil
{ return srv.(HealthServer).Ready(ctx, in) }
conditional_block
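The recovered `conditional_block` is the no-interceptor fast path of `_Health_Ready_Handler`: after decoding the request, the handler calls the service method directly when no `grpc.UnaryServerInterceptor` is installed, and otherwise wraps the call so the interceptor can run around it. On the implementing side, the point of `UnimplementedHealthServer` (per the generated comment above) is forward compatibility. A sketch of a server that overrides only `Check`, hedged in that it assumes a `SERVING` value exists in this proto's `ServingStatus` enum (only `UNKNOWN` is visible in the excerpt) and that it lives in the generated package:

```go
package health

import "context"

// myHealth overrides Check only; Ready falls through to the embedded
// UnimplementedHealthServer stub, which returns codes.Unimplemented.
// Embedding the stub keeps this type compiling even if the service
// definition later gains new RPCs.
type myHealth struct {
	UnimplementedHealthServer
}

func (s *myHealth) Check(ctx context.Context, req *HealthCheckRequest) (*HealthCheckResponse, error) {
	// HealthCheckResponse_SERVING is an assumed enum value, mirroring
	// the standard gRPC health-checking proto.
	return &HealthCheckResponse{Status: HealthCheckResponse_SERVING}, nil
}
```

Registration would then go through the generated helper, e.g. `RegisterHealthServer(grpcServer, &myHealth{})`.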
lib.rs
) -> &OMatrix<R, SS, SS>; /// Get the transpose of the state transition model. fn transition_model_transpose(&self) -> &OMatrix<R, SS, SS>; /// Get the transition noise covariance. fn transition_noise_covariance(&self) -> &OMatrix<R, SS, SS>; /// Predict new state from old state. fn predict(&self, previous_estimate: &StateAndCovariance<R, SS>) -> StateAndCovariance<R, SS> { let state = self.transition_model() * previous_estimate.state(); let covariance = ((self.transition_model() * previous_estimate.covariance()) * self.transition_model_transpose()) + self.transition_noise_covariance(); StateAndCovariance::new(state, covariance) } } /// A linear observation model /// /// Note, to use a non-linear observation model, the non-linear model must /// be linearized (using the prior state estimate) and use this linearization /// as the basis for a `ObservationModelLinear` implementation. pub trait ObservationModelLinear<R, SS, OS> where R: RealField, SS: DimName, OS: DimName + DimMin<OS, Output = OS>, DefaultAllocator: Allocator<R, SS, SS>, DefaultAllocator: Allocator<R, SS>, DefaultAllocator: Allocator<R, OS, SS>, DefaultAllocator: Allocator<R, SS, OS>, DefaultAllocator: Allocator<R, OS, OS>, DefaultAllocator: Allocator<R, OS>, DefaultAllocator: Allocator<(usize, usize), OS>, { /// For a given state, predict the observation. /// /// If an observation is not possible, this returns NaN values. (This /// happens, for example, when a non-linear observation model implements /// this trait and must be evaluated for a state for which no observation is /// possible.) Observations with NaN values are treated as missing /// observations. fn evaluate(&self, state: &OVector<R, SS>) -> OVector<R, OS>; /// Get the observation model fn observation_matrix(&self) -> &OMatrix<R, OS, SS>; /// Get the transpose of the observation model. fn observation_matrix_transpose(&self) -> &OMatrix<R, SS, OS>; /// Get the observation noise covariance. // TODO: ensure this is positive definite? fn observation_noise_covariance(&self) -> &OMatrix<R, OS, OS>; /// Given a prior state and an observation, compute a posterior state estimate. fn
( &self, prior: &StateAndCovariance<R, SS>, observation: &OVector<R, OS>, covariance_method: CoverianceUpdateMethod, ) -> Result<StateAndCovariance<R, SS>, Error> { // Use conventional (e.g. wikipedia) names for these variables let h = self.observation_matrix(); trace!("h {}", pretty_print!(h)); let p = prior.covariance(); trace!("p {}", pretty_print!(p)); debug_assert_symmetric!(p); let ht = self.observation_matrix_transpose(); trace!("ht {}", pretty_print!(ht)); let r = self.observation_noise_covariance(); trace!("r {}", pretty_print!(r)); // Calculate innovation covariance // // Math note: if (h*p*ht) and r are positive definite, s is also // positive definite. If p is positive definite, then (h*p*ht) is at // least positive semi-definite. If h is full rank, it is positive // definite. let s = (h * p * ht) + r; trace!("s {}", pretty_print!(s)); // Calculate kalman gain by inverting. let s_chol = match na::linalg::Cholesky::new(s) { Some(v) => v, None => { // Maybe state covariance is not symmetric or // for from positive definite? Also, observation // noise should be positive definite. return Err(ErrorKind::CovarianceNotPositiveSemiDefinite.into()); } }; let s_inv: OMatrix<R, OS, OS> = s_chol.inverse(); trace!("s_inv {}", pretty_print!(s_inv)); let k_gain: OMatrix<R, SS, OS> = p * ht * s_inv; // let k_gain: OMatrix<R,SS,OS> = solve!( (p*ht), s ); trace!("k_gain {}", pretty_print!(k_gain)); let predicted: OVector<R, OS> = self.evaluate(prior.state()); trace!("predicted {}", pretty_print!(predicted)); trace!("observation {}", pretty_print!(observation)); let innovation: OVector<R, OS> = observation - predicted; trace!("innovation {}", pretty_print!(innovation)); let state: OVector<R, SS> = prior.state() + &k_gain * innovation; trace!("state {}", pretty_print!(state)); trace!( "self.observation_matrix() {}", pretty_print!(self.observation_matrix()) ); let kh: OMatrix<R, SS, SS> = &k_gain * self.observation_matrix(); trace!("kh {}", pretty_print!(kh)); let one_minus_kh = OMatrix::<R, SS, SS>::one() - kh; trace!("one_minus_kh {}", pretty_print!(one_minus_kh)); let covariance: OMatrix<R, SS, SS> = match covariance_method { CoverianceUpdateMethod::JosephForm => { // Joseph form of covariance update keeps covariance matrix symmetric. let left = &one_minus_kh * prior.covariance() * &one_minus_kh.transpose(); let right = &k_gain * r * &k_gain.transpose(); left + right } CoverianceUpdateMethod::OptimalKalman => one_minus_kh * prior.covariance(), CoverianceUpdateMethod::OptimalKalmanForcedSymmetric => { let covariance1 = one_minus_kh * prior.covariance(); trace!("covariance1 {}", pretty_print!(covariance1)); // Hack to force covariance to be symmetric. // See https://math.stackexchange.com/q/2335831 let half: R = na::convert(0.5); (&covariance1 + &covariance1.transpose()) * half } }; trace!("covariance {}", pretty_print!(covariance)); debug_assert_symmetric!(covariance); Ok(StateAndCovariance::new(state, covariance)) } } /// Specifies the approach used for updating the covariance matrix #[derive(Debug, PartialEq, Clone, Copy)] pub enum CoverianceUpdateMethod { /// Assumes optimal Kalman gain. /// /// Due to numerical errors, covariance matrix may not remain symmetric. OptimalKalman, /// Assumes optimal Kalman gain and then forces symmetric covariance matrix. /// /// With original covariance matrix P, returns covariance as (P + P.T)/2 /// to enforce that the covariance matrix remains symmetric. OptimalKalmanForcedSymmetric, /// Joseph form of covariance update keeps covariance matrix symmetric. 
JosephForm, } /// A Kalman filter with no control inputs, a linear process model and linear observation model pub struct KalmanFilterNoControl<'a, R, SS, OS> where R: RealField, SS: DimName, OS: DimName, { transition_model: &'a dyn TransitionModelLinearNoControl<R, SS>, observation_matrix: &'a dyn ObservationModelLinear<R, SS, OS>, } impl<'a, R, SS, OS> KalmanFilterNoControl<'a, R, SS, OS> where R: RealField, SS: DimName, OS: DimName + DimMin<OS, Output = OS>, DefaultAllocator: Allocator<R, SS, SS>, DefaultAllocator: Allocator<R, SS>, DefaultAllocator: Allocator<R, OS, SS>, DefaultAllocator: Allocator<R, SS, OS>, DefaultAllocator: Allocator<R, OS, OS>, DefaultAllocator: Allocator<R, OS>, DefaultAllocator: Allocator<(usize, usize), OS>, { /// Initialize a new `KalmanFilterNoControl` struct. /// /// The first parameter, `transition_model`, specifies the state transition /// model, including the function `F` and the process covariance `Q`. The /// second parameter, `observation_matrix`, specifies the observation model, /// including the measurement function `H` and the measurement covariance /// `R`. pub fn new( transition_model: &'a dyn TransitionModelLinearNoControl<R, SS>, observation_matrix: &'a dyn ObservationModelLinear<R, SS, OS>, ) -> Self { Self { transition_model, observation_matrix, } } /// Perform Kalman prediction and update steps with default values /// /// If any component of the observation is NaN (not a number), the /// observation will not be used but rather the prior will be returned as /// the posterior without performing the update step. /// /// This calls the prediction step of the transition model and then, if /// there is a (non-`nan`) observation, calls the update step of the /// observation model using the /// `CoverianceUpdateMethod::OptimalKalmanForcedSymmetric` covariance update /// method. /// /// This is a convenience method that calls /// [step
update
identifier_name
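The `identifier_name` here is `update`, the measurement step whose body follows in the suffix. Written out with the conventional names its comments reference (`h`, `p`, `r`, `s`, `k_gain`), the prediction and update it implements are:

$$
\begin{aligned}
\hat{x}_{k\mid k-1} &= F\,\hat{x}_{k-1\mid k-1}, &
P_{k\mid k-1} &= F\,P_{k-1\mid k-1}\,F^\top + Q,\\
S_k &= H\,P_{k\mid k-1}\,H^\top + R, &
K_k &= P_{k\mid k-1}\,H^\top S_k^{-1},\\
\hat{x}_{k\mid k} &= \hat{x}_{k\mid k-1} + K_k\bigl(z_k - H\,\hat{x}_{k\mid k-1}\bigr),\\
P_{k\mid k} &= (I - K_k H)\,P_{k\mid k-1} &&\text{(OptimalKalman)},\\
P_{k\mid k} &= (I - K_k H)\,P_{k\mid k-1}\,(I - K_k H)^\top + K_k R K_k^\top &&\text{(JosephForm)}.
\end{aligned}
$$

Note that the code inverts $S_k$ via a Cholesky factorization rather than a general inverse, which is also how it detects a non-positive-definite innovation covariance and surfaces `CovarianceNotPositiveSemiDefinite`.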
lib.rs
R: RealField, SS: DimName, OS: DimName, { transition_model: &'a dyn TransitionModelLinearNoControl<R, SS>, observation_matrix: &'a dyn ObservationModelLinear<R, SS, OS>, } impl<'a, R, SS, OS> KalmanFilterNoControl<'a, R, SS, OS> where R: RealField, SS: DimName, OS: DimName + DimMin<OS, Output = OS>, DefaultAllocator: Allocator<R, SS, SS>, DefaultAllocator: Allocator<R, SS>, DefaultAllocator: Allocator<R, OS, SS>, DefaultAllocator: Allocator<R, SS, OS>, DefaultAllocator: Allocator<R, OS, OS>, DefaultAllocator: Allocator<R, OS>, DefaultAllocator: Allocator<(usize, usize), OS>, { /// Initialize a new `KalmanFilterNoControl` struct. /// /// The first parameter, `transition_model`, specifies the state transition /// model, including the function `F` and the process covariance `Q`. The /// second parameter, `observation_matrix`, specifies the observation model, /// including the measurement function `H` and the measurement covariance /// `R`. pub fn new( transition_model: &'a dyn TransitionModelLinearNoControl<R, SS>, observation_matrix: &'a dyn ObservationModelLinear<R, SS, OS>, ) -> Self { Self { transition_model, observation_matrix, } } /// Perform Kalman prediction and update steps with default values /// /// If any component of the observation is NaN (not a number), the /// observation will not be used but rather the prior will be returned as /// the posterior without performing the update step. /// /// This calls the prediction step of the transition model and then, if /// there is a (non-`nan`) observation, calls the update step of the /// observation model using the /// `CoverianceUpdateMethod::OptimalKalmanForcedSymmetric` covariance update /// method. /// /// This is a convenience method that calls /// [step_with_options](struct.KalmanFilterNoControl.html#method.step_with_options). pub fn step( &self, previous_estimate: &StateAndCovariance<R, SS>, observation: &OVector<R, OS>, ) -> Result<StateAndCovariance<R, SS>, Error> { self.step_with_options( previous_estimate, observation, CoverianceUpdateMethod::OptimalKalmanForcedSymmetric, ) } /// Perform Kalman prediction and update steps with default values /// /// If any component of the observation is NaN (not a number), the /// observation will not be used but rather the prior will be returned as /// the posterior without performing the update step. /// /// This calls the prediction step of the transition model and then, if /// there is a (non-`nan`) observation, calls the update step of the /// observation model using the specified covariance update method. pub fn step_with_options( &self, previous_estimate: &StateAndCovariance<R, SS>, observation: &OVector<R, OS>, covariance_update_method: CoverianceUpdateMethod, ) -> Result<StateAndCovariance<R, SS>, Error> { let prior = self.transition_model.predict(previous_estimate); if observation.iter().any(|x| is_nan(*x)) { Ok(prior) } else { self.observation_matrix .update(&prior, observation, covariance_update_method) } } /// Kalman filter (operates on in-place data without allocating) /// /// Operates on entire time series (by repeatedly calling /// [`step`](struct.KalmanFilterNoControl.html#method.step) for each /// observation) and returns a vector of state estimates. To be /// mathematically correct, the interval between observations must be the /// `dt` specified in the motion model. /// /// If any observation has a NaN component, it is treated as missing. 
pub fn filter_inplace( &self, initial_estimate: &StateAndCovariance<R, SS>, observations: &[OVector<R, OS>], state_estimates: &mut [StateAndCovariance<R, SS>], ) -> Result<(), Error> { let mut previous_estimate = initial_estimate.clone(); assert!(state_estimates.len() >= observations.len()); for (this_observation, state_estimate) in observations.iter().zip(state_estimates.iter_mut()) { let this_estimate = self.step(&previous_estimate, this_observation)?; *state_estimate = this_estimate.clone(); previous_estimate = this_estimate; } Ok(()) } /// Kalman filter /// /// This is a convenience function that calls [`filter_inplace`](struct.KalmanFilterNoControl.html#method.filter_inplace). #[cfg(feature = "std")] pub fn filter( &self, initial_estimate: &StateAndCovariance<R, SS>, observations: &[OVector<R, OS>], ) -> Result<Vec<StateAndCovariance<R, SS>>, Error> { let mut state_estimates = Vec::with_capacity(observations.len()); let empty = StateAndCovariance::new(na::zero(), na::OMatrix::<R, SS, SS>::identity()); for _ in 0..observations.len() { state_estimates.push(empty.clone()); } self.filter_inplace(initial_estimate, observations, &mut state_estimates)?; Ok(state_estimates) } /// Rauch-Tung-Striebel (RTS) smoother /// /// Operates on entire time series (by calling /// [`filter`](struct.KalmanFilterNoControl.html#method.filter) then /// [`smooth_from_filtered`](struct.KalmanFilterNoControl.html#method.smooth_from_filtered)) /// and returns a vector of state estimates. To be mathematically correct, /// the interval between observations must be the `dt` specified in the /// motion model. /// Operates on entire time series in one shot and returns a vector of state /// estimates. To be mathematically correct, the interval between /// observations must be the `dt` specified in the motion model. /// /// If any observation has a NaN component, it is treated as missing. #[cfg(feature = "std")] pub fn smooth( &self, initial_estimate: &StateAndCovariance<R, SS>, observations: &[OVector<R, OS>], ) -> Result<Vec<StateAndCovariance<R, SS>>, Error> { let forward_results = self.filter(initial_estimate, observations)?; self.smooth_from_filtered(forward_results) } /// Rauch-Tung-Striebel (RTS) smoother using already Kalman filtered estimates /// /// Operates on entire time series in one shot and returns a vector of state /// estimates. To be mathematically correct, the interval between /// observations must be the `dt` specified in the motion model. 
#[cfg(feature = "std")] pub fn smooth_from_filtered( &self, mut forward_results: Vec<StateAndCovariance<R, SS>>, ) -> Result<Vec<StateAndCovariance<R, SS>>, Error> { forward_results.reverse(); let mut smoothed_backwards = Vec::with_capacity(forward_results.len()); let mut smooth_future = forward_results[0].clone(); smoothed_backwards.push(smooth_future.clone()); for filt in forward_results.iter().skip(1) { smooth_future = self.smooth_step(&smooth_future, filt)?; smoothed_backwards.push(smooth_future.clone()); } smoothed_backwards.reverse(); Ok(smoothed_backwards) } #[cfg(feature = "std")] fn smooth_step( &self, smooth_future: &StateAndCovariance<R, SS>, filt: &StateAndCovariance<R, SS>, ) -> Result<StateAndCovariance<R, SS>, Error> { let prior = self.transition_model.predict(filt); let v_chol = match na::linalg::Cholesky::new(prior.covariance().clone()) { Some(v) => v, None => { return Err(ErrorKind::CovarianceNotPositiveSemiDefinite.into()); } }; let inv_prior_covariance: OMatrix<R, SS, SS> = v_chol.inverse(); trace!( "inv_prior_covariance {}", pretty_print!(inv_prior_covariance) ); // J = dot(Vfilt, dot(A.T, inv(Vpred))) # smoother gain matrix let j = filt.covariance() * (self.transition_model.transition_model_transpose() * inv_prior_covariance); // xsmooth = xfilt + dot(J, xsmooth_future - xpred) let residuals = smooth_future.state() - prior.state(); let state = filt.state() + &j * residuals; // Vsmooth = Vfilt + dot(J, dot(Vsmooth_future - Vpred, J.T)) let covar_residuals = smooth_future.covariance() - prior.covariance(); let covariance = filt.covariance() + &j * (covar_residuals * j.transpose());
Ok(StateAndCovariance::new(state, covariance)) } }
random_line_split
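The split lands in the tail of `smooth_step`; restored, its comments (`J = dot(Vfilt, dot(A.T, inv(Vpred)))` and so on) are the standard Rauch-Tung-Striebel backward pass. In the notation above, with $N$ the length of the series:

$$
\begin{aligned}
J_k &= P_{k\mid k}\,F^\top\,P_{k+1\mid k}^{-1},\\
\hat{x}_{k\mid N} &= \hat{x}_{k\mid k} + J_k\bigl(\hat{x}_{k+1\mid N} - \hat{x}_{k+1\mid k}\bigr),\\
P_{k\mid N} &= P_{k\mid k} + J_k\bigl(P_{k+1\mid N} - P_{k+1\mid k}\bigr)J_k^\top,
\end{aligned}
$$

with `predict` supplying $\hat{x}_{k+1\mid k}$ and $P_{k+1\mid k}$, and the inverse again taken through a Cholesky factorization.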
10162017.js
"Object Added" } if (Message==''){ sMessage = "Hover over any object to continue..." } jq('<div class="spy-BoldPop ignrPopUp" id="spyAlert" style="font-family:sans-serif;margin: 0px auto;width: 240px;height: 90px;box-shadow: 1px 2px 8px 2px rgba(0,0,0,0.25);border-radius: 10px;border: 1px solid #888;zIndex:9999999999"><div class="spy-BoldContent ignrPopUp" style="width: 220px;text-align: center;float: left;padding: 10px;background-color: grey;border-radius: 10px 10px 0px 0px;"><h1 class="ignrPopUp" style="font-size: 20px;color: #FFF;padding: 0px;margin: 0px;font-weight: 300;">' + sHeader + '</h1></div><div class="spy-BoldBody ignrPopUp" style="width: 100%;margin: 10px auto; float: left;"><p class="ignrPopUp" style="padding: 5px;margin: 0px;font-size: 14px;text-align: center;color: #111;">' + sMessage + '</p></div>') .appendTo('body'); setTimeout(function() { if (jq('#spyAlert').length > 0) { jq('#spyAlert').remove(); } }, 1200) } function removeSpyPanel() { //jq(".ignrPopUp").detach(); jq("#ATOMspyPopUpDiv").hide(); } function spyMouseOut(e) { var element = e.target; e.stopPropagation(); //jq("#ATOMspyPopUpDiv").hide(); element.style.outline = '' } function getObjectType(object) { var title = jq(object).get(0).tagName.toLowerCase(); switch (title) { case "a": return ('Link'); break; case "button": return ('Button'); break; case "caption": case "table": case "caption": case "tbody": case "th": case "tfoot": case "td": case "tr": return ('Table'); break; case "div": case "dl": return ('Element'); break; case "ul": case "li": case "ol": case "menu": case "menuitem": case "optgroup": case "select": return ('List'); break; case "iframe": return ('Frame'); break; case "fieldset": case "form": return ('Form'); break; case "input": var inputType = jq(object).get(0).tagName.toLowerCase() === "input" ? 
jq(object).get(0).type.toLowerCase() : jq(object).get(0).tagName.toLowerCase(); switch (inputType) { case "reset": case "submit": case "button": return ("Button"); break; case "file": return ("File"); break; default: return ('Input ' + toCamelCase(inputType)); break; } break; default: return ('Element'); break; } } //var htmlObjTypes = {}; //htmlObjTypes[] props = Object.getOwnPropertyNames(window) for (var idx in props) { if (props[idx].indexOf("HTML") == 0) { //do something here //console.log(props[idx]); } } //Get Default Object properties function getDfltObjPrpties(obj, ObjType) { var objProps = []; var objRect = jq(obj).get(0).getBoundingClientRect() var obHeight = objRect.height; var objWidth = objRect.width; var objleft = objRect.left; var objRight = objRect.right; var objTop = objRect.top; var objBottom = objRect.bottom; var sAddtoTst=false if (jq('#spyAddtoTstChkBx').is(':checked')){ sAddtoTst=true } var props = { 'objName':jq('#SpyObjTypeVal').text(), 'title': jq(document).find("title").text(), 'type': toCamelCase(getObjectType(obj)), 'name':jq(obj).attr('name'), 'value':jq(obj).val(), 'innerText': jq(obj).text(), 'objectName': jq("#objSpyObjName").val(), 'id': jq(obj).attr('id'), 'xPATH': getXpath(obj), 'css': getCssSelector(obj), 'Class': jq(obj).attr('class'), 'height': obHeight.toFixed(2), 'width': objWidth.toFixed(2), 'x': objleft, 'y': objTop, 'visible': jq(obj).is(":visible"), 'addToTest':sAddtoTst, 'idProps':'name;value', 'href':jq(obj).attr("href"), 'frame':checkIfinFrame(obj), } objProps.push(props); return objProps; } //Generate Object name function genObjName(element, objType) { var text = ''; var str=getVisibleText(element) str=toCamelCase(str) var arr = str.split(' '); var iWordCnt; if (arr.length === 1){ arr[0]=str; iWordCnt=arr[0].length } else if (arr.length > 1 && arr.length < 7){ iWordCnt=3 } else if (arr.length >= 7 ){ iWordCnt=5 } for(var i=0;i<arr.length;i++) { text += arr[i].substr(0,iWordCnt) } return (text); }; // function getVisibleText(element){ var clone = jq(element).clone(true); clone.appendTo('body').find(':hidden').remove(); var text = clone.text(); clone.remove(); //Remove newlines and whitespaces text=text.replace(/\r?\n?/g, '').trim(); //Remove multiple spaces text=text.replace(/ +(?= )/g,''); if (element.defaultValue=="Submit") { if (typeof text === 'undefined'){ text=''; } } if (text.trim()==''){ text=jq(element).attr('name'); if (typeof text === 'undefined'){ text=''; } } if (text.trim()==''){ text=jq(element).val(); } //Remove special characters text=removeSpecialChars(text); //Remove extra spaces text=text.replace(/\s/g, ''); //Remove newlines and whitespaces text=text.replace(/\r?\n?/g, '').trim(); //Remove multiple spaces text=text.replace(/ +(?= )/g,''); return(text); } //Remove special characters function removeSpecialChars(str) { return str.replace(/(?!\w|\s)./g, '') .replace(/\s+/g, ' ') .replace(/^(\s*)([\W\w]*)(\b\s*$)/g, '$2'); } //Get XPath of the selected object function getXpath(element) { var xPath, element_sibling, siblingTagName, siblings, cnt, sibling_count, siblings_length; var ELEMENT_NODE = 1 var elementTagName = element.tagName.toLowerCase(); if (element.id != '') { return 'id("' + element.id + '")'; // alternative : // return '*[@id="' + element.id + '"]'; } else if (element.name && document.getElementsByName(element.name).length === 1) { return '//' + elementTagName + '[@name="' + element.name + '"]'; } if (element === document.body) { return '/html/' + elementTagName; } sibling_count = 0; siblings = element.parentNode.childNodes; siblings_length = siblings.length;
for (cnt = 0; cnt < siblings_length; cnt++) { var element_sibling = siblings[cnt]; if (element_sibling.nodeType !== ELEMENT_NODE) { // not ELEMENT_NODE continue; } if (element_sibling === element) { return getXpath(element.parentNode) + '/' + elementTagName + '[' + (sibling_count + 1) + ']'; } if (element_sibling.nodeType === 1 && element_sibling.tagName.toLowerCase() === elementTagName) { sibling_count++; } } return xPath; }; //Temp FrameCheck in place for now function checkIfinFrame(element){ if(element.ownerDocument !== document) { //return element.ownerDocument return "someiframe" //For Testing }else{ return '' } } function getCssSelector(element) { var ELEMENT_NODE = 1; if (!(element instanceof Element)) return; var path = []; while (element.nodeType === ELEMENT_NODE) { var selector = element.nodeName.toLowerCase(); if (element.id)
{ if (element.id.indexOf('-') > -1) { selector += '[id = "' + element.id + '"]'; } else { selector += '#' + element.id; } path.unshift(selector); break; }
conditional_block
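The getXpath routine above builds a locator by preferring an id() shortcut, then a unique @name, and otherwise recursing upward with a 1-based index among same-tag element siblings. A toy sketch of that sibling-indexing recursion, written in Python for consistency with the document's Python examples (the Node class and xpath_of name are illustrative; the unique-name branch and document.body special case are omitted):

class Node:
    def __init__(self, tag, attrs=None, children=()):
        self.tag, self.attrs, self.parent = tag, attrs or {}, None
        self.children = list(children)
        for c in self.children:
            c.parent = self

def xpath_of(node):
    """Mirror of getXpath: id shortcut, else 1-based same-tag sibling index."""
    if node.attrs.get('id'):
        return 'id("%s")' % node.attrs['id']
    if node.parent is None:
        return '/' + node.tag
    same_tag = [c for c in node.parent.children if c.tag == node.tag]
    return '%s/%s[%d]' % (xpath_of(node.parent), node.tag, same_tag.index(node) + 1)

root = Node('html', children=[Node('body', children=[Node('div'), Node('div', children=[Node('a')])])])
print(xpath_of(root.children[0].children[1].children[0]))  # /html/body[1]/div[2]/a[1]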
10162017.js
(); //jq("#ATOMspyPopUpDiv").attr('obj-prop') //showAlert("Message","Object Added to Repository"); }); //Attach event listener for mouse in and out on a given element if (document.addEventListener) { document.addEventListener("mouseover", spyMouseOver, true); document.addEventListener("mouseout", spyMouseOut, true); } else if (document.attachEvent) { document.attachEvent("onmouseover", spyMouseOver); document.attachEvent("onmouseout", spyMouseOut); } } function spyMouseOver(e) { var element = e.target; e.stopPropagation(); //Stop propagation is not really working, as it is too late by now if (window.getComputedStyle(element).display != 'none') { if (window.getComputedStyle(element).visibility != 'hidden') { if (element.getBoundingClientRect().width > 0) { if (!(jq(element).get(0).tagName == 'BODY')) { if (!(jq(element).get(0).tagName == 'HTML')) { if (!(jq(element).hasClass('ignrPopUp'))) { //Get the hovered element's bounding rect and scroll offsets var eleRect = jq(element).get(0).getBoundingClientRect(); var doc = document.documentElement; var scrollOffSetLeft = (window.pageXOffset || doc.scrollLeft) - (doc.clientLeft || 0); var scrollOffSetTop = (window.pageYOffset || doc.scrollTop) - (doc.clientTop || 0); var elementHeight = eleRect.height; //elementHeight=elementHeight-scrollOffSetTop var elementWidth = eleRect.width; //elementWidth=elementWidth-scrollOffSetLeft var elementleft = eleRect.left; var elementRight = eleRect.right; var elementTop = eleRect.top; //elementTop=elementTop-scrollOffSetTop var elementBottom = eleRect.bottom; var offsetWidth; if (elementWidth / jq(document).width() > 0.5) { //alert(jq(document).width()/elementWidth); offsetWidth = elementWidth / 2 } else { offsetWidth = elementWidth; } var offsetHeight = 10; var toolTipWidth = jq("#ATOMspyPopUpDiv").width(); var toolTipHeight = jq("#ATOMspyPopUpDiv").height(); //Get the HTML document width and height var documentWidth = jq(document).width(); var documentHeight = jq(document).height(); //var top = jq(element).offset().top; var top = elementTop; if (top + toolTipHeight > documentHeight) { // flip the tool tip position to the top of the object // so it won't go out of the current Html document height // and show up in the correct place top = documentHeight - toolTipHeight - offsetHeight - (2 * elementHeight); } else if (elementWidth / jq(document).width() > 0.5) { top = elementBottom; } if (scrollOffSetTop>0) { top=top+scrollOffSetTop } //set the left and right position of the tool tip var left = jq(element).offset().left + offsetWidth; if (left + toolTipWidth > documentWidth) { // shift the tool tip position to the left of the object // so it won't go out of width of current HTML document width // and show up in the correct place left = documentWidth - toolTipWidth - (2 * offsetWidth); } if (scrollOffSetLeft>0) { left=left+scrollOffSetLeft } element.style.outline = '3px solid black'; //var top=element.getBoundingClientRect().top; //var right=element.getBoundingClientRect().right; var title = jq(element).get(0).tagName; jq("#ATOMspyPopUpDiv").show(); var oBType = toCamelCase(getObjectType(element)); jq("#SpyObjTypeVal").text(' ' + oBType); // jq("#ATOMspyPopUpDiv").css({ top: top, left: right, position:'absolute', 'zIndex': "99999999" }); jq("#ATOMspyPopUpDiv").css({ top: top, left: left, position: 'absolute', 'zIndex': "99999999" }); //Set place holder i.e.
default jq("#objSpyObjName").attr("placeholder", "Object Name"); //Set Object name var objValToSet=genObjName(element, oBType); if (objValToSet != ''){ var tmpNm=genObjName(element, oBType) if (tmpNm.length >20){ tmpNm=tmpNm.substring(0, 20); } jq("#objSpyObjName").val(tmpNm); } //Call to get properties var rcvdProp = getDfltObjPrpties(element, oBType); jq("#ATOMspyPopUpDiv").attr('obj-prop', JSON.stringify(rcvdProp)); } } } //if (!(jq(element).get(0).tagName =='BODY')) } } } } //function spyMouseOver(e function attachSpyPanel() { jq('<div class="spypopup ignrPopUp" id="ATOMspyPopUpDiv" style="display: none;"><span id="SpyObjType" class="ignrPopUp"><strong class="ignrPopUp">Object Type:</strong></span><span id="SpyObjTypeVal" class="ignrPopUp"></span><br><a class="spyclosebttn ignrPopUp" id="spyCloseBtton" href="javascript:void(0)" >x</a><label class="ignrPopUp" for="ObjName"><strong class="ignrPopUp">Object Name:</strong></label><br><input class="ignrPopUp" type="text" name="objName" value="" id="objSpyObjName" placeholder="Object Name"><br><input class="ignrPopUp" type="checkbox" id="spyAddtoTstChkBx" name="addToTest" value="X" />Add to Test <br /><input class="ignrPopUp" type="submit" name="" value="Add" id="spySubmit"> </div>') .appendTo('body') .fadeIn('slow'); jq("#ATOMspyPopUpDiv").hide(); //Attach Delayed binding for Close Button jq('#spyCloseBtton').on("click", function(){ jq("#ATOMspyPopUpDiv").hide(); }); } function showAlert(Header, Message) { var sHeader, sMessage sHeader=Header; sMessage=Message; if (Header==''){ sHeader = "Object Added" } if (Message==''){ sMessage = "Hover over any object to continue..." } jq('<div class="spy-BoldPop ignrPopUp" id="spyAlert" style="font-family:sans-serif;margin: 0px auto;width: 240px;height: 90px;box-shadow: 1px 2px 8px 2px rgba(0,0,0,0.25);border-radius: 10px;border: 1px solid #888;zIndex:9999999999"><div class="spy-BoldContent ignrPopUp" style="width: 220px;text-align: center;float: left;padding: 10px;background-color: grey;border-radius: 10px 10px 0px 0px;"><h1 class="ignrPopUp" style="font-size: 20px;color: #FFF;padding: 0px;margin: 0px;font-weight: 300;">' + sHeader + '</h1></div><div class="spy-BoldBody ignrPopUp" style="width: 100%;margin: 10px auto; float: left;"><p class="ignrPopUp" style="padding: 5px;margin: 0px;font-size: 14px;text-align: center;color: #111;">' + sMessage + '</p></div>') .appendTo('body'); setTimeout(function() { if (jq('#spyAlert').length > 0) { jq('#spyAlert').remove(); } }, 1200) } function
() { //jq(".ignrPopUp").detach(); jq("#ATOMspyPopUpDiv").hide(); } function spyMouseOut(e) { var element = e.target; e.stopPropagation(); //jq("#ATOMspyPopUpDiv").hide(); element.style.outline = '' } function getObjectType(object) { var title = jq(object).get(0).tagName.toLowerCase(); switch (title) { case "a": return ('Link'); break; case "button": return ('Button'); break; case "caption": case "table": case "tbody": case "th": case "tfoot": case "td": case "tr": return ('Table
removeSpyPanel
identifier_name
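The positioning arithmetic in spyMouseOver keeps the tooltip inside the document: flip it above the element when it would overflow the bottom edge, and shift it left when it would overflow the right edge. A sketch of those rules, in Python for consistency with the document's Python examples (the function name and sample numbers are illustrative):

def tooltip_position(elem_top, elem_bottom, elem_left, elem_w, elem_h,
                     tip_w, tip_h, doc_w, doc_h, offset_h=10):
    """Mirror of the flip/shift rules in spyMouseOver."""
    # Wide elements (more than half the document) get a half-width offset.
    offset_w = elem_w / 2 if elem_w / doc_w > 0.5 else elem_w
    top = elem_top
    if top + tip_h > doc_h:
        # Would overflow the bottom: flip above the element.
        top = doc_h - tip_h - offset_h - 2 * elem_h
    elif elem_w / doc_w > 0.5:
        top = elem_bottom
    left = elem_left + offset_w
    if left + tip_w > doc_w:
        # Would overflow the right edge: shift left.
        left = doc_w - tip_w - 2 * offset_w
    return top, left

print(tooltip_position(580, 600, 900, 120, 20, 240, 90, 1024, 640))  # (500, 544)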
utils.ts
('focus', focusHandler, true); // } } /** * Sets the selected item in the dropdown menu * of available loadedListItems. * * @param {object} list * @param {object} item */ export function scrollActiveOption(list, item) { let y, height_menu, height_item, scroll, scroll_top, scroll_bottom; if (item) { height_menu = list.offsetHeight; height_item = getWidthOrHeight(item, 'height', 'margin'); //outerHeight(true); scroll = list.scrollTop || 0; y = getOffset(item).top - getOffset(list).top + scroll; scroll_top = y; scroll_bottom = y - height_menu + height_item; //TODO Make animation if (y + height_item > height_menu + scroll) { list.scrollTop = scroll_bottom; } else if (y < scroll) { list.scrollTop = scroll_top; } } } // Used for matching numbers const core_pnum = /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source; const rnumnonpx = new RegExp("^(" + core_pnum + ")(?!px)[a-z%]+$", "i"); function augmentWidthOrHeight(elem, name, extra, isBorderBox, styles) { let i = extra === (isBorderBox ? 'border' : 'content') ? // If we already have the right measurement, avoid augmentation 4 : // Otherwise initialize for horizontal or vertical properties name === 'width' ? 1 : 0, val = 0, cssExpand = ['Top', 'Right', 'Bottom', 'Left']; //TODO Use angular.element.css instead of getStyleValue after https://github.com/caitp/angular.js/commit/92bbb5e225253ebddd38ef5735d66ffef76b6a14 will be applied function getStyleValue(name) { return parseFloat(styles[name]); } for (; i < 4; i += 2) { // both box models exclude margin, so add it if we want it if (extra === 'margin') { val += getStyleValue(extra + cssExpand[i]); } if (isBorderBox) { // border-box includes padding, so remove it if we want content if (extra === 'content') { val -= getStyleValue('padding' + cssExpand[i]); } // at this point, extra isn't border nor margin, so remove border if (extra !== 'margin') { val -= getStyleValue('border' + cssExpand[i] + 'Width'); } } else { val += getStyleValue('padding' + cssExpand[i]); // at this point, extra isn't content nor padding, so add border if (extra !== 'padding') { val += getStyleValue('border' + cssExpand[i] + 'Width'); } } } return val; } function getOffset(elem) { let docElem, win, box = elem.getBoundingClientRect(), doc = elem && elem.ownerDocument; if (!doc)
docElem = doc.documentElement; win = getWindow(doc); return { top: box.top + win.pageYOffset - docElem.clientTop, left: box.left + win.pageXOffset - docElem.clientLeft }; } function getWindow(elem) { return elem != null && elem === elem.window ? elem : elem.nodeType === 9 && elem.defaultView; } function getWidthOrHeight(elem, name, extra) { // Start with offset property, which is equivalent to the border-box selectedItems let valueIsBorderBox = true, val = name === 'width' ? elem.offsetWidth : elem.offsetHeight, styles = window.getComputedStyle(elem, null), //TODO Make isBorderBox after https://github.com/caitp/angular.js/commit/92bbb5e225253ebddd38ef5735d66ffef76b6a14 will be applied isBorderBox = false; //jQuery.support.boxSizing && jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; // some non-html elements return undefined for offsetWidth, so check for null/undefined // svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285 // MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668 if (val <= 0 || val == null) { // Fall back to computed then uncomputed css if necessary val = styles[name]; if (val < 0 || val == null) { val = elem.style[name]; } // Computed unit is not pixels. Stop here and return. if (rnumnonpx.test(val)) { return val; } // we need the check for style in case a browser which returns unreliable values // for getComputedStyle silently falls back to the reliable elem.style //valueIsBorderBox = isBorderBox && ( jQuery.support.boxSizingReliable || val === elem.style[ name ] ); // Normalize "", auto, and prepare for extra val = parseFloat(val) || 0; } // use the active box-sizing model to add/subtract irrelevant styles return val + augmentWidthOrHeight(elem, name, extra || ( isBorderBox ? "border" : "content" ), valueIsBorderBox, styles); } /** * Calculate free space for menu and return true if need to change menu direction * @param toggleElement * @param menuElement * @param defaultMenuHeightPx * @returns {boolean} */ export function hasNoSpaceBelowForMenu(toggleElement, menuElement, defaultMenuHeightPx = 100) { const spaceAbove = toggleElement.getBoundingClientRect().top; const spaceBelow = window.innerHeight - toggleElement.getBoundingClientRect().bottom; const maxMenuHeight = parseInt(window.getComputedStyle(menuElement)['max-height']) || defaultMenuHeightPx; return spaceBelow < maxMenuHeight && spaceBelow < spaceAbove; } export function groupsIsEmpty(groups) { for (let k in groups) { if (groups.hasOwnProperty(k) && groups[k].length) { return false; } } return true; } /** * Find array intersections * Equal of lodash _.intersection + getter + invert * * @param {any[]} xArr * @param {any[]} yArr * @param {Function} getter * @param {boolean} invert * @returns {any[]} */ export function intersection(xArr: any[], yArr: any[], getter?: Function, invert?: boolean): any[] { let i, j, n, filteredX, filteredY, out: any[] = invert ? xArr.slice() : []; for (i = 0, n = xArr.length; i < xArr.length; i++) { filteredX = getter ? getter(xArr[i]) : xArr[i]; for (j = 0; j < yArr.length; j++) { filteredY = getter ? getter(yArr[j]) : yArr[j]; if (deepEqual(filteredX, filteredY)) { invert ? 
out.splice(i + out.length - n, 1) : out.push(yArr[j]); break; } } } return out; } /** * Deep comparing of two values * * @param actual * @param expected * @param {boolean} strict * @returns {any} */ function deepEqual(actual, expected, strict = true) { if (actual === expected) { return true; } else if (actual instanceof Date && expected instanceof Date) { return actual.getTime() === expected.getTime(); } else if (!actual || !expected || typeof actual != 'object' && typeof expected != 'object') { return strict ? actual === expected : actual == expected; } else { return objEqual(actual, expected, strict); } } /** * Deep comparison of two objects * * @param a * @param b * @param strict * @returns {boolean} */ function objEqual(a, b, strict) { let i, key; if (a == null || b == null) { return false; } if (a.prototype !== b.prototype) return false; try { let ka = Object.keys(a), kb = Object.keys(b); if (ka.length !== kb.length) return false; ka.sort(); kb.sort(); //cheap key test for (i = ka.length - 1; i >= 0; i--) { if (ka[i] != kb[i]) return false; } //possibly expensive deep test for (i = ka.length - 1; i >= 0; i--) { key = ka[i]; if (!deepEqual(a[key], b[key], strict)) return false; } return typeof a === typeof b; } catch (e) {//happens when one is a string literal and the other isn't return false; } } function toString(value) { return String(value !== void 0 ? value : ''); } // todo: remove excess chars const rEscapableCharacters = /[-\/\\^$*+?.()|[\]{}]/
{ return; }
conditional_block
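getWidthOrHeight and augmentWidthOrHeight above reproduce jQuery's box-model bookkeeping: starting from a measured size, they add or subtract padding, border, and margin depending on the box-sizing model and the `extra` level requested. A sketch of the same arithmetic reduced to one axis, in Python for consistency with the document's Python examples (the function name and the 100/10/1/5 pixel values are made up for illustration):

def outer_size(measured_px, padding, border, margin, extra='margin', border_box=False):
    """One-axis version of augmentWidthOrHeight: padding/border/margin per box model."""
    val = measured_px
    if border_box:
        # A border-box measurement already includes padding and border.
        if extra == 'content':
            val -= sum(padding) + sum(border)
        elif extra == 'padding':
            val -= sum(border)
    else:
        # A content-box measurement includes neither; add what 'extra' asks for.
        if extra in ('padding', 'border', 'margin'):
            val += sum(padding)
        if extra in ('border', 'margin'):
            val += sum(border)
    if extra == 'margin':
        val += sum(margin)
    return val

# content 100px, 10px padding, 1px border, 5px margin on each edge:
print(outer_size(100, (10, 10), (1, 1), (5, 5)))  # 132, like outerHeight(true)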
utils.ts
); } else { output3.push(item); } } output = output2.concat(output3); } } else { output = [].concat(items); } if (sort) { output = output.sort((A, B) => toString(getLabel(A)).localeCompare(toString(getLabel(B))) * orderFactor); } return output; } /** * Transform flat loadedListItems array to grouped object * * Example: * const loadedListItems = [{t: 'A', g: 'x'}, {t: 'B', g: 'x'}, {t: 'C'}] * const getter = (option) => option.g * * distributeOptionsByGroup(loadedListItems, getter) * // Result: {'x': [{t: 'A', g: 'x'}, {t: 'B', g: 'x'}], '': [{t: 'C'}]} * * @param {Array} options * @param {(option) => string} groupNameGetter * @returns {{[groupName: string]: Array}} */ export function distributeOptionsByGroup(options = [], groupNameGetter = (item) => '') { let optionGroups = {'':[]}, optionGroupName, optionGroup; for (let i = 0; i < options.length; i++) { optionGroupName = groupNameGetter(options[i]) || ''; if (!(optionGroup = optionGroups[optionGroupName])) { optionGroup = optionGroups[optionGroupName] = []; } optionGroup.push(options[i]); } return optionGroups; } export function findIndex(items = [], item, trackByGetter = (item) => item) { for (let i = 0; i < items.length; i++) { if (trackByGetter(items[i]) === trackByGetter(item)) { return i; } } } export function removeChildren(element) { while (element.firstChild) { element.removeChild(element.firstChild); } } /** * * @param {HTMLElement} containerElement * @param {any[]} newItems * @param {(item) => Element} elementConstructor * @param {Function} trackFieldGetter * @param {boolean} appendUndefinedItems - place undefined items to the end of list (they prepend by default) * @returns {HTMLElement} containerElement with changes */ export function updateElements(containerElement: HTMLElement, newItems: any[], elementConstructor: (item) => Element, trackFieldGetter?: Function, appendUndefinedItems?: boolean) { const elementsArr = Array.from(containerElement.children); const track = (item) => { const id = item !== undefined && (trackFieldGetter ? trackFieldGetter(item) : item); if (id || id === 0) return id; }; const uncountableElementId = new Error('Uncountable element'); // We use this id for interface (no data) elements let newItemIds, itemsMap, oldItemIds; if (trackFieldGetter) { oldItemIds = elementsArr.map((element: any) => element.hasOwnProperty('data') ? track(element.data) : uncountableElementId); newItemIds = []; itemsMap = new Map(); //trackFieldGetter can return original item by default newItems.forEach(item => { const itemId = track(item); itemsMap.set(itemId, item); // Save item for fast later retrieval newItemIds.push(itemId); }); } else { oldItemIds = elementsArr.map((element: any) => element.hasOwnProperty('data') ? element.data : uncountableElementId); newItemIds = newItems.slice(); } // Make undefined items for elements which are out of the list (have no data) oldItemIds.forEach((oldElementId: any) => { if (oldElementId === uncountableElementId) { newItemIds[appendUndefinedItems ? 'push' : 'unshift'](uncountableElementId); } else { appendUndefinedItems = true; } }); const instructions = myer.diff(oldItemIds, newItemIds); const operations = instructions.map(args => { //convert id to element for insert operations if (args.hasOwnProperty(2)) { //if we have ids for new elements return args.map((arg, i) => i < 2 ? arg : elementConstructor(itemsMap ?
itemsMap.get(arg) : arg)); } return args; }); operations.forEach(operation => { if (operation.hasOwnProperty(1)) { removeElements(containerElement, operation[0], operation[1]); } if (operation.hasOwnProperty(2)) { addElements(containerElement, operation[0], operation.slice(2)) } }); return containerElement; } function removeElements(containerElement, startIndex, amount) { const children = containerElement.children; for (let i = 0; i < amount; i++) { children[startIndex].remove(); } } function addElements(containerElement, startIndex, newElements) { const children = containerElement.children; if (startIndex) { children[startIndex - 1].after.apply(children[startIndex - 1], newElements); } else { containerElement.prepend.apply(containerElement, newElements) } } /** * Replace field value in deep object * * @param oldVal * @param newVal * @param {Object} object * @returns {{} & Object} */ export function deepReplace(oldVal: any, newVal: any, object: object) { const newObject = copy(object); Object.keys(object).forEach(key => { const val = object[key]; if (val === oldVal) { newObject[key] = newVal; } else if (val != null && typeof val === 'object') { newObject[key] = deepReplace(oldVal, newVal, val); } }); return newObject; } /** * Deep copy * * @param {Object} obj * @returns {Array | {}} */ function copy(obj: object) { const clone = {}; for(let i in obj) { if (obj[i] != null && typeof obj[i] === 'object') { clone[i] = copy(obj[i]); } else { clone[i] = obj[i]; } } return clone; } /** * Find value in object by path * * Example: * deepFind({a: {b: 1}}, 'a.b') //1 * * @param {Object} obj * @param {string} path * @param {boolean} originalIfNotFound * @returns {any} */ export function deepFind(obj: any, path: string, originalIfNotFound?: boolean) { if (!path || typeof obj !== 'object') return originalIfNotFound ? obj : undefined; const paths = path.split('.'); let i, current = obj; for (i = 0; i < paths.length; ++i) { if (current[paths[i]] == undefined) { return undefined; } else { current = current[paths[i]]; } } return current; } /** * Highlight `substr` in `str` by `<mark>` or custom tag * * @param {string} str * @param {string} substr * @param {string} tagName. `mark` by default * @returns {string} highlighted string */ export function highlight(str: string = '', substr: string = '', tagName?: string) { const tagTemplate = tagName ? `<${tagName}>$&</${tagName}>` : `<mark>$&</mark>`; let html = str; substr = String(substr); if (substr.length > 0) { str = String(str); substr = escapeCharacters(substr); html = str.replace(new RegExp(substr, 'gi'), tagTemplate); } return html; } /** * Debounce and extract target value from event * Useful for shadow-dom case when universal debounce works incorrect * @param fn * @param timeout * @returns {(e) => any} */ export function debounceEventValue(fn, timeout) { let timer = null; return function (e) { // Save `e.target.value` to value because `e` will be changed in shadow-dom case const value = e.target.value; const onComplete = () => { fn.call(this, value); timer = null; }; if (timer) { clearTimeout(timer); } timer = setTimeout(onComplete, timeout); }; } // Getters export function getItemsByField(fields: any, items: any[], fieldGetter: Function) { fields = Array.isArray(fields) ? fields : [fields]; return fields.map(field => { return items.find(item => fieldGetter(item) === field); }).filter(item => item); } export const noopPipe = (item?) => item; export const noop = (item?) 
=> {}; /** * Cache value for '' query and last value */ export class QueryCache { private cache: {q: string, v: any, t: number}[] = []; get(query: string = '') { return this.getValue(this.cache.find(cacheItem => cacheItem.q === query)); } getLast() { return this.getValue(this.cache[0]); } set(query: string = '', value: any)
{ // Remove duplicates, remove all except '' this.cache = this.cache.filter(cacheItem => cacheItem.q !== query && cacheItem.q === ''); this.cache.unshift({q: query, v: value, t: (new Date().getTime())}) }
identifier_body
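updateElements above diffs the old track-ids of a container's children against the new ones (via the imported `myer` Myers-diff helper) and then applies only the minimal remove/insert operations. A sketch of the same diff-and-patch idea over a plain list, in Python for consistency with the document's Python examples, using the standard library's difflib instead of a Myers-diff package (the names are illustrative):

from difflib import SequenceMatcher

def update_children(children, new_ids, make_element=lambda i: "<el %s>" % i):
    """Diff-and-patch like updateElements: minimal removes/inserts by id."""
    old_ids = [getattr(c, "data", c) for c in children]
    ops = SequenceMatcher(a=old_ids, b=new_ids, autojunk=False).get_opcodes()
    # Apply right-to-left so earlier indices stay valid while mutating.
    for tag, i1, i2, j1, j2 in reversed(ops):
        if tag in ("replace", "delete"):
            del children[i1:i2]
        if tag in ("replace", "insert"):
            children[i1:i1] = [make_element(i) for i in new_ids[j1:j2]]
    return children

print(update_children(["a", "b", "c"], ["b", "c", "d"]))
# ['b', 'c', '<el d>'] -- 'b' and 'c' are kept untouched, as in the DOM version.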
utils.ts
/[-\/\\^$*+?.()|[\]{}]/g; // cache escape + match String const sEscapeMatch = '\\$&'; /** * Escape special chars * @param string * @returns {string} */ function escapeCharacters(string: string) { return string.replace(rEscapableCharacters, sEscapeMatch); } /** * Filter items by matching their label (=getLabel(item)) against query, with optional ascending sort of the result * * @param items * @param query * @param {Function} getLabel * @param {{fields?: any[]; sort?: ("asc" | "desc"); strict?: boolean}} options * @returns {any[]} */ export function ascSort(items: any, query: any, getLabel: Function, options: {fields?: any[], sort?: 'asc'|'desc', strict?: boolean} = {}) { let i, j, isFound, item, output, output1 = [], output2 = [], output3 = [], sort = options.sort, strict = options.strict, orderFactor = sort === 'desc' ? -1 : 1, regExpParams = strict ? '' : 'i'; let getLabelArr: any[] = [getLabel]; if (options.fields) { getLabelArr = options.fields.map(field => typeof field === 'function' ? field : item => deepFind(item, field, true)) } getLabel = getLabelArr[0]; if (query !== '' && getLabel) { const safeQuery = escapeCharacters(toString(query)); const matchRegExp = new RegExp(safeQuery, regExpParams); const firstMatchRegExp = new RegExp('^' + safeQuery, regExpParams); // Filtering for (i = 0, isFound = false; i < items.length; i++) { item = items[i]; for (j = 0; j < getLabelArr.length; j++) { const label = getLabelArr[j](item); isFound = label === query || !strict && label == query || query !== undefined && matchRegExp.test(toString(label)); if (isFound) break; } if (isFound) { output1.push(item); } } if (sort) { output = output1; } else { // Place items starting with query on the top of list for (i = 0; i < output1.length; i++) { item = output1[i]; if (firstMatchRegExp.test(toString(getLabel(item)))) { output2.push(item); } else { output3.push(item); } } output = output2.concat(output3); } } else { output = [].concat(items); } if (sort) { output = output.sort((A, B) => toString(getLabel(A)).localeCompare(toString(getLabel(B))) * orderFactor); } return output; }
random_line_split
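ascSort's default (unsorted) branch filters case-insensitively and then floats items whose label starts with the query to the top of the list. A sketch of that ordering rule, in Python for consistency with the document's Python examples (the function name and sample fruit list are illustrative):

import re

def asc_sort(items, query, get_label=lambda x: x):
    """Match anywhere, but rank prefix matches first (mirrors ascSort's default branch)."""
    q = re.escape(str(query))
    matches = [it for it in items if re.search(q, str(get_label(it)), re.I)]
    starts = [it for it in matches if re.match(q, str(get_label(it)), re.I)]
    rest = [it for it in matches if it not in starts]
    return starts + rest

print(asc_sort(["grape", "apple", "pineapple", "apricot"], "ap"))
# ['apple', 'apricot', 'grape', 'pineapple']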
tarea8.py
""" Grafica una muestra normal bivriada de tamaño n con parámetros m y sigma usando el metodo de MH con kerneles híbridos de parametro w1, junto con los contornos de nivel de la densidad correspondiente. Input: array, array, float, int (media, matriz de covarianza, probabiidad de tomar el primer Kernel, tamaño) Output: Muestra gráfica """ M1 = NormalMHMC(m,sigma,w1,n) A= M1[:,0] B= M1[:,1] x = (A).tolist() y = (B).tolist() #Scatter colors = np.arange(0, 1, 1.0/n) area = 50*np.ones(n) plt.scatter(x, y, s=area, c=colors, alpha=0.8) grafBivariada(m,sigma) plt.savefig('bivariadaMH'+str(ro)+'-'+str(n)+'.png') plt.title('Muestra de tamano '+str(n)+ ' con rho = '+ str(ro)) plt.show() def densidad2(a,l,T,b,c): n=len(T) r1=1.0 for t in T: r1 = r1 * t sa=0 for t in T: sa = sa + t**a suma=0 for t in T: suma = suma + log(t) return ((n+a-1)*log(l)) - (l*(b+sa)) + (log(b)*a ) + (log(a)*n) + (a-1)*suma - (c*a) - loggamma(a) def grafTiempos(M,T,b,c): """ Función que grafica bivariada """ A=min(0.1, min(M[:,0])) B=max(M[:,0]) C=min(0.1, min(M[:,1])) D=max(M[:,1]) #Contornos delta = 0.25 x = np.arange(A, B+ 2*delta, delta) y = np.arange(C, D+ 2*delta, delta) X, Y = np.meshgrid(x, y) Z = densidad2(X,Y,T,b,c) ma= np.amax(Z) mi= np.amin(Z) plt.figure() plt.contour(X, Y, Z, levels=np.arange(mi,ma,5)) def NormalMHMC(m,sigma,w1,tam): """ Aplica el algoritmo de Metropolis-Hastings considerando como funcion objetivo la distribución normal bivariada con Kerneles híbridos dados por las siguientes propuestas: q1 ((x01, x02)|(x1, x2)) = f_X1|X2(x01|x2)1(x02 = x2) q2 ((x01, x02)|(x1, x2)) = fX2|X1(x02|x1)1(x01 = x1) Input: media, matriz de covarianza, tamaño (array, array, int) Output: muestra (array) """ dim = 2 M = zeros((tam, dim)) m1=m[0,0] m2=m[1,0] s1=np.sqrt(sigma[0,0]) s2=np.sqrt(sigma[1,1]) ro=(sigma[0,1])/(s1*s2) #Inical. M[0,0] = m1 M[0,1] = m2 for i in range(1,tam): if Bernoulli(w1)==1: x2=M[i-1,1] M[i,0]=normal(m1+ro*s1*(x2-m2)/s2, (s1**2)*(1-ro**2)) M[i,1]=M[i-1,1] else: x1=M[i-1,0] M[i,1]=normal(m2+ro*s2*(x1-m1)/s1, (s2**2)*(1-ro**2)) M[i,0]=M[i-1,0] return M def propuesta(numP,a,l,b,c,sigma,T): """ Devuelve distintas propuestas de acuerdo a la opción selecccionada. 
Input: int (desired option) Output: float, float, float (alpha proposal, lambda proposal, acceptance probability) """ n=len(T) if numP>4 or numP<1: raise ValueError("Unknown proposal") if numP==1: sa=0 for t in T: sa = sa + t**a #proposals lp= gamma(a + n , 1.0/(b + sa)) ap= a #rho ro = 1 return ap,lp,ro elif numP==2: r1=1.0 for t in T: r1 = r1 * t #proposals ap = gamma(n + 1 , 1.0/(-log(b)-log(r1)+c)) lp = l #rho sap=0 for t in T: sap = sap + t**ap sa=0 for t in T: sa = sa + t**a aux = float(loggamma(a)) + (ap-a)*log(l) - l*sap-float(loggamma(ap)) + l*sa #the lambda factor of the posterior ratio enters on the log scale as (ap-a)*log(l) c=min(0,aux) return ap,lp,exp(c) elif numP==3: ap = exponential(c) lp = gamma(ap , 1.0/b) #rho sap=0.0 for t in T: sap = sap + t**ap sa=0.0 for t in T: sa = sa + t**a r1=1.0 for t in T: r1= r1*t aux = n*log((ap*lp)/(a*l)) + (ap-a)*log(r1)- lp*sap + l*sa c=min(0,aux) return ap,lp,exp(c) else: ap=a + normal(0,sigma) lp= l suma=0 for t in T: suma = suma + log(t) sap=0 for t in T: sap = sap + t**ap sa=0 for t in T: sa = sa + t**a aux = ap*log(l)-l*(b+sap)+ap*log(b)+n*log(ap)+(ap-1)*suma-c*ap-float(loggamma(ap))-a*log(l)+l*(b+sa)-a*log(b)-n*log(a)-(a-1)*suma+c*a+float(loggamma(a)) c=min(0,aux) return ap,lp,exp(c) def TiemposMHMC(c,b,sigma,tam,T): """ Applies the MH algorithm with hybrid kernels to simulate values from the posterior distribution f(α, λ|t̄) ∝ f(t̄|α, λ)f(α, λ), considering the following proposals: Proposal 1: λp|α,t̄ ~ Gamma(α + n , b + Σ_{i=1}^n t_i^α) Proposal 2: αp|λ,t̄ ~ Gamma(n + 1 , −log(b) − log(r1) + c) Proposal 3: αp ~ exp(c) and λp|αp ~ Gamma(αp, b) Proposal 4 (RWMH): αp = α + ε, with ε ~ N(0, σ), keeping λ fixed. With the stated priors; data simulated using α = 1 and λ = 1 with n = 20, c = 1 and b = 1. Input: float, float, float, int, array (c, b, sigma, size, observed times) Output: sample (array) """ dim = 2 M = zeros((tam, dim)) ef=0.0 #Initial state. M[0,0] = exponential(1) M[0,1] = gamma(M[0,0], 1) for i in range(1,tam): a=M[i-1][0] l=M[i-1][1] numP = int(4*rand(1)[0])+1 R = propuesta(numP,a,l,b,c,sigma,T) ap = R[0] lp = R[1] ro = R[2] if Bernoulli(ro) == 1.0: M[i,0] = ap M[i,1] = lp else: M[i,0] = M[i-1,0] M[i
m,sigma,w1,n):
identifier_name
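A short driver for NormalMHMC as defined above: build the mean vector and a covariance matrix with correlation rho, run the chain, and check the empirical correlation of the draws. The values rho = 0.8, w1 = 0.5 and the chain length are illustrative, not from the source:

import numpy as np

m = np.array([[0.0], [0.0]])       # mean as a column vector, indexed m[0,0], m[1,0] as above
ro = 0.8
sigma = np.array([[1.0, ro],
                  [ro, 1.0]])      # unit variances, correlation ro
sample = NormalMHMC(m, sigma, 0.5, 5000)  # w1 = 0.5: both kernels equally likely
print(np.corrcoef(sample[:, 0], sample[:, 1])[0, 1])  # should approach 0.8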
tarea8.py
area = 50*np.ones(n) plt.scatter(x, y, s=area, c=colors, alpha=0.8) grafBivariada(m,sigma) plt.savefig('bivariadaMH'+str(ro)+'-'+str(n)+'.png') plt.title('Muestra de tamano '+str(n)+ ' con rho = '+ str(ro)) plt.show() def densidad2(a,l,T,b,c): n=len(T) r1=1.0 for t in T: r1 = r1 * t sa=0 for t in T: sa = sa + t**a suma=0 for t in T: suma = suma + log(t) return ((n+a-1)*log(l)) - (l*(b+sa)) + (log(b)*a ) + (log(a)*n) + (a-1)*suma - (c*a) - loggamma(a) def grafTiempos(M,T,b,c): """ Función que grafica bivariada """ A=min(0.1, min(M[:,0])) B=max(M[:,0]) C=min(0.1, min(M[:,1])) D=max(M[:,1]) #Contornos delta = 0.25 x = np.arange(A, B+ 2*delta, delta) y = np.arange(C, D+ 2*delta, delta) X, Y = np.meshgrid(x, y) Z = densidad2(X,Y,T,b,c) ma= np.amax(Z) mi= np.amin(Z) plt.figure() plt.contour(X, Y, Z, levels=np.arange(mi,ma,5)) def NormalMHMC(m,sigma,w1,tam): """ Aplica el algoritmo de Metropolis-Hastings considerando como funcion objetivo la distribución normal bivariada con Kerneles híbridos dados por las siguientes propuestas: q1 ((x01, x02)|(x1, x2)) = f_X1|X2(x01|x2)1(x02 = x2) q2 ((x01, x02)|(x1, x2)) = fX2|X1(x02|x1)1(x01 = x1) Input: media, matriz de covarianza, tamaño (array, array, int) Output: muestra (array) """ dim = 2 M = zeros((tam, dim)) m1=m[0,0] m2=m[1,0] s1=np.sqrt(sigma[0,0]) s2=np.sqrt(sigma[1,1]) ro=(sigma[0,1])/(s1*s2) #Inical. M[0,0] = m1 M[0,1] = m2 for i in range(1,tam): if Bernoulli(w1)==1: x2=M[i-1,1] M[i,0]=normal(m1+ro*s1*(x2-m2)/s2, (s1**2)*(1-ro**2)) M[i,1]=M[i-1,1] else: x1=M[i-1,0] M[i,1]=normal(m2+ro*s2*(x1-m1)/s1, (s2**2)*(1-ro**2)) M[i,0]=M[i-1,0] return M def propuesta(numP,a,l,b,c,sigma,T): """ Devuelve distintas propuestas de acuerdo a la opción selecccionada. Input: int (opción deseada) Output: float, float (propuesta, ro) """ n=len(T) if numP>4 or numP<1: raise ValueError("No conozco esa propuesta") if numP==1: sa=0 for t in T: sa = sa + t**a #propuestas lp= gamma(a + n , 1.0/(b + sa)) ap= a #rho ro = 1 return ap,lp,ro elif numP==2: r1=1.0 for t in T: r1 = r1 * t #propuestas ap = gamma(n + 1 , 1.0/(-log(b)-log(r1)+c)) lp = l #rho sap=0 for t in T: sap = sap + t**ap sa=0 for t in T: sa = sa + t**a aux = float(loggamma(a)) + (ap-a)*l - l*sap-float(loggamma(ap)) + l*sa c=min(0,aux) return ap,lp,exp(c) elif numP==3: ap = exponential(c) lp = gamma(ap , 1.0/b) #rho sap=0.0 for t in T: sap = sap + t**ap sa=0.0 for t in T: sa = sa + t**a r1=1.0 for t in T: r1= r1*t aux = n*
lp)/(a*l)) + (ap-a)*log(r1) - lp*sap + l*sa
        c = min(0, aux)
        return ap, lp, exp(c)
    else:
        ap = a + normal(0, sigma)
        lp = l
        suma = 0
        for t in T:
            suma = suma + log(t)
        sap = 0
        for t in T:
            sap = sap + t**ap
        sa = 0
        for t in T:
            sa = sa + t**a
        aux = (ap*log(l) - l*(b+sap) + ap*log(b) + n*log(ap) + (ap-1)*suma - c*ap - float(loggamma(ap))
               - a*log(l) + l*(b+sa) - a*log(b) - n*log(a) - (a-1)*suma + c*a + float(loggamma(a)))
        c = min(0, aux)
        return ap, lp, exp(c)

def TiemposMHMC(c, b, sigma, tam, T):
    """
    Runs the MH algorithm with hybrid kernels to simulate values from the
    posterior f(α, λ | t̄) ∝ f(t̄ | α, λ) f(α, λ), considering the proposals:
        Proposal 1: λp | α, t̄ ∼ Gamma(α + n, b + Σ_{i=1}^n t_i^α)
        Proposal 2: αp | λ, t̄ ∼ Gamma(n + 1, −log(b) − log(r1) + c)
        Proposal 3: αp ∼ exp(c) and λp | αp ∼ Gamma(αp, b)
        Proposal 4 (RWMH): αp = α + ε, with ε ∼ N(0, σ), keeping λ fixed.
    Priors as given; data simulated using α = 1 and λ = 1, with n = 20,
    c = 1 and b = 1.
    Input: c, b, sigma (floats), tam (int), T (array of observed times)
    Output: muestra (array)
    """
    dim = 2
    M = zeros((tam, dim))
    ef = 0.0
    # Initial state
    M[0,0] = exponential(1)
    M[0,1] = gamma(M[0,0], 1)
    for i in range(1, tam):
        a = M[i-1][0]
        l = M[i-1][1]
        numP = int(4*rand(1)[0]) + 1
        R = propuesta(numP, a, l, b, c, sigma, T)
        ap = R[0]
        lp = R[1]
        ro = R[2]
        if Bernoulli(ro) == 1.0:
            M[i,0] = ap
            M[i,1] = lp
        else:
            M[i,0] = M[i-1,0]
            M[i,1] = M[i-1,1]
            ef = ef + 1
    print("Se rechazaron el " + str(ef*100.0/tam) + "% de las propuestas")
    return M

def bombasAguaMHMC(a, c, d, tam, w1):
    """
    Simulates values from the posterior f(λ1, ..., λn, β | p̄), using a
    hybrid kernel that considers the proposals:
        λi | β, p̄ ∼ Gamma(t_i p_i + α, β + 1)
        β | λ ∼ Gamma(nα + γ, δ + Σ λ_i)
    With the given prior parameters.
    Input: a (float), c (float), d (float), tam (int), w1 (float)
log((ap*
conditional_block
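The hand-derived log acceptance ratios (aux) in propuesta can be cross-checked by evaluating the unnormalised log posterior directly, as densidad2 does. A hedged sketch of that check for the random-walk move on α (Proposal 4); log_post mirrors densidad2's terms and the helper name is an assumption:

import numpy as np
from scipy.special import gammaln

def log_post(a, lam, T, b, c):
    # Unnormalised log f(alpha, lambda | T), matching densidad2 term by term.
    T = np.asarray(T, dtype=float)
    n = len(T)
    return ((n + a - 1.0) * np.log(lam) - lam * (b + np.sum(T ** a))
            + a * np.log(b) + n * np.log(a)
            + (a - 1.0) * np.sum(np.log(T)) - c * a - gammaln(a))

# For the symmetric random-walk proposal (Proposal 4) the acceptance is
# log rho = min(0, log_post(ap, lam, T, b, c) - log_post(a, lam, T, b, c)).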
tarea8.py
)
        # rho
        sap = 0.0
        for t in T:
            sap = sap + t**ap
        sa = 0.0
        for t in T:
            sa = sa + t**a
        r1 = 1.0
        for t in T:
            r1 = r1*t
        aux = n*log((ap*lp)/(a*l)) + (ap-a)*log(r1) - lp*sap + l*sa
        c = min(0, aux)
        return ap, lp, exp(c)
    else:
        ap = a + normal(0, sigma)
        lp = l
        suma = 0
        for t in T:
            suma = suma + log(t)
        sap = 0
        for t in T:
            sap = sap + t**ap
        sa = 0
        for t in T:
            sa = sa + t**a
        aux = (ap*log(l) - l*(b+sap) + ap*log(b) + n*log(ap) + (ap-1)*suma - c*ap - float(loggamma(ap))
               - a*log(l) + l*(b+sa) - a*log(b) - n*log(a) - (a-1)*suma + c*a + float(loggamma(a)))
        c = min(0, aux)
        return ap, lp, exp(c)

def TiemposMHMC(c, b, sigma, tam, T):
    """
    Runs the MH algorithm with hybrid kernels to simulate values from the
    posterior f(α, λ | t̄) ∝ f(t̄ | α, λ) f(α, λ), considering the proposals:
        Proposal 1: λp | α, t̄ ∼ Gamma(α + n, b + Σ_{i=1}^n t_i^α)
        Proposal 2: αp | λ, t̄ ∼ Gamma(n + 1, −log(b) − log(r1) + c)
        Proposal 3: αp ∼ exp(c) and λp | αp ∼ Gamma(αp, b)
        Proposal 4 (RWMH): αp = α + ε, with ε ∼ N(0, σ), keeping λ fixed.
    Priors as given; data simulated using α = 1 and λ = 1, with n = 20,
    c = 1 and b = 1.
    Input: c, b, sigma (floats), tam (int), T (array of observed times)
    Output: muestra (array)
    """
    dim = 2
    M = zeros((tam, dim))
    ef = 0.0
    # Initial state
    M[0,0] = exponential(1)
    M[0,1] = gamma(M[0,0], 1)
    for i in range(1, tam):
        a = M[i-1][0]
        l = M[i-1][1]
        numP = int(4*rand(1)[0]) + 1
        R = propuesta(numP, a, l, b, c, sigma, T)
        ap = R[0]
        lp = R[1]
        ro = R[2]
        if Bernoulli(ro) == 1.0:
            M[i,0] = ap
            M[i,1] = lp
        else:
            M[i,0] = M[i-1,0]
            M[i,1] = M[i-1,1]
            ef = ef + 1
    print("Se rechazaron el " + str(ef*100.0/tam) + "% de las propuestas")
    return M

def bombasAguaMHMC(a, c, d, tam, w1):
    """
    Simulates values from the posterior f(λ1, ..., λn, β | p̄), using a
    hybrid kernel that considers the proposals:
        λi | β, p̄ ∼ Gamma(t_i p_i + α, β + 1)
        β | λ ∼ Gamma(nα + γ, δ + Σ λ_i)
    With the given prior parameters.
    Input: a (float), c (float), d (float), tam (int), w1 (float)
    Output: array(tam x 11) (muestra)
    """
    D = np.array([[94.32, 5],[15.72, 1],[62.88, 5],[125.76, 14],[5.24, 3],
                  [31.44, 19],[1.05, 1],[1.05, 1],[2.1, 4],[10.48, 22]])
    n = len(D)
    dim = n + 1
    M = zeros((tam, dim))
    # Initial state
    M[0,n] = gamma(c, 1.0/(d))             # beta from its prior
    M[0,0:n] = gamma(a, 1.0/(M[0,n]), n)   # lambdas from their prior
    for i in range(1, tam):
        b = M[i-1,n]
        L = M[i-1,0:n]
        if Bernoulli(w1) == 1.0:
            # keep beta
            M[i,n] = b
            # move the lambdas
            for j in range(0, n):
                # M[i,j] = gamma(D[j,0]*D[j,1] + a, 1.0/(b+1))
                M[i,j] = gamma(D[j,1] + a, 1.0/(b + D[j,0]))
        else:
            # move beta
            M[i,n] = gamma(n*a + c, 1.0/(d + sum(L)))
            # keep the lambdas
            M[i,0:n] = L
    return M

def graficaColumnas(M):
    """
    Plots each λ column of the sample M as a trace together with its
    posterior mean. (The original docstring was a copy of bombasAguaMHMC's.)
    """
    r = 4
    c = 3
    f, axarr = plt.subplots(r, c, figsize=(8, 6), dpi=80)
    C = np.ones(len(M))
    for j in range(0, 9):
        p = np.mean(M[10:,j])
        # j//c: integer division (plain j/c yields a float under Python 3)
        axarr[j//c, j%c].plot(M[10:,j], 'o', markersize=2.5, alpha=0.3, color='#009999')
        axarr[j//c, j%c].plot(C*p, linewidth=3, color='#990033')
        axarr[j//c, j%c].set_title('lambda'+str(j+1))
        axarr[j//c, j%c].set_xticklabels([])
        print("El promedio de lambda"+str(j+1)+" es " + str(p))
    p = np.mean(M[10:,9])
    axarr[3, 1].plot(M[10:,9], 'o', markersize=2.5, alpha=0.3, color='#009999')
    axarr[3, 1].plot(C*np.mean(M[10:,9]), linewidth=3, color='#990033')
    axarr[3, 1].set_title('lambda10')
    print("El promedio de lamda10 es " + str(p))
    # Cosmetics
    f.subplots_adjust(hspace=0.4)
    for t in [0, 2]:
        axarr[3,t].spines['bottom'].set_color('white')
        axarr[3,t].spines['left'].set_color('white')
        axarr[3,t].spines['top'].set_color('white')
        axarr[3,t].spines['right'].set_color('white')
        for s in axarr[3,t].xaxis.get_ticklines():
            s.set_color('white')
        for s in axarr[3,t].yaxis.get_ticklines():
            s.set_color('white')
        plt.setp([axarr[3,t].get_yticklabels()], visible=False)
    for j in range(0, r):
        plt.setp([a.get_xticklabels() for a in axarr[j, :]], visible=False)
    plt.savefig('lambdas.png')
    plt.show()

def evalua(M):
    n = len(M)
    x = np.arange(0., n, 1)
    y = log(M)
    colors = np.arange(0, 1, 1.0/n)
    area = 30*np.one
s(n)
    plt.scatter(x, y, s=area, c=colors, alpha=0.8)
    plt.show()

if __name__ == "__main__":
    """
    1. Apply the Metropolis-Hastings algorithm considering c
identifier_body
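bombasAguaMHMC above is the classic pump-failure hierarchical model sampled with a two-block kernel: each λ_i has a Gamma full conditional given β, and β has a Gamma full conditional given the λ's. A compact Gibbs version that updates both blocks every sweep (instead of choosing one with weight w1) might look like the following; parameter names are illustrative:

import numpy as np

def pump_gibbs(t, p, alpha, gamma_, delta, n_iter, rng=None):
    # t: observation times, p: failure counts (both length-n arrays)
    rng = rng or np.random.default_rng()
    t = np.asarray(t, dtype=float)
    p = np.asarray(p, dtype=float)
    n = len(t)
    beta = rng.gamma(gamma_, 1.0 / delta)    # beta drawn from its prior
    out = np.empty((n_iter, n + 1))
    for i in range(n_iter):
        # lambda_i | beta, data ~ Gamma(p_i + alpha, rate beta + t_i)
        lam = rng.gamma(p + alpha, 1.0 / (beta + t))
        # beta | lambda ~ Gamma(n*alpha + gamma, rate delta + sum(lambda))
        beta = rng.gamma(n * alpha + gamma_, 1.0 / (delta + lam.sum()))
        out[i, :n], out[i, n] = lam, beta
    return out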
tarea8.py
area = 50*np.ones(n) plt.scatter(x, y, s=area, c=colors, alpha=0.8) grafBivariada(m,sigma) plt.savefig('bivariadaMH'+str(ro)+'-'+str(n)+'.png') plt.title('Muestra de tamano '+str(n)+ ' con rho = '+ str(ro)) plt.show() def densidad2(a,l,T,b,c): n=len(T) r1=1.0 for t in T: r1 = r1 * t sa=0 for t in T: sa = sa + t**a suma=0 for t in T: suma = suma + log(t) return ((n+a-1)*log(l)) - (l*(b+sa)) + (log(b)*a ) + (log(a)*n) + (a-1)*suma - (c*a) - loggamma(a) def grafTiempos(M,T,b,c): """ Función que grafica bivariada """ A=min(0.1, min(M[:,0])) B=max(M[:,0]) C=min(0.1, min(M[:,1])) D=max(M[:,1]) #Contornos delta = 0.25 x = np.arange(A, B+ 2*delta, delta) y = np.arange(C, D+ 2*delta, delta) X, Y = np.meshgrid(x, y) Z = densidad2(X,Y,T,b,c) ma= np.amax(Z) mi= np.amin(Z) plt.figure() plt.contour(X, Y, Z, levels=np.arange(mi,ma,5)) def NormalMHMC(m,sigma,w1,tam): """ Aplica el algoritmo de Metropolis-Hastings considerando como funcion objetivo la distribución normal bivariada con Kerneles híbridos dados por las siguientes propuestas: q1 ((x01, x02)|(x1, x2)) = f_X1|X2(x01|x2)1(x02 = x2) q2 ((x01, x02)|(x1, x2)) = fX2|X1(x02|x1)1(x01 = x1) Input: media, matriz de covarianza, tamaño (array, array, int) Output: muestra (array) """ dim = 2 M = zeros((tam, dim)) m1=m[0,0] m2=m[1,0] s1=np.sqrt(sigma[0,0]) s2=np.sqrt(sigma[1,1]) ro=(sigma[0,1])/(s1*s2) #Inical. M[0,0] = m1 M[0,1] = m2 for i in range(1,tam): if Bernoulli(w1)==1: x2=M[i-1,1] M[i,0]=normal(m1+ro*s1*(x2-m2)/s2, (s1**2)*(1-ro**2)) M[i,1]=M[i-1,1] else: x1=M[i-1,0] M[i,1]=normal(m2+ro*s2*(x1-m1)/s1, (s2**2)*(1-ro**2)) M[i,0]=M[i-1,0] return M def propuesta(numP,a,l,b,c,sigma,T): """ Devuelve distintas propuestas de acuerdo a la opción selecccionada. Input: int (opción deseada) Output: float, float (propuesta, ro) """ n=len(T) if numP>4 or numP<1: raise ValueError("No conozco esa propuesta") if numP==1: sa=0 for t in T: sa = sa + t**a #propuestas lp= gamma(a + n , 1.0/(b + sa)) ap= a #rho ro = 1 return ap,lp,ro elif numP==2: r1=1.0 for t in T: r1 = r1 * t #propuestas ap = gamma(n + 1 , 1.0/(-log(b)-log(r1)+c)) lp = l #rho sap=0 for t in T: sap = sap + t**ap sa=0 for t in T: sa = sa + t**a aux = float(loggamma(a)) + (ap-a)*l - l*sap-float(loggamma(ap)) + l*sa c=min(0,aux) return ap,lp,exp(c) elif numP==3: ap = exponential(c) lp = gamma(ap , 1.0/b) #rho sap=0.0 for t in T: sap = sap + t**ap sa=0.0 for t in T: sa = sa + t**a r1=1.0 for t in T: r1= r1*t aux = n*log((ap*lp)/(a*l)) + (ap-a)*log(r1)- lp*sap + l*sa c=min(0,aux) return ap,lp,exp(c) else: ap=a + normal(0,sigma) lp= l suma=0 for t in T: suma = suma + log(t) sap=0 for t in T: sap = sap + t**ap sa=0 for t in T: sa = sa + t**a aux = ap*log(l)-l*(b+sap)+ap*log(b)+n*log(ap)+(ap-1)*suma-c*ap-float(loggamma(ap))-a*log(l)+l*(b+sa)-a*log(b)-n*log(a)-(a-1)*suma+c*a+float(loggamma(a)) c=min(0,aux) return ap,lp,exp(c) def TiemposMHMC(c,b,sigma,tam,T): """ Aplica el algoritmo MH usando Kerneles híbridos para simular valores de
Propuesta 1: λp | α, t̄ ∼ Gamma(α + n, b + Σ_{i=1}^n t_i^α)
        Propuesta 2: αp | λ, t̄ ∼ Gamma(n + 1, −log(b) − log(r1) + c)
        Propuesta 3: αp ∼ exp(c) y λp | αp ∼ Gamma(αp, b)
        Propuesta 4 (RWMH): αp = α + ε, con ε ∼ N(0, σ) y dejando λ fijo.
    Con las distribuciones a priori dadas; datos simulados usando α = 1 y
    λ = 1, con n = 20, c = 1 y b = 1.
    Input: c, b, sigma (floats), tam (int), T (array)
    Output: muestra (array)
    """
    dim = 2
    M = zeros((tam, dim))
    ef = 0.0
    # Estado inicial
    M[0,0] = exponential(1)
    M[0,1] = gamma(M[0,0], 1)
    for i in range(1, tam):
        a = M[i-1][0]
        l = M[i-1][1]
        numP = int(4*rand(1)[0]) + 1
        R = propuesta(numP, a, l, b, c, sigma, T)
        ap = R[0]
        lp = R[1]
        ro = R[2]
        if Bernoulli(ro) == 1.0:
            M[i,0] = ap
            M[i,1] = lp
        else:
            M[i,0] = M[i-1,0]
            M[i,1] = M[i-1,1]
            ef = ef + 1
    print("Se rechazaron el " + str(ef*100.0/tam) + "% de las propuestas")
    return M

def bombasAguaMHMC(a, c, d, tam, w1):
    """
    Simula valores de la distribución posterior f(λ1, ..., λn, β | p̄),
    usando un kernel híbrido que considera las propuestas:
        λi | β, p̄ ∼ Gamma(t_i p_i + α, β + 1)
        β | λ ∼ Gamma(nα + γ, δ + Σ λ_i)
    Con parámetros a priori dados.
    Input: a (float), c (float), d (float), tam (int), w1 (float)
la distribución posterior f(α, λ | t̄) ∝ f(t̄ | α, λ) f(α, λ), considerando las siguientes propuestas:
random_line_split
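NormalMHMC in the surrounding rows alternates the two full conditionals of a bivariate normal, which is a Gibbs sampler (every move is accepted). One caveat: if normal here is numpy's, its second argument is a standard deviation, while the original passes the conditional variance (s1**2)*(1-ro**2); the sketch below takes the square root. Assumes m is a length-2 mean and cov a 2x2 covariance matrix:

import numpy as np

def bivariate_gibbs(m, cov, w1, n_steps, rng=None):
    rng = rng or np.random.default_rng()
    m1, m2 = float(m[0]), float(m[1])
    s1, s2 = np.sqrt(cov[0, 0]), np.sqrt(cov[1, 1])
    rho = cov[0, 1] / (s1 * s2)
    sd1 = s1 * np.sqrt(1.0 - rho ** 2)   # conditional std of X1 | X2
    sd2 = s2 * np.sqrt(1.0 - rho ** 2)   # conditional std of X2 | X1
    x = np.empty((n_steps, 2))
    x[0] = m1, m2
    for i in range(1, n_steps):
        x[i] = x[i - 1]
        if rng.random() < w1:            # update X1 | X2 with probability w1
            x[i, 0] = rng.normal(m1 + rho * s1 * (x[i, 1] - m2) / s2, sd1)
        else:                            # otherwise update X2 | X1
            x[i, 1] = rng.normal(m2 + rho * s2 * (x[i, 0] - m1) / s1, sd2)
    return x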
GraphAlgo.py
except Exception as e: print(e) print("load failed") flag = False finally: return flag def save_to_json(self, file_name: str) -> bool: """ Saves the graph in JSON format to a file @param file_name: The path to the out file @return: True if the save was successful, False o.w. """ flag = True with open(file_name, "w") as jsonFile: try: d = {"Edges": [], "Nodes": []} for src in self._graph.out_edges.keys(): for dst, w in self._graph.all_out_edges_of_node(src).items(): d["Edges"].append({"src": src, "w": w.weight, "dest": dst}) for key, value in self._graph.nodes.items(): if value.location is None: d["Nodes"].append({"id": key}) else: d["Nodes"].append({"pos": str(value.location), "id": key}) s = d.__str__() s = s.replace(" ", "") s = s.replace("'", "\"") jsonFile.write(s) # print("Save Json was succeeded ") except Exception as e: print("Save Json was failed ") print(e) flag = False finally: return flag def shortest_path(self, id1: int, id2: int) -> (float, list): """ * returns the the shortest path between src to dest - as an ordered List of nodes: * src--> n1-->n2-->...dest * Logic only was taken from: https://en.wikipedia.org/wiki/Shortest_path_problem * Note if no such path --> returns null; @Runtime: Regular BFS using a priority queue = O(|V|+|E|). @param id1 - start node @param id2 - end (target) node @return - the path between src and dest if there is one. """ # Edge cases # Either one of the nodes does not exist in the graph. if id1 not in self._graph.get_all_v() or id2 not in self._graph.get_all_v(): return float('inf'), [] if id1 == id2: # The path from a node to itself is empty and the total distance is 0 return 0, [] # Initialization src = id1 dest = id2 self.reset_tags() self.set_weights_infinity() prev_node = dict() # A map that stores: {key(int): caller(Node)} (Which node called which) pq = Queue() # A queue to prioritize nodes with lower weight visited = dict() # Keep track of visited nodes total_dist = 0.0 destination_found = False curr = self._graph.get_node(src) curr.weight = total_dist visited[curr.key] = True pq.put(curr) # Traverse while not pq.empty(): curr = pq.get() # Pop the next node with the lowest weight O(log(n)) neighbors = self._graph.all_out_edges_of_node(curr.key) # Neighbors of curr node for i in neighbors: # Iterate over neighbors of curr out_edge = neighbors[i] # out_edge: EdgeData neighbor = self._graph.get_node(out_edge.dest) # neighbor: NodeData if not visited.get(neighbor.key): # Process node if not visited total_dist = curr.weight + out_edge.weight if total_dist < neighbor.weight: neighbor.weight = total_dist prev_node.__setitem__(neighbor.key, curr) if neighbor not in pq.queue: # If not already in the queue, enqueue neighbor. 
pq.put(neighbor)
        # Finished processing curr's neighbors
        if curr.key == dest:
            destination_found = True
        visited[curr.key] = True
    if destination_found:
        path = self.rebuild_path(prev_node, src, dest)  # A list of nodes representing the path id1->id2
        total_dist = path[len(path) - 1].weight
        return total_dist, path
    return float('inf'), []

def rebuild_path(self, node_map: dict = None, src: int = 0, dest: int = 0) -> list:
    """
    * This method back-tracks: it takes a map of int keys and NodeData values,
    * inserts all nodes on the path into a list, and returns the list.
    """
    if node_map is None or src == dest:
        return None
    ans = [self._graph.get_node(dest)]
    next_node = node_map.get(dest)
    ans.append(next_node)
    while next_node.key != src:  # '!=' rather than 'is not': identity checks on ints are unreliable
        ans.append(node_map.get(next_node.key))
        next_node = node_map.get(next_node.key)
    if self._graph.get_node(src) not in ans:
        ans.append(self._graph.get_node(src))
    ans.reverse()
    return ans

def reset_tags(self):
    for key in self._graph.get_all_v().keys():
        node = self.get_graph().get_node(key)
        node.tag = 0

def set_weights_infinity(self):
    for key in self._graph.get_all_v().keys():
        node = self._graph.get_node(key)
        node.weight = float('inf')

def connected_component(self, id1: int) -> list:
    """
    * Finds the Strongly Connected Component (SCC) that node id1 is a part of.
    * Notes: If the graph is None or id1 is not in the graph, the function should return an empty list []
    @param id1: The node id
    @return: The list of nodes in the SCC
    """
    if self._graph is None or self._graph.get_node(id1) is None:
        return []
    self.reset_tags()  # this method runs BFS passes that tag nodes, so tags must be cleared first
    # Traverse the original graph, from node id1, and tag all reachable nodes
    ans = []
    src = id1  # alias
    original_graph = self.get_graph()
    self.traverse_breadth_first(src, original_graph)
    # Transpose/reverse the graph's edges
    transposed_graph = self.reverse_graph()
    # Traverse the transposed graph, from node id1, and tag all reachable nodes again
    self.traverse_breadth_first(src, transposed_graph)
    # Nodes tagged twice are reachable in both directions, i.e. in the SCC
    for key in transposed_graph.get_all_v():
        node = transposed_graph.get_node(key)
        if node.tag == 2:
            ans.append(self._graph.get_node(node.key))  # append the original node
    return ans

def traverse_breadth_first(self, src: int = 0, graph: GraphInterface = None):
    """
    * Traverses the graph from src with BFS, incrementing the tag of every reachable node.
    """
    if not isinstance(graph, DiGraph) or graph is None or self._graph.get_node(src) is None:
        return
    curr = graph.get_node(src)
    q = Queue()
    q.put(curr)
    curr.tag += 1
    while not q.empty():
        curr = q.get()
        out_edges = graph.all_out_edges_of_node(curr.key)
        for i in out_edges:
            out_edge = out_edges[i]
            neighbor = graph.get_node(out_edge.dest)  # get curr's neighbor
            if neighbor.tag == curr.tag - 1:
                neighbor.tag += 1  # if un-tagged -> tag it
                q.put(neighbor)    # and enqueue it

def reverse_graph(self) -> GraphInterface:
    """
    * This method transposes the graph.
    * The new graph has the same set of vertices V = {v1, v2, ..., vn},
    * and all edges reversed: E = {(v1,v2), (v2,v6), ...} becomes
    * E_t = {(v2,v1), (v6,v2), ...}.
    * @return a transposed directed weighted graph.
    """
    ans = DiGraph()
    nodes = self._graph.get_all_v()  # {key: NodeData}
    for key in nodes:
        ans.add_node(key)
        ans.get_node(key).tag = self._graph.get_node(key).tag
    for key in nodes:
        out_edges = self._graph
""" Loads a graph from a json file. @param file_name: The path to the json file @returns: True if the loading was successful, False o.w. """ flag = True try: with open(file_name, 'r') as jsonFile: load = json.load(jsonFile) graphJson = DiGraph() for node in load["Nodes"]: if "pos" in node: posJ = tuple(map(float, str(node["pos"]).split(","))) graphJson.add_node(node_id=node["id"], pos=posJ) else: graphJson.add_node(node_id=node["id"]) for edge in load["Edges"]: graphJson.add_edge(id1=edge["src"], id2=edge["dest"], weight=edge["w"]) self._graph = graphJson # print("load successes")
identifier_body
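shortest_path above documents a priority queue, but queue.Queue is FIFO: nodes are not popped lowest-weight-first, the O(log n) pop claim does not hold, and a node can be marked visited before its final distance is settled. A hedged sketch of the same computation with a real binary heap, over a plain adjacency dict {src: {dst: weight}} rather than the DiGraph class:

import heapq

def dijkstra(adj, src, dest):
    dist = {src: 0.0}
    prev = {}
    heap = [(0.0, src)]
    while heap:
        d, u = heapq.heappop(heap)       # pop the node with the lowest distance
        if u == dest:
            break
        if d > dist.get(u, float('inf')):
            continue                     # stale heap entry, skip
        for v, w in adj.get(u, {}).items():
            nd = d + w
            if nd < dist.get(v, float('inf')):
                dist[v], prev[v] = nd, u
                heapq.heappush(heap, (nd, v))
    if dest not in dist:
        return float('inf'), []          # no path
    path = [dest]
    while path[-1] != src:               # backtrack, as rebuild_path does
        path.append(prev[path[-1]])
    return dist[dest], path[::-1]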
GraphAlgo.py
next_node.key is not src: # Backtrack from dest to src ans.append(node_map.get(next_node.key)) next_node = node_map.get(next_node.key) if self._graph.get_node(src) not in ans: ans.append(self._graph.get_node(src)) ans.reverse() # Inserted from return ans def reset_tags(self): for key in self._graph.get_all_v().keys(): node = self.get_graph().get_node(key) node.tag = 0 def set_weights_infinity(self): for key in self._graph.get_all_v().keys(): node = self._graph.get_node(key) node.weight = float('inf') def connected_component(self, id1: int) -> list: """ * Finds the Strongly Connected Component(SCC) that node id1 is a part of. * Notes: If the graph is None or id1 is not in the graph, the function should return an empty list [] @param id1: The node id @return: The list of nodes in the SCC """ if self._graph is None or self._graph.get_node(id1) is None: return [] self.reset_tags() # This method executes a BFS and tag nodes so reset_tags() must be called. # Traverse the original graph, from node id1, and tag all reachable nodes ans = [] src = id1 # alias original_graph = self.get_graph() self.traverse_breadth_first(src, original_graph) # Transpose/Reverse graph's edges transposed_graph = self.reverse_graph() # Traverse the transposed graph, from node id1, and un-tag all reachable nodes self.traverse_breadth_first(src, transposed_graph) # Iterate over nodes in the transposed graph and find the nodes that are tagged twice! for key in transposed_graph.get_all_v(): node = transposed_graph.get_node(key) if node.tag == 2: ans.append(self._graph.get_node(node.key)) # Append original node return ans def traverse_breadth_first(self, src: int = 0, graph: GraphInterface = None): """ * This method is made to traverse any node in the graph and set tag on them using bfs algorithm. """ if not isinstance(graph, DiGraph) or graph is None or self._graph.get_node(src) is None: return curr = graph.get_node(src) q = Queue() q.put(curr) curr.tag += 1 while not q.empty(): curr = q.get() out_edges = graph.all_out_edges_of_node(curr.key) for i in out_edges: out_edge = out_edges[i] neighbor = graph.get_node(out_edge.dest) # Get curr's neighbor if neighbor.tag == curr.tag - 1: neighbor.tag += 1 # If un-tagged -> tag it. q.put(neighbor) # and enqueue it def reverse_graph(self) -> GraphInterface: """ * This method transposes the given graph. * The new graph will have the same set of vertices V = {v1, v2, .. , v(n)}, * And all transposed edges. E = {(v1,v2), (v2,v6), .. }, E(transposed) = {(v2,v1), (v6,v2), ..}. * @param g - the given graph. * @return a transposed directed_weighted_graph. """ ans = DiGraph() nodes = self._graph.get_all_v() # {key: NodeData} for key in nodes: ans.add_node(key) ans.get_node(key).tag = self._graph.get_node(key).tag for key in nodes: out_edges = self._graph.all_out_edges_of_node(key) for edge in out_edges: e = out_edges.get(edge) ans.add_edge(e.dest, e.src, e.weight) return ans def connected_components(self) -> List[list]: """ * This method finds all the Strongly Connected Components(SCC) in the graph. * Notes: If the graph is None the function should return an empty list [] @return: The list all SCC """ self.reset_tags() ans = [] visited = dict() # A dictionary of visited nodes for key in self._graph.get_all_v(): if not visited.get(key): path = self.connected_component(key) for node in path: visited.__setitem__(node.key, True) ans.append(path) return ans def plot_graph(self): """ Plots the graph. If the nodes have a position, the nodes will be placed there. 
Otherwise, they will be placed in a random but elegant manner using get_random_location() function. """ g = self.get_graph() plt.title("Our graph:" + g.__str__()) plt.xlabel("X") plt.ylabel("-<") # I should flip 'Y' letter so I decided to write it by a tricky way. :) for src, node in g.get_all_v().items(): # Print the node point if node.location is None: pos = self.get_random_location() # get a elegant location node.location = GeoLocation(pos) plt.plot(node.location.x, node.location.y, marker='o', markerfacecolor='red', markersize=3, color='yellow') plt.text(node.location.x, node.location.y, str(node.key)) # Print the edge line for dest in g.all_out_edges_of_node(src).keys(): x1 = g.get_all_v()[src].location.x y1 = g.get_all_v()[src].location.y if g.get_all_v()[dest].location is None: pos = self.get_random_location() g.get_all_v()[dest].location = GeoLocation(pos) g.get_all_v()[dest].location = GeoLocation(pos) x2 = g.get_all_v()[dest].location.x y2 = g.get_all_v()[dest].location.y plt.arrow(x1, y1, x2 - x1, y2 - y1, width=0.00001, linewidth=0.05) plt.show() def get_random_location(self): """ * This method was made to return a random location for a node when then node doesn't have any location. * How it work? * We get the max and min of the bounding box and then we set the nodes location on a random range inside it. * if there is no bounding box , which means there is no node location enough to set this bounding box, * so we set the nodes location in a range of x=[32,33],y=[35,36],z=0. """ max_x, max_y, max_z, min_x, min_y, min_z = self.get_max_and_min() if max_x == float('-inf') and min_x == float('inf') and max_y == float('-inf') and min_y == float('inf') and \ max_z == float('-inf') and min_z == float('inf'): x = random.uniform(32, 33) y = random.uniform(35, 36) z = 0 ans = x, y, z return ans counter = 0 for src, node in self._graph.get_all_v().items(): if node.location is not None: counter += 1 x = random.uniform(max_x, min_x) y = random.uniform(max_y, min_y) z = random.uniform(max_z, min_z) if counter == 0: # means all nodes doesn't have any location x = random.uniform(32, 33) y = random.uniform(35, 36) z = 0 ans = x, y, z else: ans = x, y, z return ans def get_max_and_min(self): """ This method get the max and min of the bounding box on current graph. @return max and min of bounding box , o.w -inf&inf """ max_x = float('-inf') min_x = float('inf') max_y = float('-inf') min_y = float('inf') max_z = float('-inf') min_z = float('inf') ans = max_x, max_y, max_z, min_x, min_y, min_z counter = 0 for src, node in self._graph.get_all_v().items(): if node.location is not None: x = node.location.x y = node.location.y z = node.location.z counter += 1 max_x = x if x > max_x else max_x min_x = x if x < min_x else min_x max_y = y if y > max_y else max_y min_y = y if y < min_y else min_y max_z = z if z > max_z else max_z min_z = z if z < min_z else min_z if counter > 4: ans = max_x, max_y, max_z, min_x, min_y, min_z return ans def
__repr__
identifier_name
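connected_component uses the classic two-pass trick: v belongs to the SCC of s iff v is reachable from s in G and also reachable from s in the transpose G^T (equivalently, s is reachable from v in G). The tag-twice bookkeeping above encodes exactly that intersection; a set-based sketch over plain adjacency dicts:

from collections import deque

def reachable(adj, s):
    # plain BFS returning every node reachable from s
    seen = {s}
    q = deque([s])
    while q:
        u = q.popleft()
        for v in adj.get(u, {}):
            if v not in seen:
                seen.add(v)
                q.append(v)
    return seen

def scc_of(adj, adj_t, s):
    # adj_t is the transposed adjacency; the SCC of s is the intersection
    return reachable(adj, s) & reachable(adj_t, s)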
GraphAlgo.py
def save_to_json(self, file_name: str) -> bool: """ Saves the graph in JSON format to a file @param file_name: The path to the out file @return: True if the save was successful, False o.w. """ flag = True with open(file_name, "w") as jsonFile: try: d = {"Edges": [], "Nodes": []} for src in self._graph.out_edges.keys(): for dst, w in self._graph.all_out_edges_of_node(src).items(): d["Edges"].append({"src": src, "w": w.weight, "dest": dst}) for key, value in self._graph.nodes.items(): if value.location is None: d["Nodes"].append({"id": key}) else: d["Nodes"].append({"pos": str(value.location), "id": key}) s = d.__str__() s = s.replace(" ", "") s = s.replace("'", "\"") jsonFile.write(s) # print("Save Json was succeeded ") except Exception as e: print("Save Json was failed ") print(e) flag = False finally: return flag def shortest_path(self, id1: int, id2: int) -> (float, list): """ * returns the the shortest path between src to dest - as an ordered List of nodes: * src--> n1-->n2-->...dest * Logic only was taken from: https://en.wikipedia.org/wiki/Shortest_path_problem * Note if no such path --> returns null; @Runtime: Regular BFS using a priority queue = O(|V|+|E|). @param id1 - start node @param id2 - end (target) node @return - the path between src and dest if there is one. """ # Edge cases # Either one of the nodes does not exist in the graph. if id1 not in self._graph.get_all_v() or id2 not in self._graph.get_all_v(): return float('inf'), [] if id1 == id2: # The path from a node to itself is empty and the total distance is 0 return 0, [] # Initialization src = id1 dest = id2 self.reset_tags() self.set_weights_infinity() prev_node = dict() # A map that stores: {key(int): caller(Node)} (Which node called which) pq = Queue() # A queue to prioritize nodes with lower weight visited = dict() # Keep track of visited nodes total_dist = 0.0 destination_found = False curr = self._graph.get_node(src) curr.weight = total_dist visited[curr.key] = True pq.put(curr) # Traverse while not pq.empty(): curr = pq.get() # Pop the next node with the lowest weight O(log(n)) neighbors = self._graph.all_out_edges_of_node(curr.key) # Neighbors of curr node for i in neighbors: # Iterate over neighbors of curr out_edge = neighbors[i] # out_edge: EdgeData neighbor = self._graph.get_node(out_edge.dest) # neighbor: NodeData if not visited.get(neighbor.key): # Process node if not visited total_dist = curr.weight + out_edge.weight if total_dist < neighbor.weight: neighbor.weight = total_dist prev_node.__setitem__(neighbor.key, curr) if neighbor not in pq.queue: # If not already in the queue, enqueue neighbor. pq.put(neighbor) # Finished processing curr's neighbors if curr.key == dest: destination_found = True visited[curr.key] = True if destination_found: path = self.rebuild_path(prev_node, src, dest) # A list of nodes that represents the path between id1->id2 total_dist = path[len(path) - 1].weight return total_dist, path return float('inf'), [] def rebuild_path(self, node_map: dict = None, src: int = 0, dest: int = 0) -> list: """ * This method back-tracks, takes a map of int keys and NodeData values * inserts all nodes in the path to a list and return the list """ if node_map is None or src == dest: return None ans = [self._graph.get_node(dest)] next_node = node_map.get(dest) ans.append(next_node) while next_node.key is not src: # Backtrack from dest to src ans.append(node_map.get(next_node.key)) next_node = node_map.get(next_node.key) if self._graph.get_node(src) not in ans:
ans.reverse() # Inserted from return ans def reset_tags(self): for key in self._graph.get_all_v().keys(): node = self.get_graph().get_node(key) node.tag = 0 def set_weights_infinity(self): for key in self._graph.get_all_v().keys(): node = self._graph.get_node(key) node.weight = float('inf') def connected_component(self, id1: int) -> list: """ * Finds the Strongly Connected Component(SCC) that node id1 is a part of. * Notes: If the graph is None or id1 is not in the graph, the function should return an empty list [] @param id1: The node id @return: The list of nodes in the SCC """ if self._graph is None or self._graph.get_node(id1) is None: return [] self.reset_tags() # This method executes a BFS and tag nodes so reset_tags() must be called. # Traverse the original graph, from node id1, and tag all reachable nodes ans = [] src = id1 # alias original_graph = self.get_graph() self.traverse_breadth_first(src, original_graph) # Transpose/Reverse graph's edges transposed_graph = self.reverse_graph() # Traverse the transposed graph, from node id1, and un-tag all reachable nodes self.traverse_breadth_first(src, transposed_graph) # Iterate over nodes in the transposed graph and find the nodes that are tagged twice! for key in transposed_graph.get_all_v(): node = transposed_graph.get_node(key) if node.tag == 2: ans.append(self._graph.get_node(node.key)) # Append original node return ans def traverse_breadth_first(self, src: int = 0, graph: GraphInterface = None): """ * This method is made to traverse any node in the graph and set tag on them using bfs algorithm. """ if not isinstance(graph, DiGraph) or graph is None or self._graph.get_node(src) is None: return curr = graph.get_node(src) q = Queue() q.put(curr) curr.tag += 1 while not q.empty(): curr = q.get() out_edges = graph.all_out_edges_of_node(curr.key) for i in out_edges: out_edge = out_edges[i] neighbor = graph.get_node(out_edge.dest) # Get curr's neighbor if neighbor.tag == curr.tag - 1: neighbor.tag += 1 # If un-tagged -> tag it. q.put(neighbor) # and enqueue it def reverse_graph(self) -> GraphInterface: """ * This method transposes the given graph. * The new graph will have the same set of vertices V = {v1, v2, .. , v(n)}, * And all transposed edges. E = {(v1,v2), (v2,v6), .. }, E(transposed) = {(v2,v1), (v6,v2), ..}. * @param g - the given graph. * @return a transposed directed_weighted_graph. """ ans = DiGraph() nodes = self._graph.get_all_v() # {key: NodeData} for key in nodes: ans.add_node(key) ans.get_node(key).tag = self._graph.get_node(key).tag for key in nodes: out_edges = self._graph.all_out_edges_of_node(key) for edge in out_edges: e = out_edges.get(edge) ans.add_edge(e.dest, e.src, e.weight) return ans def connected_components(self) -> List[list]: """ * This method finds all the Strongly Connected Components(SCC) in the graph. * Notes: If the graph is None the function should return an empty list [] @return: The list all SCC """ self.reset_tags() ans = [] visited = dict() # A dictionary of visited nodes for key in self._graph.get_all_v(): if not visited.get(key): path = self.connected_component(key) for node in path: visited.__setitem__(node.key, True) ans.append(path) return ans def plot_graph(self): """ Plots the graph. If the nodes have a position, the nodes will be placed there. Otherwise, they will be placed in a
ans.append(self._graph.get_node(src))
conditional_block
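get_random_location in the rows above falls back to a fixed window when too few nodes carry positions, and otherwise samples uniformly inside the bounding box computed by get_max_and_min. The same placement logic condensed into one function; assumes points is a list of (x, y, z) tuples or None, and mirrors the original's "counter > 4" threshold:

import random

def random_location(points, min_known=5):
    located = [pt for pt in points if pt is not None]
    if len(located) < min_known:
        # fallback window used by the original: x in [32,33], y in [35,36], z = 0
        return random.uniform(32, 33), random.uniform(35, 36), 0.0
    xs, ys, zs = zip(*located)
    return (random.uniform(min(xs), max(xs)),
            random.uniform(min(ys), max(ys)),
            random.uniform(min(zs), max(zs)))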
GraphAlgo.py
def save_to_json(self, file_name: str) -> bool: """ Saves the graph in JSON format to a file @param file_name: The path to the out file @return: True if the save was successful, False o.w. """ flag = True with open(file_name, "w") as jsonFile: try: d = {"Edges": [], "Nodes": []} for src in self._graph.out_edges.keys(): for dst, w in self._graph.all_out_edges_of_node(src).items(): d["Edges"].append({"src": src, "w": w.weight, "dest": dst}) for key, value in self._graph.nodes.items(): if value.location is None: d["Nodes"].append({"id": key}) else: d["Nodes"].append({"pos": str(value.location), "id": key}) s = d.__str__() s = s.replace(" ", "") s = s.replace("'", "\"") jsonFile.write(s) # print("Save Json was succeeded ") except Exception as e: print("Save Json was failed ") print(e) flag = False finally: return flag def shortest_path(self, id1: int, id2: int) -> (float, list): """ * returns the the shortest path between src to dest - as an ordered List of nodes: * src--> n1-->n2-->...dest * Logic only was taken from: https://en.wikipedia.org/wiki/Shortest_path_problem * Note if no such path --> returns null; @Runtime: Regular BFS using a priority queue = O(|V|+|E|). @param id1 - start node @param id2 - end (target) node @return - the path between src and dest if there is one. """ # Edge cases # Either one of the nodes does not exist in the graph. if id1 not in self._graph.get_all_v() or id2 not in self._graph.get_all_v(): return float('inf'), [] if id1 == id2: # The path from a node to itself is empty and the total distance is 0 return 0, [] # Initialization src = id1 dest = id2 self.reset_tags() self.set_weights_infinity() prev_node = dict() # A map that stores: {key(int): caller(Node)} (Which node called which) pq = Queue() # A queue to prioritize nodes with lower weight visited = dict() # Keep track of visited nodes total_dist = 0.0 destination_found = False curr = self._graph.get_node(src) curr.weight = total_dist visited[curr.key] = True pq.put(curr) # Traverse while not pq.empty(): curr = pq.get() # Pop the next node with the lowest weight O(log(n)) neighbors = self._graph.all_out_edges_of_node(curr.key) # Neighbors of curr node for i in neighbors: # Iterate over neighbors of curr out_edge = neighbors[i] # out_edge: EdgeData neighbor = self._graph.get_node(out_edge.dest) # neighbor: NodeData if not visited.get(neighbor.key): # Process node if not visited total_dist = curr.weight + out_edge.weight if total_dist < neighbor.weight: neighbor.weight = total_dist prev_node.__setitem__(neighbor.key, curr) if neighbor not in pq.queue: # If not already in the queue, enqueue neighbor. 
pq.put(neighbor) # Finished processing curr's neighbors if curr.key == dest: destination_found = True visited[curr.key] = True if destination_found: path = self.rebuild_path(prev_node, src, dest) # A list of nodes that represents the path between id1->id2 total_dist = path[len(path) - 1].weight return total_dist, path return float('inf'), [] def rebuild_path(self, node_map: dict = None, src: int = 0, dest: int = 0) -> list: """ * This method back-tracks, takes a map of int keys and NodeData values * inserts all nodes in the path to a list and return the list """ if node_map is None or src == dest: return None ans = [self._graph.get_node(dest)] next_node = node_map.get(dest) ans.append(next_node) while next_node.key is not src: # Backtrack from dest to src ans.append(node_map.get(next_node.key)) next_node = node_map.get(next_node.key) if self._graph.get_node(src) not in ans: ans.append(self._graph.get_node(src)) ans.reverse() # Inserted from return ans def reset_tags(self): for key in self._graph.get_all_v().keys(): node = self.get_graph().get_node(key) node.tag = 0 def set_weights_infinity(self): for key in self._graph.get_all_v().keys(): node = self._graph.get_node(key) node.weight = float('inf') def connected_component(self, id1: int) -> list: """ * Finds the Strongly Connected Component(SCC) that node id1 is a part of.
if self._graph is None or self._graph.get_node(id1) is None: return [] self.reset_tags() # This method executes a BFS and tag nodes so reset_tags() must be called. # Traverse the original graph, from node id1, and tag all reachable nodes ans = [] src = id1 # alias original_graph = self.get_graph() self.traverse_breadth_first(src, original_graph) # Transpose/Reverse graph's edges transposed_graph = self.reverse_graph() # Traverse the transposed graph, from node id1, and un-tag all reachable nodes self.traverse_breadth_first(src, transposed_graph) # Iterate over nodes in the transposed graph and find the nodes that are tagged twice! for key in transposed_graph.get_all_v(): node = transposed_graph.get_node(key) if node.tag == 2: ans.append(self._graph.get_node(node.key)) # Append original node return ans def traverse_breadth_first(self, src: int = 0, graph: GraphInterface = None): """ * This method is made to traverse any node in the graph and set tag on them using bfs algorithm. """ if not isinstance(graph, DiGraph) or graph is None or self._graph.get_node(src) is None: return curr = graph.get_node(src) q = Queue() q.put(curr) curr.tag += 1 while not q.empty(): curr = q.get() out_edges = graph.all_out_edges_of_node(curr.key) for i in out_edges: out_edge = out_edges[i] neighbor = graph.get_node(out_edge.dest) # Get curr's neighbor if neighbor.tag == curr.tag - 1: neighbor.tag += 1 # If un-tagged -> tag it. q.put(neighbor) # and enqueue it def reverse_graph(self) -> GraphInterface: """ * This method transposes the given graph. * The new graph will have the same set of vertices V = {v1, v2, .. , v(n)}, * And all transposed edges. E = {(v1,v2), (v2,v6), .. }, E(transposed) = {(v2,v1), (v6,v2), ..}. * @param g - the given graph. * @return a transposed directed_weighted_graph. """ ans = DiGraph() nodes = self._graph.get_all_v() # {key: NodeData} for key in nodes: ans.add_node(key) ans.get_node(key).tag = self._graph.get_node(key).tag for key in nodes: out_edges = self._graph.all_out_edges_of_node(key) for edge in out_edges: e = out_edges.get(edge) ans.add_edge(e.dest, e.src, e.weight) return ans def connected_components(self) -> List[list]: """ * This method finds all the Strongly Connected Components(SCC) in the graph. * Notes: If the graph is None the function should return an empty list [] @return: The list all SCC """ self.reset_tags() ans = [] visited = dict() # A dictionary of visited nodes for key in self._graph.get_all_v(): if not visited.get(key): path = self.connected_component(key) for node in path: visited.__setitem__(node.key, True) ans.append(path) return ans def plot_graph(self): """ Plots the graph. If the nodes have a position, the nodes will be placed there. Otherwise, they will be placed in a random
* Notes: If the graph is None or id1 is not in the graph, the function should return an empty list [] @param id1: The node id @return: The list of nodes in the SCC """
random_line_split
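save_to_json above serialises by calling str() on a dict and patching quotes with replace(), which breaks as soon as a value contains a space or a quote character. A safer sketch with the json module, producing the same {"Edges": [...], "Nodes": [...]} layout; the graph attribute names (out_edges, nodes, all_out_edges_of_node) follow the original code:

import json

def save_graph(graph, file_name):
    d = {"Edges": [], "Nodes": []}
    for src in graph.out_edges.keys():
        for dst, e in graph.all_out_edges_of_node(src).items():
            d["Edges"].append({"src": src, "w": e.weight, "dest": dst})
    for key, node in graph.nodes.items():
        entry = {"id": key}
        if node.location is not None:
            entry["pos"] = str(node.location)
        d["Nodes"].append(entry)
    with open(file_name, "w") as f:
        json.dump(d, f)   # handles quoting and escaping correctly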
cvssV3.controller.js
Factory, PlotService, $sce, $filter, $log) { var vm = this; // these variables hold the Final computed scores that will be displayed // on the page, the Model if you will vm.showAlert = false; vm.impactScore = 'NA'; vm.exploitScore = 'NA'; vm.baseScore = 'NA'; vm.temporalScore = 'NA'; vm.environScore = 'NA'; vm.modImpactScore = 'NA'; // only displayed if Environmental selections made vm.overallScore = 'NA'; vm.cvssString = $sce.trustAsHtml('NA'); vm.cveId = ''; // normally no Vuln Id is present, see Hidden fields vm.initComplete = false; // these data structures contain the Titles and Values associated with each possible score // we expose them here so that the vector titles and selections can be bound to buttons on the page vm.baseData = BaseDataFactory.baseData; console.log('vm.baseData', vm.baseData); vm.baseSelect = BaseDataFactory.baseSelect; vm.temporalData = TemporalDataFactory.temporalData; vm.temporalSelect = TemporalDataFactory.temporalSelect; vm.environData = EnvironDataFactory.environData; vm.environSelect = EnvironDataFactory.environSelect; // Scope functions vm.setScore = setScore; vm.changeBase = changeBase; vm.hiddenCheck = hiddenCheck; vm.initPage = initPage; // used to hide TEST widgets from public view, NORMALLY leave this as false !!!! // see Readme file vm.testing = false; vm.testData = ''; // will only be available in test mode // Define Controller functions function initPage () { // Applying styles to all buttons var formButtons = $('div#cvss-calculator-form input[type="button"]'); angular.forEach(formButtons, function(button) { button.className = 'btn btn-default'; }); // !!!!!!!!!!!!!!!!!!!!!!!! // !!!!!!!! CALL function to initialize plots and check for existence of // !!!!!!!! hidden fields which WILL affect the page display. // Note: we only want the PlotService initialized once because it is // used by both versions. PlotService.plotMethods.init('V3'); // initialize plots and tool tips hiddenCheck(); // Complete vm.initComplete = true; } /*** * Called each time a button is clicked, changes styles of other buttons and calls "setScore()" * @param evnt - $event - Always * @param selectName - name of model, either base, environ, or temporal * @param model - name of model being changed on selectName, like ac, an, etc. * @param choice - value of available item chosen by the user */ function changeBase (evnt, selectName, model, choice) { if (!selectName || !evnt || !model || !choice) { $log.error('changeBase was called with null items'); $log.debug('data: (evnt, selectName, model, choice)', evnt, selectName, model, choice); return; } // checking available data var itemSelect = selectName + 'Select', itemData = selectName + 'Data'; if (selectName !== 'base' && selectName !== 'environ' && selectName !== 'temporal') { $log.error('changeBase was called with bad selectName'); $log.debug('data: (selectName)', selectName); return; } // clearing the styles of the other buttons in that collection var parent = evnt.target.parentElement; var childButtons = $(parent).children('input[type="button"]'); angular.forEach(childButtons, function(childButton) { childButton.className = 'btn btn-default'; }); // applying choice to model vm[itemSelect][model] = vm[itemData][choice].title; // updating button class evnt.target.className = 'btn btn-primary active'; setScore(); } /** * This function is called on EVERY button click to determine if scores need to be * computed. ALL base metric buttons must be selected before any computations will * occur. This is like the 'main' function, kinda. 
*/ function setScore() { console.log('setScore()'); // do we have all the Base selections? if (!(vm.baseSelect.isReady())) { vm.showAlert = true; // show alert message stating Base selections must be made return; } // We are ready to compute scores // clear all model values, hide the alert message clearScores(); vm.showAlert = false; var vectorStr; // compute Base scores var result = BaseCalcService.calculateScores(); var debugStr = result.debugStr; // set model vars from calculation results vm.impactScore = toFixed1(result.impactScore); vm.exploitScore = toFixed1(result.exploitScore); vm.baseScore = toFixed1(result.baseScore); vectorStr = vm.baseSelect.getVector(); vm.overallScore = vm.baseScore; // overall will be Base score for now // compute Temporal scores, IF selections have been made result.temporalProduct = 1; // default if there are no Temporal selections, used by Environ calculation if(vm.temporalSelect.hasSelections()) { result = TemporalCalcService.calculateScores(vm.baseScore); vm.temporalScore = toFixed1(result.temporalScore); vectorStr += vm.temporalSelect.getVector(); debugStr += result.debugStr; vm.overallScore = vm.temporalScore; // temporal will be overall } // compute Environ scores, IF selections have been made if(vm.environSelect.hasSelections()) { result = EnvironCalcService.calculateScores(result.temporalProduct); vm.environScore = toFixed1(result.environScore); vm.modImpactScore = toFixed1(result.mimpactScore); vectorStr += vm.environSelect.getVector(); debugStr += result.debugStr; vm.overallScore = vm.environScore; // environ will be overall } // set the CVSS vector string, will be displayed as a link var href = '/vuln-metrics/cvss/v3-calculator?vector=' + vectorStr; var link = '<a href="' + href + '" target="_blank">' + vectorStr + '</a>'; vm.cvssString = $sce.trustAsHtml(link); updateBarCharts(); // update the page charts }; /** * updateBarCharts() is intended to invoke legacy js code that mostly deals with the bar * chart plotting and some hidden html fields. That functionality was packaged into * an angular service called PlotService */ function updateBarCharts() { // if some sections do not have computed scores (no selections made), then instead of the default // string value, we need to pass empty string '' to the plotservice var temporalTmp = vm.temporalScore; if(temporalTmp == 'NA') temporalTmp = ''; var modImpactTmp = vm.modImpactScore; if(modImpactTmp == 'NA') modImpactTmp = ''; var environTmp = vm.environScore; if(environTmp == 'NA') environTmp = ''; vm.callService = PlotService.plotMethods.displayScoresAngular(vm.baseScore, vm.impactScore, vm.exploitScore, temporalTmp, environTmp, modImpactTmp, vm.overallScore); } /** * Clears the cvss selection data, model scores and charts on the page */ vm.clearAll = function() { // clear user selections vm.baseSelect.clearSelect(); vm.temporalSelect.clearSelect(); vm.environSelect.clearSelect(); vm.showAlert = false; clearScores(); // clear computed scores, the Model variables // reset the bar plots, using the default NA string value does not work // previously we passed 0's but this was causing a 0.0 to appear on the x-axis near the // title so I modified to empty strings vm.callService = PlotService.plotMethods.displayScoresAngular('', '', '', '', '', '', ''); }; /** * Get the tooltip from the ToolTipFactory based on a type string. 
* @param type
     * @returns {*}
     */
    vm.getTip = function(type) {
        var tooltip = TooltipFactory.tooltipData[type];
        return tooltip;
    }

    // Clears the model variables used on the page
    function clearScores()
/**
     * Only used during Testing to test the calculations. This function can be activated via
     * some button click on a page using ng-click
     */
    vm.testScores = function() {
        vm.testData = BaseCalcService.testCombs();
    };

    /**
     * Utilizes a filter to
{
        vm.impactScore = 'NA';
        vm.exploitScore = 'NA';
        vm.baseScore = 'NA';
        vm.temporalScore = 'NA';
        vm.environScore = 'NA';
        vm.modImpactScore = 'NA';
        vm.overallScore = 'NA';
        vm.cvssString = $sce.trustAsHtml('NA');
    }
identifier_body
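The AngularJS controller above delegates the arithmetic to BaseCalcService, whose internals are not shown. For reference, a sketch of the published CVSS v3.0 base-score formula that such a service computes; av, ac, pr, ui, c, i, a are the numeric metric weights from the specification, and this is independent of the actual service code:

import math

def roundup(x):
    # CVSS v3.0 "round up to one decimal place"
    return math.ceil(x * 10) / 10.0

def base_score(av, ac, pr, ui, c, i, a, scope_changed=False):
    iss = 1 - (1 - c) * (1 - i) * (1 - a)            # Impact Sub-Score base
    if scope_changed:
        impact = 7.52 * (iss - 0.029) - 3.25 * (iss - 0.02) ** 15
    else:
        impact = 6.42 * iss
    exploitability = 8.22 * av * ac * pr * ui
    if impact <= 0:
        return 0.0
    if scope_changed:
        return roundup(min(1.08 * (impact + exploitability), 10))
    return roundup(min(impact + exploitability, 10))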
cvssV3.controller.js
Factory, PlotService, $sce, $filter, $log) { var vm = this; // these variables hold the Final computed scores that will be displayed // on the page, the Model if you will vm.showAlert = false; vm.impactScore = 'NA'; vm.exploitScore = 'NA'; vm.baseScore = 'NA'; vm.temporalScore = 'NA'; vm.environScore = 'NA'; vm.modImpactScore = 'NA'; // only displayed if Environmental selections made vm.overallScore = 'NA'; vm.cvssString = $sce.trustAsHtml('NA'); vm.cveId = ''; // normally no Vuln Id is present, see Hidden fields vm.initComplete = false; // these data structures contain the Titles and Values associated with each possible score // we expose them here so that the vector titles and selections can be bound to buttons on the page vm.baseData = BaseDataFactory.baseData; console.log('vm.baseData', vm.baseData); vm.baseSelect = BaseDataFactory.baseSelect; vm.temporalData = TemporalDataFactory.temporalData; vm.temporalSelect = TemporalDataFactory.temporalSelect; vm.environData = EnvironDataFactory.environData; vm.environSelect = EnvironDataFactory.environSelect; // Scope functions vm.setScore = setScore; vm.changeBase = changeBase; vm.hiddenCheck = hiddenCheck; vm.initPage = initPage; // used to hide TEST widgets from public view, NORMALLY leave this as false !!!! // see Readme file vm.testing = false; vm.testData = ''; // will only be available in test mode // Define Controller functions function initPage () { // Applying styles to all buttons var formButtons = $('div#cvss-calculator-form input[type="button"]'); angular.forEach(formButtons, function(button) { button.className = 'btn btn-default'; }); // !!!!!!!!!!!!!!!!!!!!!!!! // !!!!!!!! CALL function to initialize plots and check for existence of // !!!!!!!! hidden fields which WILL affect the page display. // Note: we only want the PlotService initialized once because it is // used by both versions. PlotService.plotMethods.init('V3'); // initialize plots and tool tips hiddenCheck(); // Complete vm.initComplete = true; } /*** * Called each time a button is clicked, changes styles of other buttons and calls "setScore()" * @param evnt - $event - Always * @param selectName - name of model, either base, environ, or temporal * @param model - name of model being changed on selectName, like ac, an, etc. * @param choice - value of available item chosen by the user */ function changeBase (evnt, selectName, model, choice) { if (!selectName || !evnt || !model || !choice) { $log.error('changeBase was called with null items'); $log.debug('data: (evnt, selectName, model, choice)', evnt, selectName, model, choice); return; } // checking available data var itemSelect = selectName + 'Select', itemData = selectName + 'Data'; if (selectName !== 'base' && selectName !== 'environ' && selectName !== 'temporal') { $log.error('changeBase was called with bad selectName'); $log.debug('data: (selectName)', selectName); return; } // clearing the styles of the other buttons in that collection var parent = evnt.target.parentElement; var childButtons = $(parent).children('input[type="button"]'); angular.forEach(childButtons, function(childButton) { childButton.className = 'btn btn-default'; }); // applying choice to model vm[itemSelect][model] = vm[itemData][choice].title; // updating button class evnt.target.className = 'btn btn-primary active'; setScore(); } /** * This function is called on EVERY button click to determine if scores need to be * computed. ALL base metric buttons must be selected before any computations will * occur. This is like the 'main' function, kinda. 
*/ function
() { console.log('setScore()'); // do we have all the Base selections? if (!(vm.baseSelect.isReady())) { vm.showAlert = true; // show alert message stating Base selections must be made return; } // We are ready to compute scores // clear all model values, hide the alert message clearScores(); vm.showAlert = false; var vectorStr; // compute Base scores var result = BaseCalcService.calculateScores(); var debugStr = result.debugStr; // set model vars from calculation results vm.impactScore = toFixed1(result.impactScore); vm.exploitScore = toFixed1(result.exploitScore); vm.baseScore = toFixed1(result.baseScore); vectorStr = vm.baseSelect.getVector(); vm.overallScore = vm.baseScore; // overall will be Base score for now // compute Temporal scores, IF selections have been made result.temporalProduct = 1; // default if there are no Temporal selections, used by Environ calculation if(vm.temporalSelect.hasSelections()) { result = TemporalCalcService.calculateScores(vm.baseScore); vm.temporalScore = toFixed1(result.temporalScore); vectorStr += vm.temporalSelect.getVector(); debugStr += result.debugStr; vm.overallScore = vm.temporalScore; // temporal will be overall } // compute Environ scores, IF selections have been made if(vm.environSelect.hasSelections()) { result = EnvironCalcService.calculateScores(result.temporalProduct); vm.environScore = toFixed1(result.environScore); vm.modImpactScore = toFixed1(result.mimpactScore); vectorStr += vm.environSelect.getVector(); debugStr += result.debugStr; vm.overallScore = vm.environScore; // environ will be overall } // set the CVSS vector string, will be displayed as a link var href = '/vuln-metrics/cvss/v3-calculator?vector=' + vectorStr; var link = '<a href="' + href + '" target="_blank">' + vectorStr + '</a>'; vm.cvssString = $sce.trustAsHtml(link); updateBarCharts(); // update the page charts }; /** * updateBarCharts() is intended to invoke legacy js code that mostly deals with the bar * chart plotting and some hidden html fields. That functionality was packaged into * an angular service called PlotService */ function updateBarCharts() { // if some sections do not have computed scores (no selections made), then instead of the default // string value, we need to pass empty string '' to the plotservice var temporalTmp = vm.temporalScore; if(temporalTmp == 'NA') temporalTmp = ''; var modImpactTmp = vm.modImpactScore; if(modImpactTmp == 'NA') modImpactTmp = ''; var environTmp = vm.environScore; if(environTmp == 'NA') environTmp = ''; vm.callService = PlotService.plotMethods.displayScoresAngular(vm.baseScore, vm.impactScore, vm.exploitScore, temporalTmp, environTmp, modImpactTmp, vm.overallScore); } /** * Clears the cvss selection data, model scores and charts on the page */ vm.clearAll = function() { // clear user selections vm.baseSelect.clearSelect(); vm.temporalSelect.clearSelect(); vm.environSelect.clearSelect(); vm.showAlert = false; clearScores(); // clear computed scores, the Model variables // reset the bar plots, using the default NA string value does not work // previously we passed 0's but this was causing a 0.0 to appear on the x-axis near the // title so I modified to empty strings vm.callService = PlotService.plotMethods.displayScoresAngular('', '', '', '', '', '', ''); }; /** * Get the tooltip from the ToolTipFactory based on a type string. 
* @param type * @returns {*} */ vm.getTip = function(type) { var tooltip = TooltipFactory.tooltipData[type]; return tooltip; } // Clears the model variables used on the page function clearScores() { vm.impactScore = 'NA'; vm.exploitScore = 'NA'; vm.baseScore = 'NA'; vm.temporalScore = 'NA'; vm.environScore = 'NA'; vm.modImpactScore = 'NA'; vm.overallScore = 'NA'; vm.cvssString = $sce.trustAsHtml('NA'); } /** * Only used during Testing to test the calculations. This function can be activated via * some button click on a page using ng-click */ vm.testScores = function() { vm.testData = BaseCalcService.testCombs(); }; /** * Utilizes a filter to
setScore
identifier_name
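Per the CVSS v3 specification, the temporal adjustment applied when the user has made temporal selections is a simple product over the base score, rounded up. A sketch of what TemporalCalcService.calculateScores would return for the score itself (the service internals are not shown here, so this is an assumption about its arithmetic, not its code):

import math

def roundup(x):
    return math.ceil(x * 10) / 10.0

def temporal_score(base, e=1.0, rl=1.0, rc=1.0):
    # e: Exploit Code Maturity, rl: Remediation Level, rc: Report Confidence
    return roundup(base * e * rl * rc)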
cvssV3.controller.js
vm.temporalData = TemporalDataFactory.temporalData; vm.temporalSelect = TemporalDataFactory.temporalSelect; vm.environData = EnvironDataFactory.environData; vm.environSelect = EnvironDataFactory.environSelect; // Scope functions vm.setScore = setScore; vm.changeBase = changeBase; vm.hiddenCheck = hiddenCheck; vm.initPage = initPage; // used to hide TEST widgets from public view, NORMALLY leave this as false !!!! // see Readme file vm.testing = false; vm.testData = ''; // will only be available in test mode // Define Controller functions function initPage () { // Applying styles to all buttons var formButtons = $('div#cvss-calculator-form input[type="button"]'); angular.forEach(formButtons, function(button) { button.className = 'btn btn-default'; }); // !!!!!!!!!!!!!!!!!!!!!!!! // !!!!!!!! CALL function to initialize plots and check for existence of // !!!!!!!! hidden fields which WILL affect the page display. // Note: we only want the PlotService initialized once because it is // used by both versions. PlotService.plotMethods.init('V3'); // initialize plots and tool tips hiddenCheck(); // Complete vm.initComplete = true; } /*** * Called each time a button is clicked, changes styles of other buttons and calls "setScore()" * @param evnt - $event - Always * @param selectName - name of model, either base, environ, or temporal * @param model - name of model being changed on selectName, like ac, an, etc. * @param choice - value of available item chosen by the user */ function changeBase (evnt, selectName, model, choice) { if (!selectName || !evnt || !model || !choice) { $log.error('changeBase was called with null items'); $log.debug('data: (evnt, selectName, model, choice)', evnt, selectName, model, choice); return; } // checking available data var itemSelect = selectName + 'Select', itemData = selectName + 'Data'; if (selectName !== 'base' && selectName !== 'environ' && selectName !== 'temporal') { $log.error('changeBase was called with bad selectName'); $log.debug('data: (selectName)', selectName); return; } // clearing the styles of the other buttons in that collection var parent = evnt.target.parentElement; var childButtons = $(parent).children('input[type="button"]'); angular.forEach(childButtons, function(childButton) { childButton.className = 'btn btn-default'; }); // applying choice to model vm[itemSelect][model] = vm[itemData][choice].title; // updating button class evnt.target.className = 'btn btn-primary active'; setScore(); } /** * This function is called on EVERY button click to determine if scores need to be * computed. ALL base metric buttons must be selected before any computations will * occur. This is like the 'main' function, kinda. */ function setScore() { console.log('setScore()'); // do we have all the Base selections? 
if (!(vm.baseSelect.isReady())) { vm.showAlert = true; // show alert message stating Base selections must be made return; } // We are ready to compute scores // clear all model values, hide the alert message clearScores(); vm.showAlert = false; var vectorStr; // compute Base scores var result = BaseCalcService.calculateScores(); var debugStr = result.debugStr; // set model vars from calculation results vm.impactScore = toFixed1(result.impactScore); vm.exploitScore = toFixed1(result.exploitScore); vm.baseScore = toFixed1(result.baseScore); vectorStr = vm.baseSelect.getVector(); vm.overallScore = vm.baseScore; // overall will be Base score for now // compute Temporal scores, IF selections have been made result.temporalProduct = 1; // default if there are no Temporal selections, used by Environ calculation if(vm.temporalSelect.hasSelections()) { result = TemporalCalcService.calculateScores(vm.baseScore); vm.temporalScore = toFixed1(result.temporalScore); vectorStr += vm.temporalSelect.getVector(); debugStr += result.debugStr; vm.overallScore = vm.temporalScore; // temporal will be overall } // compute Environ scores, IF selections have been made if(vm.environSelect.hasSelections()) { result = EnvironCalcService.calculateScores(result.temporalProduct); vm.environScore = toFixed1(result.environScore); vm.modImpactScore = toFixed1(result.mimpactScore); vectorStr += vm.environSelect.getVector(); debugStr += result.debugStr; vm.overallScore = vm.environScore; // environ will be overall } // set the CVSS vector string, will be displayed as a link var href = '/vuln-metrics/cvss/v3-calculator?vector=' + vectorStr; var link = '<a href="' + href + '" target="_blank">' + vectorStr + '</a>'; vm.cvssString = $sce.trustAsHtml(link); updateBarCharts(); // update the page charts }; /** * updateBarCharts() is intended to invoke legacy js code that mostly deals with the bar * chart plotting and some hidden html fields. That functionality was packaged into * an angular service called PlotService */ function updateBarCharts() { // if some sections do not have computed scores (no selections made), then instead of the default // string value, we need to pass empty string '' to the plotservice var temporalTmp = vm.temporalScore; if(temporalTmp == 'NA') temporalTmp = ''; var modImpactTmp = vm.modImpactScore; if(modImpactTmp == 'NA') modImpactTmp = ''; var environTmp = vm.environScore; if(environTmp == 'NA') environTmp = ''; vm.callService = PlotService.plotMethods.displayScoresAngular(vm.baseScore, vm.impactScore, vm.exploitScore, temporalTmp, environTmp, modImpactTmp, vm.overallScore); } /** * Clears the cvss selection data, model scores and charts on the page */ vm.clearAll = function() { // clear user selections vm.baseSelect.clearSelect(); vm.temporalSelect.clearSelect(); vm.environSelect.clearSelect(); vm.showAlert = false; clearScores(); // clear computed scores, the Model variables // reset the bar plots, using the default NA string value does not work // previously we passed 0's but this was causing a 0.0 to appear on the x-axis near the // title so I modified to empty strings vm.callService = PlotService.plotMethods.displayScoresAngular('', '', '', '', '', '', ''); }; /** * Get the tooltip from the ToolTipFactory based on a type string. 
 * @param type
 * @returns {*}
 */
vm.getTip = function(type) {
    var tooltip = TooltipFactory.tooltipData[type];
    return tooltip;
};

// Clears the model variables used on the page
function clearScores() {
    vm.impactScore = 'NA';
    vm.exploitScore = 'NA';
    vm.baseScore = 'NA';
    vm.temporalScore = 'NA';
    vm.environScore = 'NA';
    vm.modImpactScore = 'NA';
    vm.overallScore = 'NA';
    vm.cvssString = $sce.trustAsHtml('NA');
}

/**
 * Only used during testing to verify the calculations. This function can be
 * activated via a button click on a page using ng-click.
 */
vm.testScores = function() {
    vm.testData = BaseCalcService.testCombs();
};

/**
 * Uses a filter to display a number with 1 digit after the decimal point,
 * so whole numbers like 5 are displayed as 5.0.
 * @param number
 * @returns {string}
 */
function toFixed1(number) {
    return $filter('number')(number, 1);
}

/**
 * HIDDEN FIELD handling =========================================================
 * Check for the existence of the hidden fields containing data.
 * One hidden field is a vector string and the other is a Vuln Id (CveId). If
 * these are present, the page needs to be set up to perform computations and
 * display results based on these hidden field values.
 */
function hiddenCheck() {
    if (PlotService.hiddenVector.length > 0) {
        var vectorStr = PlotService.hiddenVector;
        // set calculator selections based on the vector string
        BaseDataFactory.setValues(vectorStr);
        TemporalDataFactory.setValues(vectorStr);
        EnvironDataFactory.setValues(vectorStr);
        vm.setScore(); // compute scores and update the display
        $log.debug('baseSelect', vm.baseSelect);
    }
}
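// Usage sketch (illustrative; not taken from the real page template): each
// metric button is wired to changeBase via ng-click, passing the $event, the
// metric group ('base', 'temporal' or 'environ'), the metric key, and the
// chosen data key. 'AC_L' below is a hypothetical key into vm.baseData:
//
//   <input type="button" value="Low (L)"
//          ng-click="vm.changeBase($event, 'base', 'ac', 'AC_L')">
//
// changeBase then copies vm.baseData['AC_L'].title into vm.baseSelect.ac and
// calls setScore(), which recomputes once all Base selections are present.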
svg.go
func New(sp *svg.SVG, width, height int, font string, fontsize int, background color.RGBA) *SvgGraphics {
	if font == "" {
		font = "Helvetica"
	}
	if fontsize == 0 {
		fontsize = 12
	}
	s := SvgGraphics{svg: sp, w: width, h: height, font: font, fs: fontsize, bg: background}
	return &s
}

// AddTo returns a new SvgGraphics which will write to a (width x height) sized
// area starting at (x,y) on the provided SVG.
func AddTo(sp *svg.SVG, x, y, width, height int, font string, fontsize int, background color.RGBA) *SvgGraphics {
	s := New(sp, width, height, font, fontsize, background)
	s.tx, s.ty = x, y
	return s
}

func (sg *SvgGraphics) Options() chart.PlotOptions {
	return nil
}

func (sg *SvgGraphics) Begin() {
	font, fs := sg.font, sg.fs
	if font == "" {
		font = "Helvetica"
	}
	if fs == 0 {
		fs = 12
	}
	sg.svg.Gstyle(fmt.Sprintf("font-family: %s; font-size: %d", font, fs))
	if sg.tx != 0 || sg.ty != 0 {
		sg.svg.Gtransform(fmt.Sprintf("translate(%d %d)", sg.tx, sg.ty))
	}

	bgc := fmt.Sprintf("#%02x%02x%02x", sg.bg.R, sg.bg.G, sg.bg.B)
	opa := fmt.Sprintf("%.4f", float64(sg.bg.A)/255)
	bgs := fmt.Sprintf("stroke: %s; opacity: %s; fill: %s; fill-opacity: %s", bgc, opa, bgc, opa)
	sg.svg.Rect(0, 0, sg.w, sg.h, bgs)
}

func (sg *SvgGraphics) End() {
	sg.svg.Gend()
	if sg.tx != 0 || sg.ty != 0 {
		sg.svg.Gend()
	}
}

func (sg *SvgGraphics) Background() (r, g, b, a uint8) {
	return sg.bg.R, sg.bg.G, sg.bg.B, sg.bg.A
}

func (sg *SvgGraphics) Dimensions() (int, int) {
	return sg.w, sg.h
}

func (sg *SvgGraphics) fontheight(font chart.Font) (fh int) {
	if sg.fs <= 14 {
		fh = sg.fs + int(font.Size)
	} else if sg.fs <= 20 {
		fh = sg.fs + 2*int(font.Size)
	} else {
		fh = sg.fs + 3*int(font.Size)
	}
	if fh == 0 {
		fh = 12
	}
	return
}

func (sg *SvgGraphics) FontMetrics(font chart.Font) (fw float32, fh int, mono bool) {
	if font.Name == "" {
		font.Name = sg.font
	}
	fh = sg.fontheight(font)

	switch font.Name {
	case "Arial":
		fw, mono = 0.5*float32(fh), false
	case "Helvetica":
		fw, mono = 0.5*float32(fh), false
	case "Times":
		fw, mono = 0.51*float32(fh), false
	case "Courier":
		fw, mono = 0.62*float32(fh), true
	default:
		fw, mono = 0.75*float32(fh), false
	}

	return
}

func (sg *SvgGraphics) TextLen(t string, font chart.Font) int {
	return chart.GenericTextLen(sg, t, font)
}

var dashlength = [][]int{{}, {4, 1}, {1, 1}, {4, 1, 1, 1, 1, 1}, {4, 4}, {1, 3}}

func (sg *SvgGraphics) Line(x0, y0, x1, y1 int, style chart.Style) {
	s := linestyle(style)
	sg.svg.Line(x0, y0, x1, y1, s)
}

func (sg *SvgGraphics) Text(x, y int, t string, align string, rot int, f chart.Font) {
	if len(align) == 1 {
		align = "c" + align
	}
	_, fh, _ := sg.FontMetrics(f)

	trans := ""
	if rot != 0 {
		trans = fmt.Sprintf("transform=\"rotate(%d %d %d)\"", -rot, x, y)
	}

	// Hack: baseline alignment in SVG renderers is often broken, so adjust y manually.
	switch align[0] {
	case 'b':
		y += 0
	case 't':
		y += fh
	default:
		y += (4 * fh) / 10 // centered
	}

	s := "text-anchor:"
	switch align[1] {
	case 'l':
		s += "begin"
	case 'r':
		s += "end"
	default:
		s += "middle"
	}
	if f.Color != nil {
		s += "; fill:" + hexcol(f.Color)
	}
	if f.Name != "" {
		s += "; font-family:" + f.Name
	}
	if f.Size != 0 {
		s += fmt.Sprintf("; font-size: %d", fh)
	}

	sg.svg.Text(x, y, t, trans, s)
}

func (sg *SvgGraphics) Symbol(x, y int, style chart.Style) {
	st := ""
	filled := "fill:solid"
	empty := "fill:none"
	if style.SymbolColor != nil {
		st += "stroke:" + hexcol(style.SymbolColor)
		filled = "fill:" + hexcol(style.SymbolColor)
	}
	f := style.SymbolSize
	if f == 0 {
		f = 1
	}
	lw := 1
	if style.LineWidth > 1 {
		lw = style.LineWidth
	}

	const n = 5               // default size
	a := int(n*f + 0.5)       // standard
	b := int(n/2*f + 0.5)     // smaller
	c := int(1.155*n*f + 0.5) // triangle long dist
	d := int(0.577*n*f + 0.5) // triangle short dist
	e := int(0.866*n*f + 0.5) // diagonal

	sg.svg.Gstyle(fmt.Sprintf("%s; stroke-width: %d", st, lw))
	switch style.Symbol {
	case '*':
		sg.svg.Line(x-e, y-e, x+e, y+e)
		sg.svg.Line(x-e, y+e, x+e, y-e)
		fallthrough
	case '+':
		sg.svg.Line(x-a, y, x+a, y)
		sg.svg.Line(x, y-a, x, y+a)
	case 'X':
		sg.svg.Line(x-e, y-e, x+e, y+e)
		sg.svg.Line(x-e, y+e, x+e, y-e)
	case 'o':
		sg.svg.Circle(x, y, a, empty)
	case '0':
		sg.svg.Circle(x, y, a, empty)
		sg.svg.Circle(x, y, b, empty)
	case '.':
		if b >= 4 {
			b /= 2
		}
		sg.svg.Circle(x, y, b, empty)
	case '@':
		sg.svg.Circle(x, y, a, filled)
	case '=':
		sg.svg.Rect(x-e, y-e, 2*e, 2*e, empty)
	case '#':
		sg.svg.Rect(x-e, y-e, 2*e, 2*e, filled)
	case 'A':
		sg.svg.Polygon([]int{x - a, x + a, x}, []int{y + d, y + d, y - c}, filled)
	case '%':
		sg.svg.Polygon([]int{x - a, x + a, x}, []int{y + d, y + d, y - c}, empty)
	case 'W':
		sg.svg.Polygon([]int{x - a, x + a, x}, []int{y - c, y - c, y + d}, filled)
	case 'V':
		sg.svg.Polygon([]int{x - a, x + a, x}, []int{y - c, y - c, y + d}, empty)
	case 'Z':
		sg.svg.Polygon([]int{x - e, x, x + e, x}, []int{y, y + e, y, y - e}, filled)
	case '&':
		sg.svg.Polygon([]int{x - e, x, x + e, x}, []int{y, y + e, y, y - e}, empty)
	default:
		sg.svg.Text(x, y, "?", "text-anchor:middle; alignment-baseline:middle")
	}
	sg.svg.Gend()
}

func (sg *SvgGraphics) Rect(x, y, w, h int, style chart.Style) {
	var s string
	x, y, w, h = chart.SanitizeRect(x, y, w, h, style.LineWidth)
	linecol := style.LineColor
	if linecol != nil {
		s = fmt.Sprintf("stroke:%s; ", hexcol(linecol))
		s += fmt.Sprintf("stroke-opacity: %s; ", alpha(linecol))
	} else {
		s = "stroke:#808080; "
	}
	s += fmt.Sprintf("stroke-width: %d; ", style.LineWidth)
	if style.FillColor != nil {
		s += fmt.Sprintf("fill: %s; fill-opacity: %s", hexcol(style.FillColor), alpha(style.FillColor))
	} else {
		s += "fill-opacity: 0"
	}
	sg.svg.Rect(x, y, w, h, s)
	// GenericRect(sg, x, y, w, h, style) // TODO
}

func (sg *SvgGraphics) Path(x, y []int, style chart.Style) {
	n := len(x)
	if len(y) < n {
		n = len(y)
	}
	path := fmt.Sprintf("M %d,%d", x[0], y[0])
	for i := 1; i < n; i++ {
		path += fmt.Sprintf("L %d,%d", x[i], y[i])
	}
	st := linestyle(style)
	sg.svg.Path(path, st)
}

func (sg *SvgGraphics) Wedge(x, y, ro, ri int, phi, psi float64, style chart.Style) {
	panic("No Wedge() for SvgGraphics.")
}

func (sg *SvgGraphics) XAxis(xr chart.Range, ys, yms int, options chart.PlotOptions) {
	chart.GenericXAxis(sg, xr, ys, yms, options)
}

func (sg *SvgGraphics) YAxis(yr chart.Range, xs, xms int, options chart.PlotOptions) {
	chart.GenericYAxis(sg, yr, xs, xms, options)
}

func linestyle(style chart.Style) (s string) {
	lw := style.LineWidth
	if style.LineColor != nil {
		s = fmt.Sprintf("stroke:%s; ", hexcol(style.LineColor))
	}
	s += fmt.Sprintf("stroke-width: %d; fill:none; ", lw)
	s += fmt.Sprintf("opacity: %s; ", alpha(style.LineColor))
	if style.LineStyle != chart.SolidLine {
		s += "stroke-dasharray:"
		for _, d := range dashlength[style.LineStyle] {
			s += fmt.Sprintf(" %d", d*lw)
		}
	}
	return
}

func (sg *SvgGraphics) Scatter(points []chart.EPoint, plotstyle chart.PlotStyle, style chart.Style) {
	chart.GenericScatter(sg, points, plotstyle, style)
}
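// Usage sketch (not part of svg.go; a minimal example under assumptions): this
// backend appears to pair *svg.SVG from github.com/ajstarks/svgo with the
// chart package's Style/Font types. The package paths and Style fields below
// are illustrative assumptions, not verified API:
//
//	canvas := svg.New(os.Stdout) // svgo canvas writing SVG to stdout
//	canvas.Start(400, 300)
//	g := New(canvas, 400, 300, "Helvetica", 12, color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff})
//	g.Begin() // opens the font/style group (and a translate group when tx/ty are set)
//	g.Line(20, 280, 380, 20, chart.Style{LineColor: color.NRGBA{B: 0xff, A: 0xff}, LineWidth: 2})
//	g.Text(200, 150, "hello", "cc", 0, chart.Font{})
//	g.End() // closes the group(s) opened by Begin
//	canvas.End()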
transport_test.go
Option { return func(options *transportOptions) { options.jitter = func(n int64) int64 { return n } } } type peerExpectation struct { id string subscribers []string } func createPeerIdentifierMap(ids []string) map[string]peer.Identifier { pids := make(map[string]peer.Identifier, len(ids)) for _, id := range ids { pids[id] = &testIdentifier{id} } return pids } func
TestTransport(t *testing.T) {
	type testStruct struct {
		msg string

		// identifiers defines all the Identifiers that will be used in
		// the actions up front so they can be generated and passed as deps
		identifiers []string

		// subscriberDefs defines all the Subscribers that will be used in
		// the actions up front so they can be generated and passed as deps
		subscriberDefs []SubscriberDefinition

		// actions are the actions that will be applied against the transport
		actions []TransportAction

		// expectedPeers is a list of peers (and those peers' subscribers)
		// that are expected on the transport after the actions
		expectedPeers []peerExpectation
	}
	tests := []testStruct{
		{
			msg:         "one retain",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
			},
			actions: []TransportAction{
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
			},
			expectedPeers: []peerExpectation{
				{id: "i1", subscribers: []string{"s1"}},
			},
		},
		{
			msg:         "one retain one release",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
			},
			actions: []TransportAction{
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s1"},
			},
		},
		{
			msg:         "three retains",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
				{ID: "s2"},
				{ID: "s3"},
			},
			actions: []TransportAction{
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s2", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s3", ExpectedPeerID: "i1"},
			},
			expectedPeers: []peerExpectation{
				{id: "i1", subscribers: []string{"s1", "s2", "s3"}},
			},
		},
		{
			msg:         "three retains one release",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
				{ID: "s2r"},
				{ID: "s3"},
			},
			actions: []TransportAction{
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s2r", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s3", ExpectedPeerID: "i1"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s2r"},
			},
			expectedPeers: []peerExpectation{
				{id: "i1", subscribers: []string{"s1", "s3"}},
			},
		},
		{
			msg:         "three retains, three releases",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
				{ID: "s2"},
				{ID: "s3"},
			},
			actions: []TransportAction{
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s2", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s3", ExpectedPeerID: "i1"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s1"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s2"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s3"},
			},
		},
		{
			msg:         "no retains one release",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
			},
			actions: []TransportAction{
				ReleaseAction{
					InputIdentifierID: "i1",
					InputSubscriberID: "s1",
					ExpectedErrType:   peer.ErrTransportHasNoReferenceToPeer{},
				},
			},
		},
		{
			msg:         "one retain, one release (from different subscriber)",
			identifiers: []string{"i1"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
				{ID: "s2"},
			},
			actions: []TransportAction{
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
				ReleaseAction{
					InputIdentifierID: "i1",
					InputSubscriberID: "s2",
					ExpectedErrType:   peer.ErrPeerHasNoReferenceToSubscriber{},
				},
			},
			expectedPeers: []peerExpectation{
				{id: "i1", subscribers: []string{"s1"}},
			},
		},
		{
			msg:         "multi peer retain/release",
			identifiers: []string{"i1", "i2", "i3", "i4r", "i5r"},
			subscriberDefs: []SubscriberDefinition{
				{ID: "s1"},
				{ID: "s2"},
				{ID: "s3"},
				{ID: "s4"},
				{ID: "s5rnd"},
				{ID: "s6rnd"},
				{ID: "s7rnd"},
			},
			actions: []TransportAction{
				// Retains/Releases of i1 (Retain/Release the random peers at the end)
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s5rnd", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s6rnd", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s1", ExpectedPeerID: "i1"},
				RetainAction{InputIdentifierID: "i1", InputSubscriberID: "s2", ExpectedPeerID: "i1"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s5rnd"},
				ReleaseAction{InputIdentifierID: "i1", InputSubscriberID: "s6rnd"},

				// Retains/Releases of i2 (Retain then Release then Retain again)
				RetainAction{InputIdentifierID: "i2", InputSubscriberID: "s2", ExpectedPeerID: "i2"},
				RetainAction{InputIdentifierID: "i2", InputSubscriberID: "s3", ExpectedPeerID: "i2"},
				ReleaseAction{InputIdentifierID: "i2", InputSubscriberID: "s2"},
				ReleaseAction{InputIdentifierID: "i2", InputSubscriberID: "s3"},
				RetainAction{InputIdentifierID: "i2", InputSubscriberID: "s2", ExpectedPeerID: "i2"},
				RetainAction{InputIdentifierID: "i2", InputSubscriberID: "s3", ExpectedPeerID: "i2"},

				// Retains/Releases of i3 (Retain/Release unrelated sub, then retain two)
				RetainAction{InputIdentifierID: "i3", InputSubscriberID: "s7rnd", ExpectedPeerID: "i3"},
				ReleaseAction{InputIdentifierID: "i3", InputSubscriberID: "s7rnd"},
				RetainAction{InputIdentifierID: "i3", InputSubscriberID: "s3", ExpectedPeerID: "i3"},
				RetainAction{InputIdentifierID: "i3", InputSubscriberID: "s4", ExpectedPeerID: "i3"},

				// Retain/Release i4r on random subscriber
				RetainAction{InputIdentifierID: "i4r", InputSubscriberID: "s5rnd", ExpectedPeerID: "i4r"},
				ReleaseAction{InputIdentifierID: "i4r", InputSubscriberID: "s5rnd"},

				// Retain/Release i5r on already used subscriber
				RetainAction{InputIdentifierID: "i5r", InputSubscriberID: "s3", ExpectedPeerID: "i5r"},
				ReleaseAction{InputIdentifierID: "i5r", InputSubscriberID: "s3"},
			},
			expectedPeers: []peerExpectation{
				{id: "i1", subscribers: []string{"s1", "s2"}},
				{id: "i2", subscribers: []string{"s2", "s3"}},
				{id: "i3", subscribers: []string{"s3", "s4"}},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.msg, func(t *testing.T) {
			mockCtrl := gomock.NewController(t)
			defer mockCtrl.Finish()

			transport := NewTransport()
			defer transport.Stop()

			deps := TransportDeps{
				PeerIdentifiers: createPeerIdentifierMap(tt.identifiers),
				Subscribers:     CreateSubscriberMap(mockCtrl, tt.subscriberDefs),
			}
			ApplyTransportActions(t, transport, tt.actions, deps)

			assert.Len(t, transport.peers, len(tt.expectedPeers))
			for _, expectedPeerNode := range tt.expectedPeers {
				p, ok := transport.peers[expectedPeerNode.id]
				assert.True(t, ok)
				if assert.NotNil(t, p) {
					// expected: p's subscriber set matches expectedPeerNode.subscribers
				}
			}
		})
	}
}
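// Pattern sketch (not part of transport_test.go): the table above drives an
// action-based harness where each entry mutates the system under test and
// asserts along the way. A minimal self-contained version of the same idea,
// with hypothetical store/retain types standing in for Transport and
// RetainAction:

package example

import "testing"

// store is a stand-in for the transport under test.
type store struct{ subs map[string][]string }

// action mirrors the TransportAction shape: apply one step, asserting as you go.
type action interface{ apply(t *testing.T, s *store) }

// retain records a subscriber against a peer id.
type retain struct{ id, sub string }

func (r retain) apply(t *testing.T, s *store) {
	s.subs[r.id] = append(s.subs[r.id], r.sub)
}

func TestActionTable(t *testing.T) {
	s := &store{subs: map[string][]string{}}
	for _, a := range []action{retain{"i1", "s1"}, retain{"i1", "s2"}} {
		a.apply(t, s)
	}
	if got := len(s.subs["i1"]); got != 2 {
		t.Fatalf("want 2 subscribers for i1, got %d", got)
	}
}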
data_feature_engineering.py
                      'count_pos_word']], on=['id'], how='left')  # merge; processing for the raw training text
    # datas['labels'].fillna("remove", inplace=True)
    if fit:
        datas['solve'].fillna(-1, inplace=True)
        datas_labels = datas.dropna(subset=['labels'])
        datas_labels['index'] = datas_labels.index.values
        datas_labels = datas_labels[['id', 'chat_id', 'labels', 'index', 'count_pos_word']]

        def link_text(indexs):
            start_index = -1.0
            end_index = -1.0
            solve = -1.0
            index = indexs
            while start_index == -1.0:
                if datas.iloc[index].solve != -1.0:
                    solve = datas.iloc[index].solve
                    start_index = index
    else:
        error_labels_index = list(datas.dropna(subset=['labels']).index.values)
        labels_keywords = pd.read_excel('../test_data/label_keywords.xlsx')
        mark_words = ['没问题', '不客气', '仅限', '有的', '是的', '好的', '可以', '不了',
                      '不到', '谢谢', '对的', '没空', '不错', '没车', '到店', '没呢',
                      '清楚', '明白', '确认', '没法', '不到', '了解', '都是', '还没',
                      '比较', '地址', '不多', '没有', '放心', '嗯', '恩', '行', '没', '有']

        # label-specific logic (incorrect marks)
        def bad_mark(index):
            # handle incorrectly marked labels
            chart_id = datas.loc[index]['id']
            labels = datas.loc[index]['labels']
            if type(labels) != str:
                # NaN can appear here because of the logic below
                return
            # drop the label if the chat-window ID changes in the following lines
            try:
                if str(chart_id) != str(datas.loc[index + 5]['id']):
                    datas.loc[datas.index == index, 'labels'] = np.NaN
                    return
            except:
                return
            # drop the label if its text is too long
            if len(datas.loc[index]['text']) > 26:
                datas.loc[datas.index == index, 'labels'] = np.NaN
                return
            # if labels appear on consecutive lines, discard both
            if (index + 1) in error_labels_index:
                datas.loc[datas.index == index, 'labels'] = np.NaN
                datas.loc[datas.index == index + 1, 'labels'] = np.NaN
                return
            # discard the label if a line carries more than three labels
            if len(datas.loc[index]['labels'].split(',')) > 3:
                datas.loc[datas.index == index, 'labels'] = np.NaN
                return
            # with two or three labels, keep only those whose keywords appear in the following text
            if len(datas.loc[index]['labels'].split(',')) >= 2:
                labels = labels.split(',')
                # join the next four lines of text
                text = datas.loc[index:(index + 4), 'text'].values.sum()
                new_labels = []
                for label in labels:
                    if label == '蜘蛛智选':
                        continue
                    label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == label, '关键词'].values
                    for word in label_keywords:
                        if word in text:
                            new_labels.append(label)
                            break
                new_labels = ','.join(i for i in new_labels)
                datas.loc[datas.index == index, 'labels'] = new_labels
                return
            # keep the label if its keywords appear in the lines that follow it
            label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == labels, '关键词'].values
            for i in range(1, 6):
                text = datas.loc[index + i]['text']
                lk_num = 0
                mk_num = 0
                if type(text) != str:
                    continue
                if lk_num == 0:
                    for word in label_keywords:
                        if word in text:
                            lk_num = 1
                if mk_num == 0:
                    for word in mark_words:
                        if word in text:
                            mk_num = 1
                if lk_num + mk_num == 2:
                    return
            datas.loc[datas.index == index, 'labels'] = np.NaN
            return

        for index in error_labels_index:
            bad_mark(index)

        def omit_mark():
            # handle omitted marks:
            # remove the already-labeled text (including the five lines following each label)
            omit_label_index = list(datas.dropna(subset=['labels'])['labels'].index.values)
            drop_indexs = [i + j for i in omit_label_index for j in range(5)]
            drop_indexs = sorted(list(set(drop_indexs)))
            drop_datas = datas.drop(index=drop_indexs[:-5])
            drop_ids = drop_datas['id'].unique()
            groups_datas = pd.DataFrame({})

            def add_labels(texts):
                if len(texts) > 50 or len(texts) <= 4:
                    return np.NaN
                for lk_indexs, word in enumerate(labels_keywords['关键词']):
                    # the index is used to find the label (tag_lv2) that word belongs to
                    if word in str(texts):
                        labels = labels_keywords.loc[lk_indexs]['tag_lv2']
                        return labels
                return np.NaN

            def drop_labels(labels, drop_index):
                # labels is empty: return directly, nothing to do
                if type(labels) != str:
                    return np.NaN
                # avoid consecutive labels: the first labeled line wins
                try:
                    if drop_index > 1 and type(group_datas.loc[drop_index - 1, 'labels']) == str:
                        return np.NaN
                except:
                    return labels
                role = group_datas.loc[drop_index]['role']
                # when the role is MEMBER (not CUSTOMER) and the label is not empty,
                # check whether the following lines' role is still MEMBER
                try:
                    # this can raise when drop_index is the last row;
                    # adding 1 then goes past the bounds of group_datas
                    if role != 'CUSTOMER':
                        if group_datas.loc[drop_index + 1]['role'] != 'CUSTOMER':
                            return np.NaN
                    else:
                        role_num = 1
                        for i in range(1, 3):
                            if group_datas.loc[drop_index + i]['role'] == role:
                                role_num += 1
                        # if the labeled line's role is identical for three consecutive lines, return NaN
                        if role_num == 3:
                            return np.NaN
                except:
                    return np.NaN
                return labels

            # process the data grouped by chat-window ID:
            # first, add keyword labels to lines of text that contain a keyword;
            # second, decide whether labeled lines are worth keeping
            for drop_id in drop_ids:
identifier_name
data_feature_engineering.py
标签文本则保留下文中有对应关键词出现的文本 if len(datas.loc[index]['labels'].split(',')) >= 2: labels = labels.split(',') # 将下文四行文本连接 text = datas.loc[index:(index + 4), 'text'].values.sum() new_labels = [] for label in labels: if label == '蜘蛛智选': continue label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == label, '关键词'].values for word in label_keywords: if word in text: new_labels.append(label) break new_labels = ','.join(i for i in new_labels) datas.loc[datas.index == index, 'labels'] = new_labels return # 如果标签下文中出现了标签对应关键词则保留该标签 label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == labels, '关键词'].values for i in range(1, 6): text = datas.loc[index + i]['text'] lk_num = 0 mk_num = 0 if type(text) != str: continue if lk_num == 0: for word in label_keywords: if word in text: lk_num = 1 if mk_num == 0: for word in mark_words: if word in text: mk_num = 1 if lk_num + mk_num == 2: return datas.loc[datas.index == index, 'labels'] = np.NaN return for index in error_labels_index: bad_mark(index) def omit_mark(): # 针对遗漏标记做处理 # 去除文本中的标签文本(包含标签对应的下文五行内容) omit_label_index = list(datas.dropna(subset=['labels'])['labels'].index.values) drop_indexs = [i + j for i in omit_label_index for j in range(5)] drop_indexs = sorted(list(set(drop_indexs))) drop_datas = datas.drop(index=drop_indexs[:-5]) drop_ids = drop_datas['id'].unique() groups_datas = pd.DataFrame({}) def add_labels(texts): if len(texts) > 50 or len(texts) <= 4: return np.NaN for lk_indexs, word in enumerate(labels_keywords['关键词']): # index的作用是找到word对应的label也就是tag_lv2 if word in str(texts): labels = labels_keywords.loc[lk_indexs]['tag_lv2'] return labels return np.NaN def drop_labels(labels, drop_index): # labels为空直接返回无需操作 if type(labels) != str: return np.NaN # 确保不要出现连续的标签以第一个出现标签的文本为准 try: if drop_index > 1 and type(group_datas.loc[drop_index - 1, 'labels']) == str: return np.NaN except: return labels role = group_datas.loc[drop_index]['role'] # role为MEMBER且label不为空 # 则判断后续文本行的role是否还是MEMBER try: # 这里会报错的原因是drop_index是最后一个 # 加1的话则超出了group_datas的界限 if role != 'CUSTOMER': if group_datas.loc[drop_index + 1]['role'] != 'CUSTOMER': return np.NaN else: role_num = 1 for i in range(1, 3): if group_datas.loc[drop_index + i]['role'] == role: role_num += 1 # 如果标签文本对应的role连续三行都一致则返回NaN if role_num == 3: return np.NaN except: return np.NaN return labels # 将聊天框ID为一组数据进行处理 # 首先解决的是为包含关键词的 行文本 添加对应关键词标签 # 其次解决的是有标签的行文本是否值得保留标签 for drop_id in drop_ids: group_datas = drop_datas.loc[drop_datas['id'] == drop_id] group_datas['index'] = group_datas.index.values # 初选label group_datas['labels'] = group_datas.apply( lambda x: add_labels(x['text']), axis=1 ) group_datas['drop_labels'] = group_datas['labels'] # 现将group_datas.index按照长度设置之后需要将他的index变为原样 group_datas['labels'] = group_datas.apply( lambda x: drop_labels(x['drop_labels'], x['index']), axis=1 ) group_datas.drop(['drop_labels'], axis=1, inplace=True) # 其次下文三行内没有相关关键词和结束词的不要 drop_index_3 = list(group_datas.dropna(subset=['labels']).index.values) for d_index3 in drop_index_3: mk_num = 0 lk_num = 0 label = group_datas.loc[d_index3]['labels'] label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == label]['关键词'].values try: for d_num in range(1, 4): text = str(group_datas.loc[d_index3 + d_num]['text']) if lk_num <= 1: for lk in label_keywords: if lk in text: lk_num += 1 if mk_num <= 1: for mw in mark_words: if mw in text: mk_num += 1 if (lk_num + mk_num) == 2: break if (lk_num + mk_num) != 2: group_datas.loc[group_datas.index == d_index3, 'labels'] = np.NaN except: 
group_datas.loc[group_datas.index == d_index3, 'labels'] = np.NaN # 将以聊天框ID为维度的数据新添加的标签合并到数据中 group_datas = group_datas[['chat_id', 'labels']] group_datas.columns = ['chat_id', 'new_labels'] group_datas.dropna(subset=['new_labels'], inplace=True) groups_datas = pd.concat([groups_datas, group_datas], sort=True) return groups_datas merge_datas = omit_mark() datas = pd.merge(datas, merge_datas, on=['chat_id'], how='left') datas.to_excel('C:\\Users\\tzl17\\Desktop\\show.xlsx') def choose_labels(labels, new_labels): if type(labels) == str: return labels if type(new_labels) == str: return new_labels return np.NaN datas['labels'] = datas.apply( lambda x: choose_labels(x['labels'], x['new_labels']), axis=1 ) datas.drop('new_labels', axis=1, inplace=True) datas_labels = datas.dropna(subset=['labels']) datas_labels['index'] = datas_labels.index.values datas_labels = datas_labels[['id', 'chat_id', 'labels', 'index', 'count_pos_word']] def link_text(label_index): mark_num = 1 text = str(datas.loc[label_index]['text']) chat_id = datas.loc[label_index]['id'] solve = str(datas.loc[label_index]['solve']) index = label_index while mark_num <= 5: # 限制长度, 最硬性的标准如果超过了则直接反回 if len(text) > 130: text = text[:126] return text + str(solve) index = index + 1 # 如果标签对应文本的下行文本不属于同一个id则直接返回None # 如果不是下一行则返回当前连接的text if datas.loc[index]['id'] != chat_id: if index == label_index + 1: return None else: return text + solve text_pro = datas.loc[index]['text'] try: text_pro2 = datas.loc[index + 1]['text'] # 判断文本中是否出现mark_words做出相应的处理 text = text + ' ' + text_pro for mark_word in mark_words: if mark_word in text_pro: # 如果文本中出现了mark_words则直接连接到后面 mark_word2_num = 0 for mark_word2 in mark_words: if mark_word2 in text_pro2: mark_word2_num = 1 break if mark_word2_num == 0: return text + solve else: break mark_num = mark_num + 1 except: return None return text + solve datas_labels['text'] = datas_labels['index'].apply( lambda x: link_text(x) ) # 由于部分文本的solve并不是对应的标签所处的列因此需要特殊处理 # solve所处的位置为text的最后三位 datas_labels.dropna(subset=['text'], inplace=True) datas_labels['solve'] = datas_labels['text'].apply( lambda x: x[-3:] ) datas_labels['text'] = datas_labels['text'].apply( lambda x: x[:-3] ) # 拆分, 分配, 连接(多标签)
    # find the multi-label texts and pull them out on their own
conditional_block
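Each record above is a fill-in-the-middle (FIM) sample: prefix, middle, and suffix are consecutive slices of the named source file, and fim_type (here conditional_block) names what kind of span was masked out. Below is a minimal sketch, not part of the dump, of how such a row reassembles and how it might be turned into a training string; the sentinel tokens are a hypothetical choice, only the field names come from the records themselves.

FIM_SENTINELS = {"prefix": "<fim_prefix>", "suffix": "<fim_suffix>", "middle": "<fim_middle>"}

def reassemble(row: dict) -> str:
    """A row's original file fragment is prefix + middle + suffix."""
    return row["prefix"] + row["middle"] + row["suffix"]

def to_fim_prompt(row: dict) -> str:
    """PSM-style training string: the model predicts `middle` after the last sentinel."""
    return (FIM_SENTINELS["prefix"] + row["prefix"]
            + FIM_SENTINELS["suffix"] + row["suffix"]
            + FIM_SENTINELS["middle"] + row["middle"])

row = {"file_name": "example.py", "fim_type": "conditional_block",
       "prefix": "if ready:\n", "middle": "    run()\n", "suffix": "done()\n"}
assert reassemble(row) == "if ready:\n    run()\ndone()\n"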
data_feature_engineering.py
                 'count_pos_word']], on=['id'], how='left')  # join; processing for the raw training text
# datas['labels'].fillna("remove", inplace=True)
if fit:
    datas['solve'].fillna(-1, inplace=True)
    datas_labels = datas.dropna(subset=['labels'])
    datas_labels['index'] = datas_labels.index.values
    datas_labels = datas_labels[['id', 'chat_id', 'labels', 'index', 'count_pos_word']]

    def link_text(indexs):
        start_index = -1.0
        end_index = -1.0
        solve = -1.0
        index = indexs
        while start_index == -1.0:
            if datas.iloc[index].solve != -1.0:
                solve = datas.iloc[index].solve
                start_index = in
dex
            index = index - 1
        index = indexs + 1
        while end_index == -1.0:
            if datas.iloc[index].solve != -1.0:
                end_index = index
            index = index + 1
        text = ''
        for i in range(start_index, end_index + 1):
            text = text + " " + datas.iloc[i].text
        return text + str(solve)

    datas_labels['text'] = datas_labels['index'].apply(
        lambda x: link_text(x)
    )
identifier_body
data_feature_engineering.py
})
id_count_words.columns = ['count_pos_word']
id_count_words.reset_index(inplace=True)
id_count_words = pd.DataFrame(id_count_words)
datas = pd.merge(datas, id_count_words[['id', 'count_pos_word']],
                 on=['id'], how='left')  # join; processing for the raw training text
# datas['labels'].fillna("remove", inplace=True)
if fit:
    datas['solve'].fillna(-1, inplace=True)
    datas_labels = datas.dropna(subset=['labels'])
    datas_labels['index'] = datas_labels.index.values
    datas_labels = datas_labels[['id', 'chat_id', 'labels', 'index', 'count_pos_word']]

    def link_text(indexs):
        start_index = -1.0
        end_index = -1.0
        solve = -1.0
        index = indexs
        while start_index == -1.0:
            if datas.iloc[index].solve != -1.0:
                solve = datas.iloc[index].solve
                start_index = index
            index = index - 1
        index = indexs + 1
        while end_index == -1.0:
            if datas.iloc[index].solve != -1.0:
                end_index = index
            index = index + 1
        text = ''
        for i in range(start_index, end_index + 1):
            text = text + " " + datas.iloc[i].text
        return text + str(solve)

    datas_labels['text'] = datas_labels['index'].apply(
        lambda x: link_text(x)
    )
else:
    error_labels_index = list(datas.dropna(subset=['labels']).index.values)
    labels_keywords = pd.read_excel('../test_data/label_keywords.xlsx')
    mark_words = ['没问题', '不客气', '仅限', '有的', '是的', '好的', '可以',
                  '不了', '不到', '谢谢', '对的', '没空', '不错', '没车', '到店',
                  '没呢', '清楚', '明白', '确认', '没法', '不到', '了解', '都是',
                  '还没', '比较', '地址', '不多', '没有', '放心', '嗯', '恩',
                  '行', '没', '有']

    # logic for the labels (mislabelled rows)
    def bad_mark(index):
        # handle wrong annotations
        chart_id = datas.loc[index]['id']
        labels = datas.loc[index]['labels']
        if type(labels) != str:
            # NaN can appear here because of the logic below
            return
        # drop the label if the chat-window ID changes in the lines below
        try:
            if str(chart_id) != str(datas.loc[index + 5]['id']):
                datas.loc[datas.index == index, 'labels'] = np.NaN
                return
        except:
            return
        # drop the label if the labelled text is too long
        if len(datas.loc[index]['text']) > 26:
            datas.loc[datas.index == index, 'labels'] = np.NaN
            return
        # labels on consecutive lines: give up both of them
        if (index + 1) in error_labels_index:
            datas.loc[datas.index == index, 'labels'] = np.NaN
            datas.loc[datas.index == index + 1, 'labels'] = np.NaN
            return
        # a text with more than three labels: give up the label
        if len(datas.loc[index]['labels'].split(',')) > 3:
            datas.loc[datas.index == index, 'labels'] = np.NaN
            return
        # texts with two or more labels: keep only the labels whose keywords appear in the lines below
        if len(datas.loc[index]['labels'].split(',')) >= 2:
            labels = labels.split(',')
            # join the next four lines of text together
            text = datas.loc[index:(index + 4), 'text'].values.sum()
            new_labels = []
            for label in labels:
                if label == '蜘蛛智选':
                    continue
                label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == label, '关键词'].values
                for word in label_keywords:
                    if word in text:
                        new_labels.append(label)
                        break
            new_labels = ','.join(i for i in new_labels)
            datas.loc[datas.index == index, 'labels'] = new_labels
            return
        # keep the label if one of its keywords appears in the lines below it
        label_keywords = labels_keywords.loc[labels_keywords['tag_lv2'] == labels, '关键词'].values
        for i in range(1, 6):
            text = datas.loc[index + i]['text']
            lk_num = 0
            mk_num = 0
            if type(text) != str:
                continue
            if lk_num == 0:
                for word in label_keywords:
                    if word in text:
                        lk_num = 1
            if mk_num == 0:
                for word in mark_words:
                    if word in text:
                        mk_num = 1
            if lk_num + mk_num == 2:
                return
        datas.loc[datas.index == index, 'labels'] = np.NaN
        return

    for index in error_labels_index:
        bad_mark(index)

    def omit_mark():
        # handle labels that the annotators missed
        # remove the already-labelled texts (each label plus the five lines below it)
        omit_label_index = list(datas.dropna(subset=['labels'])['labels'].index.values)
        drop_indexs = [i + j for i in omit_label_index for j in range(5)]
        drop_indexs = sorted(list(set(drop_indexs)))
        drop_datas = datas.drop(index=drop_indexs[:-5])
        drop_ids = drop_datas['id'].unique()
        groups_datas = pd.DataFrame({})

        def add_labels(texts):
# per text id, count how many times the positive words appeared in the text
random_line_split
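The cleaning pass in data_feature_engineering.py above hinges on one heuristic: a label survives only if one of its keywords and one of the closing words (mark_words) both show up within a few lines after the labelled line. A self-contained sketch of that rule follows, with a toy keyword table standing in for label_keywords.xlsx; the column names mirror the script, the data is made up.

import pandas as pd

keywords = pd.DataFrame({"tag_lv2": ["价格", "价格"], "关键词": ["报价", "优惠"]})
mark_words = ["好的", "可以", "谢谢"]

def keep_label(label, following_lines):
    # keep `label` only if a label keyword AND a closing word both occur
    # somewhere in the lines that follow the labelled line
    label_kw = keywords.loc[keywords["tag_lv2"] == label, "关键词"].values
    has_kw = any(w in line for line in following_lines for w in label_kw)
    has_mark = any(w in line for line in following_lines for w in mark_words)
    return has_kw and has_mark

print(keep_label("价格", ["请问能报价吗", "好的没问题"]))  # True
print(keep_label("价格", ["随便聊聊", "今天天气不错"]))    # False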
Menu.ts
OMBlock"; import { Renderer as LabelRenderer } from "./Renderers/Controls/Label"; /** Mouse hover timeout after which to show/hide sub menu */ const HOVER_TIMEOUT = 200; /** Current hover timeout ID */ var _hoverTimer: number | undefined; /** Item height, in pixels, of one menu item; used for calculating whether the menu fits on screen vertically, before displaying the menu below or above the mouse cursor or control element */ var _itemHeight = 28; /** Display a modal context menu */ Menu.displayContextMenu = function (options: Menu.Option[], event: PointerEvent): PromiseLike<string | number> { return new DOMMenuComponent(options).display(event.clientX!, event.clientY!); }; /** Display a modal dropdown menu */ Menu.displayDropdown = function (options: Menu.Option[], ref: Component): PromiseLike<string | number> { var out = ref.getLastRenderedOutput(); if (out && out.element) { var r = (<HTMLElement>out.element).getBoundingClientRect(); return new DOMMenuComponent(options) .display(r.right, r.bottom, true, r.bottom - r.top); } return Async.Promise.reject(new Error()); }; /** Remove current menu */ Menu.dismiss = function () { var page = Page.getCurrentPage(); page && page.getComponentsByType(DOMMenuComponent) .forEach(c => Screen.remove(c)); }; /** Component that contains a menu DOM element */ class DOMMenuComponent extends DOMBlock { /** Create DOM dropdown menu */ constructor(options: Menu.Option[] = []) { super(); this.options = options; // create UL node var menu = document.createElement("ul"); menu.style.cssFloat = "none"; menu.style.position = "static"; menu.style.boxShadow = "none"; menu.style.margin = "0"; menu.className = "dropdown-menu show"; // v3 this._menu = menu; this.nodes.push(menu); // set display options this.displayOptions = { modal: true, // trigger table wrapper onEsc: () => { // onEsc: reject promise and remove menu Screen.remove(this); this._rejector(new Error()); } }; // add mouseout handler to stop sub menus from showing menu.onmouseout = event => { if (_hoverTimer) window.clearTimeout(_hoverTimer); _hoverTimer = undefined; if (event.target === menu && !this._subMenuShown) { _hoverTimer = window.setTimeout(() => { this._clearSubMenus(); _hoverTimer = undefined; }, HOVER_TIMEOUT) } }; } /** Menu options for this (sub) menu */ public options: Menu.Option[]; /** Set to true to force this menu to be displayed on the left side */ public forceLeft?: boolean; /** Display this menu at the given location on screen, either below or above; vert/horz push define the amount to displace the menu if showing on top/left; appended as child of given DOM node, or displayed on screen in its own layer */ public display(x: number, y: number, force?: boolean, vertPush?: number, horizPush?: number, parent?: Node) { this._isBase = !parent; while (this._menu.firstChild) this._menu.removeChild(this._menu.firstChild); var result = new Async.Promise<string | number>((resolve, reject) => { this._resolver = resolve; this._rejector = reject; }); // set position around given x, y + displacement coordinates this._setPosition(x, y, force, vertPush, horizPush); // add options from array var hasIcon = this.options.some(option => option && !!option.icon); this.options.forEach((option, i) => { if (!option) return; // create list item element with divider or option link var li = document.createElement("li"); this._menu.appendChild(li); if (option.disabled) li.className = "disabled"; if (option.divider) { // create a divider li.className = "divider dropdown-divider"; } else
                LabelRenderer.renderInto(a, option.icon, iw, option.label);
                if (hasIcon) a.style.paddingLeft = ".5rem";

                // render far side icon
                if (option.sideIcon) {
                    var r = document.createElement("span");
                    r.className = "bidi_floatEnd";
                    LabelRenderer.renderInto(r, option.sideIcon);
                    a.appendChild(r);
                }
            }
        });

        // update element height estimate after display
        this.Rendered.connectOnce(out => {
            Async.sleep(30).then(() => {
                var elt: HTMLElement = out.element;
                if (elt) {
                    var max = 0;
                    for (var li of <any>elt.querySelectorAll("li"))
                        max = Math.max(max, li.offsetHeight);
                    if (max && _itemHeight !== max) {
                        _itemHeight = max;
                        this._setPosition(x, y, force, vertPush, horizPush);
                    }
                }
            });
        });

        // add menu as a child or display as new modal layer
        var out = this.out;
        if (parent && out) parent.appendChild(out.element);
        else Screen.displayAsync(this);
        return result;
    }

    private _setPosition(x: number, y: number, forceLeft?: boolean,
        vertPush?: number, horizPush?: number) {
        // position menu on left or right of given coordinates
        if (x + 200 < window.innerWidth && (!forceLeft || x < 200)) {
            this.style.set({ left: x + "px", right: "auto" });
        }
        else {
            var right = (window.innerWidth - x + (horizPush || 0)) + "px";
            this.style.set({ left: "auto", right });
        }

        // position menu on top or bottom of given coordinates
        if (y + this.options.length * _itemHeight + 10 < window.innerHeight) {
            this.style.set({ top: y + "px", bottom: "auto" });
        }
        else {
            var bottom = (window.innerHeight - y + (vertPush || 0)) + "px";
            this.style.set({ top: "auto", bottom });
            var checkTop = () => {
                // if pushed above window top, move to very top
                if (this._menu.offsetTop < 0)
                    this.style.set({ top: "0", bottom: "auto" });
            };
            window.setTimeout(checkTop, 10);
            window.setTimeout(checkTop, 50);
            window.setTimeout(checkTop, 100);
        }
    }

    private _clearSubMenus() {
        this._subMenuShown = undefined;
        var out = this.getLastRenderedOutput();
        var elt: HTMLElement = out && out.element;
        while (elt && elt.nextSibling)
            elt.parentNode!.removeChild(elt.nextSibling);
    }

    private _addLinkClickHandler(elt: HTMLAnchorElement, i: number) {
        var option = this.options[i];
        elt.onclick = event => {
            event.preventDefault();
            if (this._isBase) Screen.remove(this);
            this._resolver(option.key || (i + 1));
        }
    }

    private _addLinkHoverHandler(elt: HTMLAnchorElement, i: number) {
        var option = this.options[i];
        elt.onmouseover = () => {
            // clear timer to show/hide (other) sub menu
            if (_hoverTimer) {
                window.clearTimeout(_hoverTimer);
                _hoverTimer = undefined;
            }

            // set timer to show current sub menu, if not already shown
            if (this._subMenuShown !== option) {
                _hoverTimer = window.setTimeout(() => {
                    this._clearSubMenus();
                    _hoverTimer = undefined;
                    if (!option.subMenu) return;
                    this._subMenuShown = option;

                    // create and display sub menu
                    var r = (<Element>elt.parentNode).getBoundingClientRect();
                    var out = this.getLastRenderedOutput();
                    var parentNode = out && out.element.parentNode;
                    var p = new DOMMenuComponent(option.subMenu)
                        .display(r.right - 5, r.top, false, -_itemHeight,
                            r.right - r.left - 10, parentNode);
                    p.then(choice => {
                        if (this._isBase) Screen.remove(this);
                        if (typeof choice == "number" && choice > 0) {
                            var base = 100;
                            while (base <= choice) base *= 100;
                            choice = (i + 1) * base + <number>choice;
                        }
                        this._resolver(choice);
                    });
                }, HOVER_TIMEOUT);
            }
        };
    }

    private _subMenuShown?: Menu.Option;

    private _resolver:
            {
                // create a text option, add click handler
                var a = document.createElement("a");
                a.className = "dropdown-item";
                a.href = "#";
                if (option.disabled) {
                    a.className += " disabled";
                    a.style.cursor = "default";
                    a.onclick = event => { event.preventDefault() };
                }
                else {
                    a.tabIndex = 0;
                    a.style.cursor = "pointer";
                    if (!option.subMenu) this._addLinkClickHandler(a, i);
                }
                this._addLinkHoverHandler(a, i);
                li.appendChild(a);

                // render text into link
                var iw = hasIcon ? 1.5 : 0;
conditional_block
Menu.ts
OMBlock"; import { Renderer as LabelRenderer } from "./Renderers/Controls/Label"; /** Mouse hover timeout after which to show/hide sub menu */ const HOVER_TIMEOUT = 200; /** Current hover timeout ID */ var _hoverTimer: number | undefined; /** Item height, in pixels, of one menu item; used for calculating whether the menu fits on screen vertically, before displaying the menu below or above the mouse cursor or control element */ var _itemHeight = 28; /** Display a modal context menu */ Menu.displayContextMenu = function (options: Menu.Option[], event: PointerEvent): PromiseLike<string | number> { return new DOMMenuComponent(options).display(event.clientX!, event.clientY!); }; /** Display a modal dropdown menu */ Menu.displayDropdown = function (options: Menu.Option[], ref: Component): PromiseLike<string | number> { var out = ref.getLastRenderedOutput(); if (out && out.element) { var r = (<HTMLElement>out.element).getBoundingClientRect(); return new DOMMenuComponent(options) .display(r.right, r.bottom, true, r.bottom - r.top); } return Async.Promise.reject(new Error()); }; /** Remove current menu */ Menu.dismiss = function () { var page = Page.getCurrentPage(); page && page.getComponentsByType(DOMMenuComponent) .forEach(c => Screen.remove(c)); }; /** Component that contains a menu DOM element */ class DOMMenuComponent extends DOMBlock { /** Create DOM dropdown menu */
constructor
identifier_name
Menu.ts
OMBlock"; import { Renderer as LabelRenderer } from "./Renderers/Controls/Label"; /** Mouse hover timeout after which to show/hide sub menu */ const HOVER_TIMEOUT = 200; /** Current hover timeout ID */ var _hoverTimer: number | undefined; /** Item height, in pixels, of one menu item; used for calculating whether the menu fits on screen vertically, before displaying the menu below or above the mouse cursor or control element */ var _itemHeight = 28; /** Display a modal context menu */ Menu.displayContextMenu = function (options: Menu.Option[], event: PointerEvent): PromiseLike<string | number> { return new DOMMenuComponent(options).display(event.clientX!, event.clientY!); }; /** Display a modal dropdown menu */ Menu.displayDropdown = function (options: Menu.Option[], ref: Component): PromiseLike<string | number> { var out = ref.getLastRenderedOutput(); if (out && out.element) { var r = (<HTMLElement>out.element).getBoundingClientRect(); return new DOMMenuComponent(options) .display(r.right, r.bottom, true, r.bottom - r.top); } return Async.Promise.reject(new Error()); }; /** Remove current menu */ Menu.dismiss = function () { var page = Page.getCurrentPage(); page && page.getComponentsByType(DOMMenuComponent) .forEach(c => Screen.remove(c)); }; /** Component that contains a menu DOM element */ class DOMMenuComponent extends DOMBlock { /** Create DOM dropdown menu */
constructor(options: Menu.Option[] = []) {
random_line_split
Menu.ts
OMBlock"; import { Renderer as LabelRenderer } from "./Renderers/Controls/Label"; /** Mouse hover timeout after which to show/hide sub menu */ const HOVER_TIMEOUT = 200; /** Current hover timeout ID */ var _hoverTimer: number | undefined; /** Item height, in pixels, of one menu item; used for calculating whether the menu fits on screen vertically, before displaying the menu below or above the mouse cursor or control element */ var _itemHeight = 28; /** Display a modal context menu */ Menu.displayContextMenu = function (options: Menu.Option[], event: PointerEvent): PromiseLike<string | number> { return new DOMMenuComponent(options).display(event.clientX!, event.clientY!); }; /** Display a modal dropdown menu */ Menu.displayDropdown = function (options: Menu.Option[], ref: Component): PromiseLike<string | number> { var out = ref.getLastRenderedOutput(); if (out && out.element) { var r = (<HTMLElement>out.element).getBoundingClientRect(); return new DOMMenuComponent(options) .display(r.right, r.bottom, true, r.bottom - r.top); } return Async.Promise.reject(new Error()); }; /** Remove current menu */ Menu.dismiss = function () { var page = Page.getCurrentPage(); page && page.getComponentsByType(DOMMenuComponent) .forEach(c => Screen.remove(c)); }; /** Component that contains a menu DOM element */ class DOMMenuComponent extends DOMBlock { /** Create DOM dropdown menu */ constructor(options: Menu.Option[] = []) { super(); this.options = options; // create UL node var menu = document.createElement("ul"); menu.style.cssFloat = "none"; menu.style.position = "static"; menu.style.boxShadow = "none"; menu.style.margin = "0"; menu.className = "dropdown-menu show"; // v3 this._menu = menu; this.nodes.push(menu); // set display options this.displayOptions = { modal: true, // trigger table wrapper onEsc: () => { // onEsc: reject promise and remove menu Screen.remove(this); this._rejector(new Error()); } }; // add mouseout handler to stop sub menus from showing menu.onmouseout = event => { if (_hoverTimer) window.clearTimeout(_hoverTimer); _hoverTimer = undefined; if (event.target === menu && !this._subMenuShown) { _hoverTimer = window.setTimeout(() => { this._clearSubMenus(); _hoverTimer = undefined; }, HOVER_TIMEOUT) } }; } /** Menu options for this (sub) menu */ public options: Menu.Option[]; /** Set to true to force this menu to be displayed on the left side */ public forceLeft?: boolean; /** Display this menu at the given location on screen, either below or above; vert/horz push define the amount to displace the menu if showing on top/left; appended as child of given DOM node, or displayed on screen in its own layer */ public display(x: number, y: number, force?: boolean, vertPush?: number, horizPush?: number, parent?: Node) { this._isBase = !parent; while (this._menu.firstChild) this._menu.removeChild(this._menu.firstChild); var result = new Async.Promise<string | number>((resolve, reject) => { this._resolver = resolve; this._rejector = reject; }); // set position around given x, y + displacement coordinates this._setPosition(x, y, force, vertPush, horizPush); // add options from array var hasIcon = this.options.some(option => option && !!option.icon); this.options.forEach((option, i) => { if (!option) return; // create list item element with divider or option link var li = document.createElement("li"); this._menu.appendChild(li); if (option.disabled) li.className = "disabled"; if (option.divider) { // create a divider li.className = "divider dropdown-divider"; } else { // create a text option, add 
click handler var a = document.createElement("a"); a.className = "dropdown-item"; a.href = "#"; if (option.disabled) { a.className += " disabled"; a.style.cursor = "default"; a.onclick = event => { event.preventDefault() }; } else { a.tabIndex = 0; a.style.cursor = "pointer"; if (!option.subMenu) this._addLinkClickHandler(a, i); } this._addLinkHoverHandler(a, i); li.appendChild(a); // render text into link var iw = hasIcon ? 1.5 : 0; LabelRenderer.renderInto(a, option.icon, iw, option.label); if (hasIcon) a.style.paddingLeft = ".5rem"; // render far side icon if (option.sideIcon) { var r = document.createElement("span"); r.className = "bidi_floatEnd"; LabelRenderer.renderInto(r, option.sideIcon); a.appendChild(r); } } }); // update element height estimate after display this.Rendered.connectOnce(out => { Async.sleep(30).then(() => { var elt: HTMLElement = out.element; if (elt) { var max = 0; for (var li of <any>elt.querySelectorAll("li")) max = Math.max(max, li.offsetHeight); if (max && _itemHeight !== max) { _itemHeight = max; this._setPosition(x, y, force, vertPush, horizPush); } } }); }); // add menu as a child or display as new modal layer var out = this.out; if (parent && out) parent.appendChild(out.element); else Screen.displayAsync(this); return result; } private _setPosition(x: number, y: number, forceLeft?: boolean, vertPush?: number, horizPush?: number) { // position menu on left or right of given coordinates if (x + 200 < window.innerWidth && (!forceLeft || x < 200)) { this.style.set({ left: x + "px", right: "auto" }); } else { var right = (window.innerWidth - x + (horizPush || 0)) + "px"; this.style.set({ left: "auto", right }); } // position menu on top or bottom of given coordinates if (y + this.options.length * _itemHeight + 10 < window.innerHeight) { this.style.set({ top: y + "px", bottom: "auto" }); } else { var bottom = (window.innerHeight - y + (vertPush || 0)) + "px"; this.style.set({ top: "auto", bottom }); var checkTop = () => { // if pushed above window top, move to very top if (this._menu.offsetTop < 0) this.style.set({ top: "0", bottom: "auto" }); }; window.setTimeout(checkTop, 10); window.setTimeout(checkTop, 50); window.setTimeout(checkTop, 100); } } private _clearSubMenus() { this._subMenuShown = undefined; var out = this.getLastRenderedOutput(); var elt: HTMLElement = out && out.element; while (elt && elt.nextSibling) elt.parentNode!.removeChild(elt.nextSibling); } private _addLinkClickHandler(elt: HTMLAnchorElement, i: number)
    {
        var option = this.options[i];
        elt.onclick = event => {
            event.preventDefault();
            if (this._isBase) Screen.remove(this);
            this._resolver(option.key || (i + 1));
        }
    }
identifier_body
wallpaper.rs
we need to adjust them */
    map_window_rect(wallpaper, wnd).unwrap();

    let prev_parent = SetParent(wnd, wallpaper);
    if prev_parent.is_null() {
        panic!("SetParent failed, GetLastError says: '{}'", GetLastError());
    }
    ShowWindow(wnd, SW_SHOW);
    return true;
}

unsafe fn remove_window_from_wallpaper(wallpaper: HWND, wnd: HWND) -> bool {
    use winapi::um::winuser::{
        SetParent, GetDesktopWindow, InvalidateRect, WS_EX_APPWINDOW, WS_OVERLAPPEDWINDOW,
        SWP_FRAMECHANGED, SWP_NOMOVE, SWP_NOSIZE, SWP_NOZORDER, SWP_NOOWNERZORDER
    };

    if SetParent(wnd, GetDesktopWindow()).is_null() {
        eprintln!("SetParent failed, GetLastError says: '{}'", GetLastError());
        return false;
    }

    let or = WS_OVERLAPPEDWINDOW as i32;
    let ex_or = WS_EX_APPWINDOW as i32;
    if !update_window_styles(wnd, -1, -1, or, ex_or) {
        return false;
    }

    SetWindowPos(
        wnd,
        null_mut(),
        0, 0, 0, 0,
        SWP_FRAMECHANGED | SWP_NOMOVE | SWP_NOSIZE | SWP_NOZORDER | SWP_NOOWNERZORDER
    );
    InvalidateRect(wallpaper, null_mut(), 1);
    // wp_id(); /* can sometimes fix leftover unrefreshed portions */
    true
}

unsafe fn set_fullscreen(wallpaper: HWND, wnd: HWND) -> bool {
    if let Some(current_rect) = get_window_rect(wnd) {
        let monitor = MonitorFromPoint(POINT { x: current_rect.left, y: current_rect.top },
            MONITOR_DEFAULTTONEAREST);
        if monitor.is_null() {
            eprintln!("MonitorFromPoint failed, GetLastError says: '{}'", GetLastError());
            return false;
        }

        let mut mi: MONITORINFO = Default::default();
        mi.cbSize = std::mem::size_of::<MONITORINFO>() as u32;
        let success = GetMonitorInfoW(monitor, &mi as *const MONITORINFO as *mut MONITORINFO);
        if success == 0 {
            eprintln!("GetMonitorInfoW failed, GetLastError says: '{}'", GetLastError());
            return false;
        }

        MapWindowPoints(null_mut(), wallpaper, &mi.rcMonitor as *const RECT as PPOINT, 2);
        move_window(wnd, mi.rcMonitor);
        return true;
    }
    return false;
}

unsafe fn list_immediate_children(parent: HWND) -> Vec<HWND> {
    use winapi::um::winuser::EnumChildWindows;

    #[repr(C)]
    struct WindowState {
        parent: HWND,
        handles: Vec<HWND>,
    }

    let mut s = WindowState { parent, handles: Vec::new() };

    extern "system" fn enum_windows(wnd: HWND, lp: LPARAM) -> i32 {
        use winapi::um::winuser::{GetAncestor, GA_PARENT};
        let s: *mut WindowState = lp as *mut WindowState;
        unsafe {
            if GetAncestor(wnd, GA_PARENT) == (*s).parent {
                (*s).handles.push(wnd);
            }
        }
        return 1;
    }

    SetLastError(0);
    EnumChildWindows(parent, Some(enum_windows), &mut s as *mut WindowState as LPARAM);
    if GetLastError() != 0 {
        panic!("EnumChildWindows failed, GetLastError says: {}", GetLastError());
    }
    s.handles.sort_unstable();
    return s.handles;
}

unsafe fn find_window_by_pid(pid: u32) -> HWND {
    use winapi::um::winuser::{EnumWindows, GetWindowThreadProcessId};
    use winapi::shared::minwindef::{DWORD, LPDWORD};

    #[repr(C)]
    #[derive(Debug)]
    struct Data {
        handle: HWND,
        pid: u32,
    }

    extern "system" fn enum_windows(wnd: HWND, data: LPARAM) -> i32 {
        let mut data = data as *mut Data;
        unsafe {
            let mut this_pid: DWORD = 0;
            GetWindowThreadProcessId(wnd, &mut this_pid as LPDWORD);
            if this_pid == (*data).pid {
                (*data).handle = wnd;
                return 0;
            }
        }
        return 1;
    }

    let mut data = Data { handle: null_mut(), pid };
    SetLastError(0);
    EnumWindows(Some(enum_windows), &mut data as *mut Data as LPARAM);
    if GetLastError() != 0 {
        panic!("EnumWindows failed, GetLastError says: {}", GetLastError());
    }
    data.handle
}

pub fn list_windows() -> Vec<HWND> {
    use winapi::um::winuser::{
        EnumWindows, IsWindowVisible, GetLastActivePopup, GetAncestor, GetWindowTextLengthW,
        GA_ROOTOWNER, WS_EX_NOREDIRECTIONBITMAP, WS_EX_TOOLWINDOW
    };

    // https://stackoverflow.com/questions/210504/enumerate-windows-like-alt-tab-does
    unsafe fn should_list(hwnd: HWND) -> bool {
        // Start at the root owner
        let mut hwnd_walk = GetAncestor(hwnd, GA_ROOTOWNER);
        // See if we are the last active visible popup
        let mut hwnd_try = null_mut();
        loop {
            let hwnd_try_next = GetLastActivePopup(hwnd_walk);
            if hwnd_try_next == hwnd_try || IsWindowVisible(hwnd_try_next) == 1 { break; }
            hwnd_try = hwnd_try_next;
            hwnd_walk = hwnd_try;
        }
        return hwnd_walk == hwnd;
    }

    extern "system" fn list_windows_callback(hwnd: HWND, lp: LPARAM) -> i32 {
        let data = lp as *mut Vec<HWND>;
        unsafe {
            if IsWindowVisible(hwnd) == 1 && GetWindowTextLengthW(hwnd) > 0 && should_list(hwnd) {
                let (_, ex_style) = get_window_style(hwnd);
                if (ex_style as u32 & WS_EX_NOREDIRECTIONBITMAP) == 0
                    && (ex_style as u32 & WS_EX_TOOLWINDOW) == 0 {
                    (*data).push(hwnd);
                }
            }
        }
        1
    }

    let mut data: Vec<HWND> = Vec::new();
    unsafe {
        SetLastError(0);
        EnumWindows(Some(list_windows_callback), &mut data as *mut Vec<HWND> as LPARAM);
        if GetLastError() != 0 {
            panic!("EnumWindows failed, GetLastError says: '{}'", GetLastError());
        }
    }
    data
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum WindowSelector<'a> {
    WindowTitle(&'a str),
    None,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WallpaperProperties {
    pub fullscreen: bool
}

#[derive(Debug)]
pub enum EngineError {
    ProgmanNotFound,
    UnableToSpawnWorker,
}

#[derive(Debug)]
pub struct Engine {
    progman: HWND,
    worker: HWND,
}

impl Engine {
    pub fn new() -> Result<Engine, EngineError> {
        let progman_handle = find_window_by_class("Progman");
        if progman_handle.is_null() {
            return Err(EngineError::ProgmanNotFound);
        }

        let worker_handle = unsafe { find_or_spawn_worker(progman_handle) };
        if worker_handle.is_null() {
            return Err(EngineError::UnableToSpawnWorker);
        }

        Ok(Engine { progman: progman_handle, worker: worker_handle })
    }

    pub fn list_active(&self) -> Vec<HWND> {
        unsafe {
            // TODO this is not safe until we add a check for worker validity here.
            list_immediate_children(self.worker)
        }
    }

    pub fn add_window_by_handle(&self, handle: HWND, properties: WallpaperProperties) -> bool {
        if !unsafe { add_window_as_wallpaper(self.worker, handle) } {
            eprintln!("Cannot add window to wallpaper");
            return false;
        }
        if properties.fullscreen && !unsafe { set_fullscreen(self.worker, handle) } {
            return false
        }
        true
    }

    pub fn add_window(&self,
        command: Option<&mut Command>,
        selector: WindowSelector,
        properties: WallpaperProperties,
        wait_for: u64,
        attempts: u64
    ) -> bool {
        let process_id = match command {
            Some(command) => command.spawn().expect("command failed to start").id(),
            None => {
                if let WindowSelector::None = selector {
                    eprintln!("One or both of selector and command should be specified");
                    return false;
                }
                0
            }
        };

        let mut handle = null_mut();
        for _attempt in 1..=attempts {
            handle = match selector {
                WindowSelector::None => unsafe { find_window_by_pid(process_id) },
                WindowSelector::WindowTitle(title) => {
                    let windows = list_windows();
                    *windows.iter().find(|&&hwnd| get_window_name(hwnd) == title).unwrap_or(&null_mut())
                },
            };
            if handle.is_null() {
                std::thread::sleep(std::time::Duration::from_millis(wait_for));
            }
            else
{ break; }
conditional_block
wallpaper.rs
fn to_wide(s: &str) -> Vec<u16> {
    OsStr::new(s).encode_wide().chain(once(0)).collect()
}

pub fn get_window_name(hwnd: HWND) -> String {
    use winapi::um::winuser::{GetWindowTextLengthW, GetWindowTextW};

    if hwnd.is_null() {
        panic!("Invalid HWND");
    }

    let text = unsafe {
        let text_length = GetWindowTextLengthW(hwnd);
        let mut text: Vec<u16> = vec![0; text_length as usize + 1];
        GetWindowTextW(hwnd, text.as_mut_ptr(), text_length + 1);
        OsString::from_wide(&text[..text.iter().position(|&c| c == 0).unwrap()])
    };
    text.into_string().expect("Failed to convert string to UTF-8")
}

/**
 * Spawn a wallpaper window if it doesn't already exist and return a handle to it.
 *
 * `progman` - a valid handle to the `Progman`.
 *
 * This function is unsafe, because the user is responsible for providing a valid progman handle.
 */
unsafe fn find_or_spawn_worker(progman: HWND) -> HWND {
    use winapi::um::winuser::{SendMessageW, EnumWindows};

    extern "system" fn find_worker(hwnd: HWND, data: LPARAM) -> i32 {
        use winapi::um::winuser::FindWindowExW;
        let data = data as *mut UserData;
        unsafe {
            if FindWindowExW(hwnd, null_mut(), (*data).shell_class.as_ptr(), null_mut()).is_null() {
                return 1;
            }
            let worker = FindWindowExW(null_mut(), hwnd, (*data).worker_class.as_ptr(), null_mut());
            if worker.is_null() {
                return 1;
            }
            (*data).worker = worker;
            (*data).parent = hwnd;
        }
        return 0;
    }

    struct UserData {
        shell_class: Vec<u16>,
        worker_class: Vec<u16>,
        worker: HWND,
        parent: HWND,
    }

    let mut user_data = UserData {
        shell_class: to_wide("SHELLDLL_DefView"),
        worker_class: to_wide("WorkerW"),
        worker: null_mut(),
        parent: null_mut(),
    };

    SetLastError(0);
    EnumWindows(Some(find_worker), &mut user_data as *mut UserData as LPARAM);
    if GetLastError() != 0 {
        panic!("EnumWindows failed, GetLastError says: '{}'", GetLastError());
    }

    if user_data.worker.is_null() {
        // this is basically all the magic. it's an undocumented window message that
        // forces windows to spawn a window with class "WorkerW" behind deskicons
        SendMessageW(progman, 0x052C, 0xD, 0);
        SendMessageW(progman, 0x052C, 0xD, 1);

        SetLastError(0);
        EnumWindows(Some(find_worker), &mut user_data as *mut UserData as LPARAM);
        if GetLastError() != 0 {
            panic!("EnumWindows failed, GetLastError says: '{}'", GetLastError());
        }

        if user_data.worker.is_null() {
            eprintln!("W: couldn't spawn WorkerW window, trying old method");
            SendMessageW(progman, 0x052C, 0, 0);
            SetLastError(0);
            EnumWindows(Some(find_worker), &mut user_data as *mut UserData as LPARAM);
            if GetLastError() != 0 {
                panic!("EnumWindows failed, GetLastError says: '{}'", GetLastError());
            }
        }
    }
    user_data.worker
}

unsafe fn get_window_style(hwnd: HWND) -> (i32, i32) {
    use winapi::um::winuser::{GetWindowLongW, GWL_STYLE, GWL_EXSTYLE};

    SetLastError(0);
    let style = GetWindowLongW(hwnd, GWL_STYLE);
    let ex_style = GetWindowLongW(hwnd, GWL_EXSTYLE);
    if (style == 0 || ex_style == 0) && GetLastError() != 0 {
        panic!("GetWindowLongW failed, GetLastError says: '{}'", GetLastError());
    }
    (style, ex_style)
}

unsafe fn update_window_styles(wnd: HWND, and: i32, ex_and: i32, or: i32, ex_or: i32) -> bool {
    use winapi::um::winuser::{SetWindowLongW, GWL_STYLE, GWL_EXSTYLE};

    let (mut style, mut ex_style) = get_window_style(wnd);
    style &= and;
    ex_style &= ex_and;
    style |= or;
    ex_style |= ex_or;

    SetLastError(0);
    let style = SetWindowLongW(wnd, GWL_STYLE, style);
    let ex_style = SetWindowLongW(wnd, GWL_EXSTYLE, ex_style);
    if (style == 0 || ex_style == 0) && GetLastError() != 0 {
        panic!("SetWindowLongW failed, GetLastError says: '{}'", GetLastError());
    }
    return true;
}

unsafe fn get_window_rect(wnd: HWND) -> Option<RECT> {
    let rect: RECT = Default::default();
    let failed = GetWindowRect(wnd, &rect as *const RECT as *mut RECT) == 0;
    if failed {
        eprintln!("GetWindowRect failed, GetLastError says: '{}'", GetLastError());
        return None;
    }
    return Some(rect);
}

unsafe fn map_window_rect(wallpaper: HWND, wnd: HWND) -> Option<RECT> {
    if let Some(rect) = get_window_rect(wnd) {
        MapWindowPoints(null_mut(), wallpaper, &rect as *const RECT as PPOINT, 2);
        return Some(rect);
    }
    return None;
}

unsafe fn move_window(wnd: HWND, rect: RECT) -> bool {
    let success = SetWindowPos(
        wnd,
        null_mut(),
        rect.left,
        rect.top,
        rect.right - rect.left,
        rect.bottom - rect.top,
        0
    );
    if success == 0 {
        eprintln!("SetWindowPos failed, GetLastError says: '{}'", GetLastError());
        return false;
    }
    return true;
}

unsafe fn add_window_as_wallpaper(wallpaper: HWND, wnd: HWND) -> bool {
    use winapi::um::winuser::{
        SetParent, WS_CHILD, WS_CAPTION, WS_THICKFRAME, WS_SYSMENU, WS_MAXIMIZEBOX,
        WS_MINIMIZEBOX, WS_EX_DLGMODALFRAME, WS_EX_COMPOSITED, WS_EX_WINDOWEDGE,
        WS_EX_CLIENTEDGE, WS_EX_LAYERED, WS_EX_STATICEDGE, WS_EX_TOOLWINDOW, WS_EX_APPWINDOW,
    };

    let wnd_class = {
        let wnd_class: &mut [u16] = &mut [0; 512];
        GetClassNameW(wnd, wnd_class.as_mut_ptr(), wnd_class.len() as i32 - 1);
        OsString::from_wide(&wnd_class[..wnd_class.iter().position(|&c| c == 0).unwrap()])
    };
    if wallpaper == wnd || wnd_class == "Shell_TrayWnd" {
        eprintln!("can't add this window");
        return false;
    }

    let is_child = IsChild(wallpaper, wnd) != 0;
    if is_child {
        eprintln!("already added");
        return false;
    }

    /*
     * styles blacklist taken from https://github.com/Codeusa/Borderless-Gaming/
     * blob/2fef4ccc121412f215cd7f185c4351fd634cab8b/BorderlessGaming.Logic/
     * Windows/Manipulation.cs#L70
     */
    /* TODO: somehow save old styles so we can restore them */
    let and: i32 = !(
        WS_CAPTION | WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX
    ) as i32;
    let ex_and: i32 = !(
        WS_EX_DLGMODALFRAME | WS_EX_COMPOSITED | WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE |
        WS_EX_LAYERED | WS_EX_STATICEDGE | WS_EX_TOOLWINDOW | WS_EX_APPWINDOW
    ) as i32;

    if !update_window_styles(wnd, and, ex_and, WS_CHILD as i32, 0) {
        return false;
    }

    /* window retains screen coordinates so we need to adjust them */
    map_window_rect(wallpaper, wnd).unwrap();

    let prev_parent = SetParent(wnd, wallpaper);
    if prev_parent.is_null() {
        panic!("SetParent failed, GetLastError says: '{}'", GetLastError());
    }
    ShowWindow(wnd, SW_SHOW);
    return true;
}

unsafe fn remove_window_from_wallpaper(wallpaper: HWND, wnd: HWND) -> bool {
    use winapi::um::winuser::{
        SetParent, GetDesktopWindow, InvalidateRect, WS_EX_APPWINDOW, WS_OVERLAPPEDWINDOW,
        SWP_FRAMECHANGED, SWP_NOMOVE, SWP_NOSIZE, SWP_NOZORDER, SWP_NOOWNERZORDER
    };

    if SetParent(wnd, GetDesktopWindow()).is_null
{
    use winapi::um::winuser::FindWindowW;
    unsafe { FindWindowW(to_wide(class).as_ptr(), null_mut()) }
}
identifier_body
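The heart of wallpaper.rs is find_or_spawn_worker: sending Progman the undocumented 0x052C message makes the shell create a WorkerW window behind the desktop icons, which is then found next to the window hosting SHELLDLL_DefView. Below is the same trick sketched in Python with ctypes, Windows-only; the message constant and class names are taken from the Rust code above, the rest is a plain Win32 enumeration and spawn_workerw is an illustrative name.

import ctypes
from ctypes import wintypes

user32 = ctypes.windll.user32
user32.FindWindowW.restype = wintypes.HWND
user32.FindWindowExW.restype = wintypes.HWND
user32.FindWindowExW.argtypes = (wintypes.HWND, wintypes.HWND, wintypes.LPCWSTR, wintypes.LPCWSTR)
user32.SendMessageW.argtypes = (wintypes.HWND, wintypes.UINT, wintypes.WPARAM, wintypes.LPARAM)

def spawn_workerw():
    progman = user32.FindWindowW("Progman", None)
    if not progman:
        raise RuntimeError("Progman window not found")
    # 0x052C is the undocumented message described in the Rust comments;
    # it makes Progman spawn a WorkerW window behind the desktop icons
    user32.SendMessageW(progman, 0x052C, 0xD, 0)
    user32.SendMessageW(progman, 0x052C, 0xD, 1)

    worker = None

    @ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.HWND, wintypes.LPARAM)
    def enum_proc(hwnd, _lparam):
        nonlocal worker
        # the WorkerW we want is the sibling following the window
        # that hosts SHELLDLL_DefView
        if user32.FindWindowExW(hwnd, None, "SHELLDLL_DefView", None):
            worker = user32.FindWindowExW(None, hwnd, "WorkerW", None)
        return True

    user32.EnumWindows(enum_proc, 0)
    return worker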
wallpaper.rs
let wnd_class = { let wnd_class: &mut [u16] = &mut [0; 512]; GetClassNameW(wnd, wnd_class.as_mut_ptr(), wnd_class.len() as i32 - 1); OsString::from_wide(&wnd_class[..wnd_class.iter().position(|&c| c == 0).unwrap()]) }; if wallpaper == wnd || wnd_class == "Shell_TrayWnd" { eprintln!("can't add this window"); return false; } let is_child = IsChild(wallpaper, wnd) != 0; if is_child { eprintln!("already added"); return false; } /* * styles blacklist taken from https://github.com/Codeusa/Borderless-Gaming/ * blob/2fef4ccc121412f215cd7f185c4351fd634cab8b/BorderlessGaming.Logic/ * Windows/Manipulation.cs#L70 */ /* TODO: somehow save old styles so we can restore them */ let and: i32 = !( WS_CAPTION | WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX ) as i32; let ex_and: i32 = !( WS_EX_DLGMODALFRAME | WS_EX_COMPOSITED | WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE | WS_EX_LAYERED | WS_EX_STATICEDGE | WS_EX_TOOLWINDOW | WS_EX_APPWINDOW ) as i32; if !update_window_styles(wnd, and, ex_and, WS_CHILD as i32, 0) { return false; } /* window retains screen coordinates so we need to adjust them */ map_window_rect(wallpaper, wnd).unwrap(); let prev_parent = SetParent(wnd, wallpaper); if prev_parent.is_null() { panic!("SetParent failed, GetLastError says: '{}'", GetLastError()); } ShowWindow(wnd, SW_SHOW); return true; } unsafe fn remove_window_from_wallpaper(wallpaper: HWND, wnd: HWND) -> bool { use winapi::um::winuser::{ SetParent, GetDesktopWindow, InvalidateRect, WS_EX_APPWINDOW, WS_OVERLAPPEDWINDOW, SWP_FRAMECHANGED, SWP_NOMOVE, SWP_NOSIZE, SWP_NOZORDER, SWP_NOOWNERZORDER }; if SetParent(wnd, GetDesktopWindow()).is_null() { eprintln!("SetParent failed, GetLastError says: '{}'", GetLastError()); return false; } let or = WS_OVERLAPPEDWINDOW as i32; let ex_or = WS_EX_APPWINDOW as i32; if !update_window_styles(wnd, -1, -1, or, ex_or) { return false; } SetWindowPos( wnd, null_mut(), 0, 0, 0, 0, SWP_FRAMECHANGED | SWP_NOMOVE | SWP_NOSIZE | SWP_NOZORDER | SWP_NOOWNERZORDER ); InvalidateRect(wallpaper, null_mut(), 1); // wp_id(); /* can sometimes fix leftover unrefreshed portions */ true } unsafe fn set_fullscreen(wallpaper: HWND, wnd: HWND) -> bool { if let Some(current_rect) = get_window_rect(wnd) { let monitor = MonitorFromPoint(POINT {x: current_rect.left, y: current_rect.top}, MONITOR_DEFAULTTONEAREST); if monitor.is_null() { eprintln!("MonitorFromWindow failed, GetLastError says: '{}'", GetLastError()); return false; } let mut mi: MONITORINFO = Default::default(); mi.cbSize = std::mem::size_of::<MONITORINFO>() as u32; let success = GetMonitorInfoW(monitor, &mi as *const MONITORINFO as *mut MONITORINFO); if success == 0 { eprintln!("GetMonitorInfoW failed, GetLastError says: '{}'", GetLastError()); return false; } MapWindowPoints(null_mut(), wallpaper, &mi.rcMonitor as *const RECT as PPOINT, 2); move_window(wnd, mi.rcMonitor); return true; } return false; } unsafe fn list_immediate_children(parent: HWND) -> Vec<HWND> { use winapi::um::winuser::EnumChildWindows; #[repr(C)] struct WindowState { parent: HWND, handles: Vec<HWND>, } let mut s = WindowState { parent, handles: Vec::new() }; extern "system" fn enum_windows(wnd: HWND, lp: LPARAM) -> i32 { use win
WS_EX_DLGMODALFRAME, WS_EX_COMPOSITED, WS_EX_WINDOWEDGE, WS_EX_CLIENTEDGE, WS_EX_LAYERED, WS_EX_STATICEDGE, WS_EX_TOOLWINDOW, WS_EX_APPWINDOW, };
random_line_split
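The undocumented 0x052C message described in the comment above is the core of the technique: Progman spawns a WorkerW window behind the desktop icons, which can then host arbitrary windows. A condensed sketch of that sequence, assuming the winapi crate on Windows and the to_wide() helper from this file; the real code walks EnumWindows to pick the WorkerW sibling of SHELLDLL_DefView, which the last line here simplifies:

#[cfg(windows)]
unsafe fn spawn_worker_w() -> winapi::shared::windef::HWND {
    use std::ptr::null_mut;
    use winapi::um::winuser::{FindWindowExW, FindWindowW, SendMessageW};
    let progman = FindWindowW(to_wide("Progman").as_ptr(), null_mut());
    // Undocumented: ask Progman to spawn a WorkerW behind the desktop icons.
    SendMessageW(progman, 0x052C, 0xD, 0);
    SendMessageW(progman, 0x052C, 0xD, 1);
    // Simplification: grab the first top-level WorkerW; the code above instead
    // enumerates all windows to find the WorkerW that follows SHELLDLL_DefView.
    FindWindowExW(null_mut(), null_mut(), to_wide("WorkerW").as_ptr(), null_mut())
}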
wallpaper.rs
.worker.is_null() { eprintln!("W: couldn't spawn WorkerW window, trying old method"); SendMessageW(progman, 0x052C, 0, 0); SetLastError(0); EnumWindows(Some(find_worker), &mut user_data as *mut UserData as LPARAM); if GetLastError() != 0 { panic!("EnumWindows failed, GetLastError says: '{}'", GetLastError()); } } } user_data.worker } unsafe fn get_window_style(hwnd: HWND) -> (i32, i32) { use winapi::um::winuser::{GetWindowLongW, GWL_STYLE, GWL_EXSTYLE}; SetLastError(0); let style = GetWindowLongW(hwnd, GWL_STYLE); let ex_style = GetWindowLongW(hwnd, GWL_EXSTYLE); if (style == 0 || ex_style == 0) && GetLastError() != 0 { panic!("GetWindowLongW failed, GetLastError says: '{}'", GetLastError()); } (style, ex_style) } unsafe fn update_window_styles(wnd: HWND, and: i32, ex_and: i32, or: i32, ex_or: i32) -> bool { use winapi::um::winuser::{SetWindowLongW, GWL_STYLE, GWL_EXSTYLE}; let (mut style, mut ex_style) = get_window_style(wnd); style &= and; ex_style &= ex_and; style |= or; ex_style |= ex_or; SetLastError(0); let style = SetWindowLongW(wnd, GWL_STYLE, style); let ex_style = SetWindowLongW(wnd, GWL_EXSTYLE, ex_style); if (style == 0 || ex_style == 0) && GetLastError() != 0 { panic!("SetWindowLongW failed, GetLastError says: '{}'", GetLastError()); } return true; } unsafe fn get_window_rect(wnd: HWND) -> Option<RECT> { let rect: RECT = Default::default(); let failed = GetWindowRect(wnd, &rect as *const RECT as *mut RECT) == 0; if failed { eprintln!("GetWindowRect failed, GetLastError says: '{}'", GetLastError()); return None; } return Some(rect); } unsafe fn map_window_rect(wallpaper: HWND, wnd: HWND) -> Option<RECT> { if let Some(rect) = get_window_rect(wnd) { MapWindowPoints(null_mut(), wallpaper, &rect as *const RECT as PPOINT, 2); return Some(rect); } return None; } unsafe fn move_window(wnd: HWND, rect: RECT) -> bool { let success = SetWindowPos( wnd, null_mut(), rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top, 0 ); if success == 0 { eprintln!("SetWindowPos failed, GetLastError says: '{}'", GetLastError()); return false; } return true; } unsafe fn add_window_as_wallpaper(wallpaper: HWND, wnd: HWND) -> bool { use winapi::um::winuser::{ SetParent, WS_CHILD, WS_CAPTION, WS_THICKFRAME, WS_SYSMENU, WS_MAXIMIZEBOX, WS_MINIMIZEBOX, WS_EX_DLGMODALFRAME, WS_EX_COMPOSITED, WS_EX_WINDOWEDGE, WS_EX_CLIENTEDGE, WS_EX_LAYERED, WS_EX_STATICEDGE, WS_EX_TOOLWINDOW, WS_EX_APPWINDOW, }; let wnd_class = { let wnd_class: &mut [u16] = &mut [0; 512]; GetClassNameW(wnd, wnd_class.as_mut_ptr(), wnd_class.len() as i32 - 1); OsString::from_wide(&wnd_class[..wnd_class.iter().position(|&c| c == 0).unwrap()]) }; if wallpaper == wnd || wnd_class == "Shell_TrayWnd" { eprintln!("can't add this window"); return false; } let is_child = IsChild(wallpaper, wnd) != 0; if is_child { eprintln!("already added"); return false; } /* * styles blacklist taken from https://github.com/Codeusa/Borderless-Gaming/ * blob/2fef4ccc121412f215cd7f185c4351fd634cab8b/BorderlessGaming.Logic/ * Windows/Manipulation.cs#L70 */ /* TODO: somehow save old styles so we can restore them */ let and: i32 = !( WS_CAPTION | WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX ) as i32; let ex_and: i32 = !( WS_EX_DLGMODALFRAME | WS_EX_COMPOSITED | WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE | WS_EX_LAYERED | WS_EX_STATICEDGE | WS_EX_TOOLWINDOW | WS_EX_APPWINDOW ) as i32; if !update_window_styles(wnd, and, ex_and, WS_CHILD as i32, 0) { return false; } /* window retains screen coordinates so we need to adjust them */ 
map_window_rect(wallpaper, wnd).unwrap(); let prev_parent = SetParent(wnd, wallpaper); if prev_parent.is_null() { panic!("SetParent failed, GetLastError says: '{}'", GetLastError()); } ShowWindow(wnd, SW_SHOW); return true; } unsafe fn remove_window_from_wallpaper(wallpaper: HWND, wnd: HWND) -> bool { use winapi::um::winuser::{ SetParent, GetDesktopWindow, InvalidateRect, WS_EX_APPWINDOW, WS_OVERLAPPEDWINDOW, SWP_FRAMECHANGED, SWP_NOMOVE, SWP_NOSIZE, SWP_NOZORDER, SWP_NOOWNERZORDER }; if SetParent(wnd, GetDesktopWindow()).is_null() { eprintln!("SetParent failed, GetLastError says: '{}'", GetLastError()); return false; } let or = WS_OVERLAPPEDWINDOW as i32; let ex_or = WS_EX_APPWINDOW as i32; if !update_window_styles(wnd, -1, -1, or, ex_or) { return false; } SetWindowPos( wnd, null_mut(), 0, 0, 0, 0, SWP_FRAMECHANGED | SWP_NOMOVE | SWP_NOSIZE | SWP_NOZORDER | SWP_NOOWNERZORDER ); InvalidateRect(wallpaper, null_mut(), 1); // wp_id(); /* can sometimes fix leftover unrefreshed portions */ true } unsafe fn set_fullscreen(wallpaper: HWND, wnd: HWND) -> bool { if let Some(current_rect) = get_window_rect(wnd) { let monitor = MonitorFromPoint(POINT {x: current_rect.left, y: current_rect.top}, MONITOR_DEFAULTTONEAREST); if monitor.is_null() { eprintln!("MonitorFromWindow failed, GetLastError says: '{}'", GetLastError()); return false; } let mut mi: MONITORINFO = Default::default(); mi.cbSize = std::mem::size_of::<MONITORINFO>() as u32; let success = GetMonitorInfoW(monitor, &mi as *const MONITORINFO as *mut MONITORINFO); if success == 0 { eprintln!("GetMonitorInfoW failed, GetLastError says: '{}'", GetLastError()); return false; } MapWindowPoints(null_mut(), wallpaper, &mi.rcMonitor as *const RECT as PPOINT, 2); move_window(wnd, mi.rcMonitor); return true; } return false; } unsafe fn list_immediate_children(parent: HWND) -> Vec<HWND> { use winapi::um::winuser::EnumChildWindows; #[repr(C)] struct WindowState { parent: HWND, handles: Vec<HWND>, } let mut s = WindowState { parent, handles: Vec::new() }; extern "system" fn enum_windows(wnd: HWND, lp: LPARAM) -> i32 { use winapi::um::winuser::{GetAncestor, GA_PARENT}; let s: *mut WindowState = lp as *mut WindowState; unsafe { if GetAncestor(wnd, GA_PARENT) == (*s).parent { (*s).handles.push(wnd); } } return 1; } SetLastError(0); EnumChildWindows(parent, Some(enum_windows), &mut s as *mut WindowState as LPARAM); if GetLastError() != 0 { panic!("EnumChildWindows failed, GetLastError says: {}", GetLastError()); } s.handles.sort_unstable(); return s.handles; } unsafe fn find_window_by_pid(pid: u32) -> HWND { use winapi::um::winuser::{EnumWindows, GetWindowThreadProcessId}; use winapi::shared::minwindef::{DWORD, LPDWORD}; #[repr(C)] #[derive(Debug)] struct Data { handle: HWND, pid: u32, } extern "system" fn
enum_windows
identifier_name
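The enum-callback pattern above (a raw pointer to a Rust struct smuggled through the LPARAM integer) is independent of the Windows API and can be exercised directly. A small self-contained sketch with the same extern "system" shape:

struct State {
    handles: Vec<i32>,
}

// Same shape as an EnumWindows callback: an opaque pointer travels as an integer.
extern "system" fn collect(wnd: i32, lp: isize) -> i32 {
    let state = lp as *mut State;
    unsafe { (*state).handles.push(wnd) };
    1 // non-zero: keep enumerating
}

fn main() {
    let mut state = State { handles: Vec::new() };
    let lp = &mut state as *mut State as isize;
    for wnd in [10, 20, 30] {
        collect(wnd, lp); // the OS would make these calls in the real API
    }
    assert_eq!(state.handles, vec![10, 20, 30]);
}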
tile.rs
-> Color { match name.to_lowercase().as_str() { "ground" => GROUND, "yellow" => YELLOW, "green" => GREEN, "russet" => RUSSET, "grey" => GREY, "brown" => BROWN, "red" => RED, "blue" => BLUE, "barrier" => BARRIER, "white" => WHITE, _ => Color { value: "#000000" }, } } } /// Converts a position code to hex coordinates /// /// Converts a position code to a hexagon-space coordinate with its origin in /// the hexagon center. /// /// # Panics /// /// On invalid position code fn edge_to_coordinate(edge: &str) -> na::Vector3<f64> { match edge { "N" => na::Vector3::new( 0.0, 0.5, 0.5), "NE" => na::Vector3::new( 0.5, 0.5, 0.0), "SE" => na::Vector3::new( 0.5, 0.0, -0.5), "S" => na::Vector3::new( 0.0, -0.5, -0.5), "SW" => na::Vector3::new(-0.5, -0.5, 0.0), "NW" => na::Vector3::new(-0.5, 0.0, 0.5), "C" => na::Vector3::new( 0.0, 0.0, 0.0), c => panic!("Invalid edge code {}", c), } } /// Converts a compass direction to a number of degrees of rotation pub fn direction_to_angle(direction: &str) -> f64 { match direction { "N" => 0.0, "NW" => -PI / 3.0, "SW" => -PI * 2.0 / 3.0, "S" => PI, "SE" => PI * 2.0 / 3.0, "NE" => PI / 3.0, c => panic!("Invalid direction {}", c), } } /// Represents named or hex space coordinate #[derive(Clone, Deserialize, Debug)] #[serde(untagged)] pub enum Coordinate { Named(String), HexSpace((f64, f64, f64)), } impl Coordinate { pub fn as_vector(&self) -> na::Vector3<f64> { match self { &Coordinate::Named(ref name) => edge_to_coordinate(name.as_ref()), &Coordinate::HexSpace(ref pos) => na::Vector3::new(pos.0, pos.1, pos.2), } } } /// Attributes that are common between Tile and TileDefinition pub trait TileSpec { fn color(&self) -> colors::Color; fn set_name(&mut self, name: String); fn name(&self) -> &str; /// The paths on the tile. fn paths(&self) -> Vec<Path>; /// The city revenue locations on the tile. 
fn cities(&self) -> Vec<City>; /// The stop revenue locations on the tile fn stops(&self) -> Vec<Stop>; /// Whether a tile should be drawn as lawson track fn is_lawson(&self) -> bool; /// Arrows on the edge of a tile fn arrows(&self) -> Vec<Coordinate> { vec![] } /// Revenue track on the tile fn revenue_track(&self) -> Option<RevenueTrack> { None } fn terrain(&self) -> Option<Terrain> { None } fn get_text<'a>(&'a self, &'a str) -> &'a str; fn text_position(&self, usize) -> Option<na::Vector3<f64>>; fn text_spec(&self) -> Vec<Text>; /// Rotation of the tile fn orientation(&self) -> f64 { 0.0 } } /// The specification of a tile to be used in the game #[derive(Deserialize)] pub struct Tile { base_tile: String, color: String, text: HashMap<String, String>, #[serde(skip)] definition: Option<TileDefinition>, } impl Tile { pub fn set_definition(&mut self, definition: &TileDefinition) { self.definition = Some(definition.clone()); } pub fn base_tile(&self) -> String { self.base_tile.clone() } } impl Default for Tile { fn default() -> Tile { Tile { base_tile: String::new(), color: String::new(), text: HashMap::new(), definition: None, } } } impl TileSpec for Tile { fn color(&self) -> colors::Color { colors::name_to_color(&self.color) } /// The number of the tile, should be the first text specified fn name(&self) -> &str { self.text.get("number").unwrap() } fn set_name(&mut self, name: String) { self.text.insert("number".to_string(), name); } fn paths(&self) -> Vec<Path> { self.definition.as_ref() .expect("You must call set_definition() before using paths()") .paths() } fn cities(&self) -> Vec<City> { self.definition.as_ref() .expect("You must call set_definition() before using cities()") .cities() } fn stops(&self) -> Vec<Stop> { self.definition.as_ref() .expect("You must call set_definition() before using stops()") .stops() } fn is_lawson(&self) -> bool { self.definition.as_ref() .expect("You must call set_definition() before using is_lawson()") .is_lawson() } fn get_text(&self, id: &str) -> &str { match self.text.get(id) { Some(s) => s, None => "", } } fn text_position(&self, id: usize) -> Option<na::Vector3<f64>> { self.definition.as_ref() .expect("You must call set_definition() before using \ text_position()") .text_position(id) } fn text_spec(&self) -> Vec<Text> { self.definition.as_ref() .expect("You must call set_definition() before using \ text_spec()") .text_spec() } } /// Definition of tile layout, does not include color or name #[derive(Clone, Deserialize, Debug)] #[serde(default)] pub struct TileDefinition { name: String, paths: Vec<Path>, cities: Vec<City>, stops: Vec<Stop>, is_lawson: bool, text: Vec<Text>, } impl Default for TileDefinition { fn default() -> TileDefinition {
} impl TileSpec for TileDefinition { fn paths(&self) -> Vec<Path> { self.paths.clone() } fn cities(&self) -> Vec<City> { self.cities.clone() } fn stops(&self) -> Vec<Stop> { self.stops.clone() } fn is_lawson(&self) -> bool { self.is_lawson } fn color(&self) -> colors::Color { colors::GROUND } fn set_name(&mut self, name: String) { self.name = name; } fn name(&self) -> &str { self.name.as_str() } fn get_text<'a>(&'a self, id: &'a str) -> &'a str { match id { "number" => self.name(), x => x, } } fn text_position(&self, id: usize) -> Option<na::Vector3<f64>> { Some(self.text[id].position()) } fn text_spec(&self) -> Vec<Text> { let tile_number = Text { id: "number".to_string(), position: Coordinate::HexSpace((0.0, 0.0, -0.9)), anchor: TextAnchor::End, size: None, weight: None, }; let mut text = self.text.clone(); text.insert(0, tile_number); text } } /// Path on the tile /// /// A path is a line section that goes between `start point` and `end point`. /// There are two versions of each point `[start|end]` and `[start|end]_pos`, /// the `_pos` variant takes precedence over the non-`_pos` version. The /// non-`_pos` version should always be a position code, while the `_pos` /// version is a 3D position in hexagon-space. #[derive(Deserialize, Debug, Clone)] pub struct Path { start: Coordinate, end: Coordinate, pub start_control: Option<Coordinate>, pub end_control: Option<Coordinate>, #[serde(default)] is_bridge:
TileDefinition { name: "NoName".to_string(), paths: vec![], cities: vec![], stops: vec![], is_lawson: false, text: vec![], } }
identifier_body
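Because TileDefinition carries #[serde(default)], a tile definition file may omit any field and inherit it from the Default impl shown above. A small sketch of that behavior, assuming serde (with derive) and serde_yaml as dependencies and a pared-down struct:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(default)]
struct TileDef {
    name: String,
    is_lawson: bool,
}

impl Default for TileDef {
    fn default() -> TileDef {
        TileDef { name: "NoName".to_string(), is_lawson: false }
    }
}

fn main() {
    // Only `name` is given; `is_lawson` falls back to the Default impl.
    let tile: TileDef = serde_yaml::from_str("name: \"8\"").unwrap();
    assert_eq!(tile.name, "8");
    assert!(!tile.is_lawson);
}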
tile.rs
::new(), text: HashMap::new(), definition: None, } } } impl TileSpec for Tile { fn color(&self) -> colors::Color { colors::name_to_color(&self.color) } /// The number of the tile, should be the first text specified fn name(&self) -> &str { self.text.get("number").unwrap() } fn set_name(&mut self, name: String) { self.text.insert("number".to_string(), name); } fn paths(&self) -> Vec<Path> { self.definition.as_ref() .expect("You must call set_definition() before using paths()") .paths() } fn cities(&self) -> Vec<City> { self.definition.as_ref() .expect("You must call set_definition() before using cities()") .cities() } fn stops(&self) -> Vec<Stop> { self.definition.as_ref() .expect("You must call set_definition() before using stops()") .stops() } fn is_lawson(&self) -> bool { self.definition.as_ref() .expect("You must call set_definition() before using is_lawson()") .is_lawson() } fn get_text(&self, id: &str) -> &str { match self.text.get(id) { Some(s) => s, None => "", } } fn text_position(&self, id: usize) -> Option<na::Vector3<f64>> { self.definition.as_ref() .expect("You must call set_definition() before using \ text_position()") .text_position(id) } fn text_spec(&self) -> Vec<Text> { self.definition.as_ref() .expect("You must call set_definition() before using \ text_spec()") .text_spec() } } /// Definition of tile layout, does not include color or name #[derive(Clone, Deserialize, Debug)] #[serde(default)] pub struct TileDefinition { name: String, paths: Vec<Path>, cities: Vec<City>, stops: Vec<Stop>, is_lawson: bool, text: Vec<Text>, } impl Default for TileDefinition { fn default() -> TileDefinition { TileDefinition { name: "NoName".to_string(), paths: vec![], cities: vec![], stops: vec![], is_lawson: false, text: vec![], } } } impl TileSpec for TileDefinition { fn paths(&self) -> Vec<Path> { self.paths.clone() } fn cities(&self) -> Vec<City> { self.cities.clone() } fn stops(&self) -> Vec<Stop> { self.stops.clone() } fn is_lawson(&self) -> bool { self.is_lawson } fn color(&self) -> colors::Color { colors::GROUND } fn set_name(&mut self, name: String) { self.name = name; } fn name(&self) -> &str { self.name.as_str() } fn get_text<'a>(&'a self, id: &'a str) -> &'a str { match id { "number" => self.name(), x => x, } } fn text_position(&self, id: usize) -> Option<na::Vector3<f64>> { Some(self.text[id].position()) } fn text_spec(&self) -> Vec<Text> { let tile_number = Text { id: "number".to_string(), position: Coordinate::HexSpace((0.0, 0.0, -0.9)), anchor: TextAnchor::End, size: None, weight: None, }; let mut text = self.text.clone(); text.insert(0, tile_number); text } } /// Path on the tile /// /// A path is a line section that goes between `start point` and `end point`. /// There are two versions of each point `[start|end]` and `[start|end]_pos`, /// the `_pos` variant takes precedence over the non-`_pos` version. The /// non-`_pos` version should always be a position code, while the `_pos` /// version is a 3D position in hexagon-space. #[derive(Deserialize, Debug, Clone)] pub struct Path { start: Coordinate, end: Coordinate, pub start_control: Option<Coordinate>, pub end_control: Option<Coordinate>, #[serde(default)] is_bridge: bool, } impl Path { /// Getter that always returns the start coordinate in hexagon-space. pub fn start(&self) -> na::Vector3<f64> { self.start.as_vector() } /// Getter that always returns the end coordinate in hexagon-space. 
pub fn end(&self) -> na::Vector3<f64> { self.end.as_vector() } /// Whether the is_bridge flag is set pub fn is_bridge(&self) -> bool { self.is_bridge } /// The radius of the corner made by the path pub fn radius(&self) -> f64 { let gentle_curve = 2.0_f64.sqrt() / 2.0; // Gentle curves have a different radius if let (&Coordinate::Named(ref start), &Coordinate::Named(ref end)) = (&self.start, &self.end) { if start.len() == 2 && end.len() == 2 && start.chars().nth(0) == end.chars().nth(0) { // NW-NE, SW-SE return gentle_curve } else if ((start.len() == 2 && end.len() == 1) || (start.len() == 1 && end.len() == 2)) && start.chars().nth(0) != end.chars().nth(0) { // N-SE, N-SW, etc. return gentle_curve } } // Everything else has a radius of one 1.0 } } /// City on the tile /// /// A city is a collection of circles where tokens can be put down. A city /// requires the specification of the number of circles (a positive integer) /// and the revenue (a positive integer). An optional position can also be /// given. If omitted then the position is assumed to be the center of the /// tile. The position can be given as the `pos` or `position` fields. The /// `pos` field is a coordinate in hexagon-space. The `position` field is a /// position code. #[derive(Deserialize, Debug, Clone)] pub struct City { pub circles: u32, pub text_id: String, pub revenue_position: Coordinate, position: Coordinate, } impl City { /// The coordinate of the city in hexagon-space. pub fn position(&self) -> na::Vector3<f64> { self.position.as_vector() } pub fn revenue_position(&self) -> na::Vector3<f64>{ self.revenue_position.as_vector() } } /// Stop on the tile /// /// A stop is a position with a revenue number. The `position` field is an /// 3D position in hexagon-space. #[derive(Deserialize, Debug, Clone)] pub struct Stop { position: Coordinate, pub text_id: String, pub revenue_angle: i32, } impl Stop { /// The coordinate of the stop in hexagon-space. pub fn position(&self) -> na::Vector3<f64> { self.position.as_vector() } } /// Text anchor position for text on tile #[derive(Deserialize, Debug, Clone)] pub enum TextAnchor { Start, Middle, End, } /// Text on the tile #[derive(Deserialize, Debug, Clone)] pub struct Text { pub id: String, position: Coordinate, size: Option<String>, pub weight: Option<u32>, pub anchor: TextAnchor, } impl Text { /// The coordinate of the text in hexagon-space. pub fn position(&self) -> na::Vector3<f64> { self.position.as_vector() } /// The size of the text pub fn size(&self) -> Option<&str> { match self.size { None => None, Some(ref s) => Some(&s), } } } /// Track which shows revenue for different phases #[derive(Deserialize, Debug, Clone)] pub struct RevenueTrack { position: Coordinate, pub yellow: String, pub green: Option<String>, pub russet: Option<String>, pub grey: Option<String>, } impl RevenueTrack { /// The coordinate of the track in hexagon-space. pub fn position(&self) -> na::Vector3<f64> { self.position.as_vector() } } /// Terrain on a tile #[derive(Clone, Deserialize)] pub struct Terrain { position: Coordinate, #[serde(rename="type")] pub terrain_type: TerrainType, pub cost: String, } impl Terrain { /// The coordinate of the terrain in hexagon-space. pub fn position(&self) -> na::Vector3<f64> { self.position.as_vector() } } /// Types of terrain that can be present #[derive(Clone, Deserialize)] #[serde(rename_all="lowercase")] pub enum T
errainType
identifier_name
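The radius() rule above classifies a path as a gentle curve (radius √2/2) or a normal one (radius 1.0) purely from the shape of its two position codes. The same classification as a standalone function over plain strings, with the original's two length checks collapsed into a length-sum test:

/// Mirrors Path::radius(): gentle curves get sqrt(2)/2, everything else 1.0.
fn radius(start: &str, end: &str) -> f64 {
    let gentle = 2.0_f64.sqrt() / 2.0;
    if start.len() == 2 && end.len() == 2
        && start.chars().next() == end.chars().next() {
        gentle // e.g. NW-NE or SW-SE
    } else if start.len() + end.len() == 3
        && start.chars().next() != end.chars().next() {
        gentle // e.g. N-SE or N-SW
    } else {
        1.0 // straight or sharp paths
    }
}

fn main() {
    assert_eq!(radius("NW", "NE"), 2.0_f64.sqrt() / 2.0);
    assert_eq!(radius("N", "SE"), 2.0_f64.sqrt() / 2.0);
    assert_eq!(radius("N", "S"), 1.0);
}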
tile.rs
//! * `C`: center of hexagon //! //! ![Coordinate system](../../../../axes.svg) extern crate nalgebra as na; extern crate serde_yaml; use std::collections::HashMap; use std::f64::consts::PI; use std::fs; use std::path::PathBuf; use std::fs::File; use std::process; /// Standard colors that can be used pub mod colors { pub struct Color { value: &'static str, } impl Color { pub fn value(&self) -> &str { self.value } } impl Default for Color { fn default() -> Color { GROUND } } pub const GROUND: Color = Color { value: "#FDD9B5" }; // Sandy Tan pub const YELLOW: Color = Color { value: "#FDEE00" }; // Aureolin pub const GREEN: Color = Color { value: "#00A550" }; // Pigment Green pub const RUSSET: Color = Color { value: "#CD7F32" }; // Bronze pub const GREY: Color = Color { value: "#ACACAC" }; // Silver Chalice pub const BROWN: Color = Color { value: "#7B3F00" }; // Chocolate pub const RED: Color = Color { value: "#DC143C" }; // Crimson pub const BLUE: Color = Color { value: "#007FFF" }; // Azure pub const BARRIER: Color = Color { value: "#660000" }; // Blood Red pub const WHITE: Color = Color { value: "#FFFFFF" }; pub fn name_to_color(name: &String) -> Color { match name.to_lowercase().as_str() { "ground" => GROUND, "yellow" => YELLOW, "green" => GREEN, "russet" => RUSSET, "grey" => GREY, "brown" => BROWN, "red" => RED, "blue" => BLUE, "barrier" => BARRIER, "white" => WHITE, _ => Color { value: "#000000" }, } } } /// Converts a position code to hex coordinates /// /// Converts a position code to a hexagon-space coordinate with its origin in /// the hexagon center. /// /// # Panics /// /// On invalid position code fn edge_to_coordinate(edge: &str) -> na::Vector3<f64> { match edge { "N" => na::Vector3::new( 0.0, 0.5, 0.5), "NE" => na::Vector3::new( 0.5, 0.5, 0.0), "SE" => na::Vector3::new( 0.5, 0.0, -0.5), "S" => na::Vector3::new( 0.0, -0.5, -0.5), "SW" => na::Vector3::new(-0.5, -0.5, 0.0), "NW" => na::Vector3::new(-0.5, 0.0, 0.5), "C" => na::Vector3::new( 0.0, 0.0, 0.0), c => panic!("Invalid edge code {}", c), } } /// Converts a compass direction to a number of degrees of rotation pub fn direction_to_angle(direction: &str) -> f64 { match direction { "N" => 0.0, "NW" => -PI / 3.0, "SW" => -PI * 2.0 / 3.0, "S" => PI, "SE" => PI * 2.0 / 3.0, "NE" => PI / 3.0, c => panic!("Invalid direction {}", c), } } /// Represents named or hex space coordinate #[derive(Clone, Deserialize, Debug)] #[serde(untagged)] pub enum Coordinate { Named(String), HexSpace((f64, f64, f64)), } impl Coordinate { pub fn as_vector(&self) -> na::Vector3<f64> { match self { &Coordinate::Named(ref name) => edge_to_coordinate(name.as_ref()), &Coordinate::HexSpace(ref pos) => na::Vector3::new(pos.0, pos.1, pos.2), } } } /// Attributes that are common between Tile and TileDefinition pub trait TileSpec { fn color(&self) -> colors::Color; fn set_name(&mut self, name: String); fn name(&self) -> &str; /// The paths on the tile. fn paths(&self) -> Vec<Path>; /// The city revenue locations on the tile. 
fn cities(&self) -> Vec<City>; /// The stop revenue locations on the tile fn stops(&self) -> Vec<Stop>; /// Whether a tile should be drawn as lawson track fn is_lawson(&self) -> bool; /// Arrows on the edge of a tile fn arrows(&self) -> Vec<Coordinate> { vec![] } /// Revenue track on the tile fn revenue_track(&self) -> Option<RevenueTrack> { None } fn terrain(&self) -> Option<Terrain> { None } fn get_text<'a>(&'a self, &'a str) -> &'a str; fn text_position(&self, usize) -> Option<na::Vector3<f64>>; fn text_spec(&self) -> Vec<Text>; /// Rotation of the tile fn orientation(&self) -> f64 { 0.0 } } /// The specification of a tile to be used in the game #[derive(Deserialize)] pub struct Tile { base_tile: String, color: String, text: HashMap<String, String>, #[serde(skip)] definition: Option<TileDefinition>, } impl Tile { pub fn set_definition(&mut self, definition: &TileDefinition) { self.definition = Some(definition.clone()); } pub fn base_tile(&self) -> String { self.base_tile.clone() } } impl Default for Tile { fn default() -> Tile { Tile { base_tile: String::new(), color: String::new(), text: HashMap::new(), definition: None, } } } impl TileSpec for Tile { fn color(&self) -> colors::Color { colors::name_to_color(&self.color) } /// The number of the tile, should be the first text specified fn name(&self) -> &str { self.text.get("number").unwrap() } fn set_name(&mut self, name: String) { self.text.insert("number".to_string(), name); } fn paths(&self) -> Vec<Path> { self.definition.as_ref() .expect("You must call set_definition() before using paths()") .paths() } fn cities(&self) -> Vec<City> { self.definition.as_ref() .expect("You must call set_definition() before using cities()") .cities() } fn stops(&self) -> Vec<Stop> { self.definition.as_ref() .expect("You must call set_definition() before using stops()") .stops() } fn is_lawson(&self) -> bool { self.definition.as_ref() .expect("You must call set_definition() before using is_lawson()") .is_lawson() } fn get_text(&self, id: &str) -> &str { match self.text.get(id) { Some(s) => s, None => "", } } fn text_position(&self, id: usize) -> Option<na::Vector3<f64>> { self.definition.as_ref() .expect("You must call set_definition() before using \ text_position()") .text_position(id) } fn text_spec(&self) -> Vec<Text> { self.definition.as_ref() .expect("You must call set_definition() before using \ text_spec()") .text_spec() } } /// Definition of tile layout, does not include color or name #[derive(Clone, Deserialize, Debug)] #[serde(default)] pub struct TileDefinition { name: String, paths: Vec<Path>, cities: Vec<City>, stops: Vec<Stop>, is_lawson: bool, text: Vec<Text>, } impl Default for TileDefinition { fn default() -> TileDefinition { TileDefinition { name: "NoName".to_string(), paths: vec![], cities: vec![], stops: vec![], is_lawson: false, text: vec![], } } } impl TileSpec for TileDefinition { fn paths(&self) -> Vec<Path> { self.paths.clone() } fn cities(&self) -> Vec<City> { self.cities.clone() } fn stops(&self) ->
random_line_split
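A useful property of edge_to_coordinate above: opposite edges (N/S, NE/SW, SE/NW) map to antipodal vectors, so a straight path between them passes through the hexagon center. A quick standalone check using plain tuples in place of nalgebra:

fn edge(code: &str) -> (f64, f64, f64) {
    match code {
        "N"  => (0.0, 0.5, 0.5),
        "NE" => (0.5, 0.5, 0.0),
        "SE" => (0.5, 0.0, -0.5),
        "S"  => (0.0, -0.5, -0.5),
        "SW" => (-0.5, -0.5, 0.0),
        "NW" => (-0.5, 0.0, 0.5),
        "C"  => (0.0, 0.0, 0.0),
        c => panic!("Invalid edge code {}", c),
    }
}

fn main() {
    // Opposite edges are antipodal: their sum is the center.
    for (a, b) in [("N", "S"), ("NE", "SW"), ("SE", "NW")] {
        let (x1, y1, z1) = edge(a);
        let (x2, y2, z2) = edge(b);
        assert_eq!((x1 + x2, y1 + y2, z1 + z2), (0.0, 0.0, 0.0));
    }
}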
noxAlarmProcess.py
import EmailAlarmAlert from utils.sms import SmsAlarmAlert from web import create_app from web.models import DBLog """ import socket to thread gateway """ from nox_alarm import zmq_socket_config """ UniPi IO config file """ UNIPI_IO_CONFIG = join(conf_path, 'software', 'nox_unipi_io.json') DEBUG = None # DEBUG = True class NoxAlarm(object): """ NoxAlarm is a state machine that tracks the state of the NX640 alarm. Initialization configures - the inputs (system running, siren) - and the command output (power-on control) Calling run_states() refreshes the input values. run_states() must be called in a while loop at a regular interval (~1 second) When the machine changes state, push_socket_event() publishes the event on a socket. The socket is passed as a parameter at init. An external program (the web app) calls push_socket_state() at regular intervals to get the machine's state (on, off, etc). """ states = ['init' , 'off' , 'on' , 'alert' , 'was_alert' ] colors = ['info' , 'success' , 'primary' , 'warning' , 'warning' ] events = ['init' , 'stop' , 'start' , 'detection' , 'serene_stop'] name = 'Nox' cycle_delay = 1 # While Loop cycle delay # --- Init functions --- def init_socket(self): context = zmq.Context() try: # Socket SUB_COMMAND receives commands (start, stop) from Flask (ThreadExtAlarm) self.SUB_COMMAND = context.socket(zmq.SUB) self.SUB_COMMAND.connect("tcp://localhost:%s" % zmq_socket_config.port_socket_noxalarm_command) self.SUB_COMMAND.setsockopt_string(zmq.SUBSCRIBE, zmq_socket_config.TOPIC_REQUEST) # Socket PUB_STATE sends status updates to Flask (ThreadNoxtAlarm) self.PUB_STATE = context.socket(zmq.PUB) self.PUB_STATE.bind("tcp://*:%s" % zmq_socket_config.port_socket_noxalarm_state) except: msg = 'Failed init ZMQ socket' logger.exception(msg) self.exit(msg, exit_now=True) def init_config(self): try: # load config from json logger.info("Loading config file %s" % (UNIPI_IO_CONFIG)) conf = dict_from_json_file(UNIPI_IO_CONFIG) # declare I/O from config self.in_alert = UnipiInput(conf['in_alert']) self.in_power = UnipiInput(conf['in_power']) self.out_power = UnipiOutput(conf['out_power']) except Exception as e: name = e.__class__.__name__ # name of the exception class, e.g. 'FileNotFoundError' if (name == 'FileNotFoundError'): logger.error('Config file not found %s' % (UNIPI_IO_CONFIG)) elif (name == 'KeyError'): logger.error('config file corrupted') else: # Generic error: log trace logger.exception('Error loading config %s' % (str(sys.exc_info()[0]))) # Whatever the exception, config failed, exit the program msg = 'Failed init Config' self.exit(msg, exit_now=True) def init_statemachine(self): # State Machine self.machine = Machine(model=self, states=NoxAlarm.states, initial='init') # Transitions self.machine.add_transition('off_to_on', 'off', 'on', before='starting', after='push_socket_state') self.machine.add_transition('any_to_off' , '*' , 'off', before='stopping', after='push_socket_state') self.machine.add_transition('on_to_alert', 'on' , 'alert', before='detection', after='push_socket_state') self.machine.add_transition('alert_to_was_alert', 'alert', 'was_alert', before='serene_stop', after='push_socket_state') def init_state(self): """ Set the real state of the system when the program (re)starts. The transition init -> any is done by calling leave_init() manually.
No email/sms alert will be sent """ self.read_inputs() if (self.in_power.value == 1) and (self.in_alert.value == 1): self.state = 'alert' elif (self.in_power.value == 1): self.state = 'on' else: self.state = 'off' self.leave_init() def __init__(self): logger.info('Starting ...') self.run = True # Flag to stop while loop. (on KeyboardInterrupt or SIGINT) # Register signal handler # KeyboardInterrupt (SIGINT) managed directly in main loop signal.signal(signal.SIGTERM, self.exit_from_signals) # Supervisor Exit code (15) self.init_socket() self.init_config() self.init_statemachine() self.init_state() # Read inputs and Set state after init def __str__(self): out = '' out += ('Nox State: %s | ' % (self.state)) for input in [self.in_alert, self.in_power]: out += ('%s %s | ' % (input.name, input.value)) return(out) # --- Exit functions (Stop Process) --- def exit_from_signals(self, signal_num, frame): """ Callback called when SIGTERM received from supervisor Do call exit() with detail of the signal_num """ detail = 'signal {}'.format(signal_num) self.exit(detail) def exit(self, detail, exit_now=None): """ Stop the process (log before) 'exit_now': Option to stop now or to set self.run to False (will stop at the end of the current while loop cycle) """ self.make_DBLog('system', 'exit', 'danger', detail=detail) logger.critical('Exit caused by {}'.format(detail)) msg = 'exit' if exit_now: sys.exit() # exit here else: self.run = False # exit after end of current loop # --- Handle commands from web App --- def start_alarm(self): """ Command to start alarm. """ self.out_power.pulse() def stop_alarm(self): """ Command to stop alarm. """ self.out_power.pulse() # --- StateMachine Callbacks (Actions on state change) --- def leave_init(self): """ From state init to any other state This is not a callback (called manually) """ msg = 'init (state: {})'.format(self.state) logger.info(msg) event = 'init' self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog('system', msg, color) def starting(self): event = 'start' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def stopping(self):
nt = 'stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def detection(self): event = 'detection' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Alert", NoxAlarm.name, event) def serene_stop(self): event = 'serene_stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Info", NoxAlarm.name, event) @staticmethod def make_alert(*args): """ wrapper method to call mail & sms alerts """ try: SmsAlarmAlert(*args) except: logger.exception('Fail calling SmsAlarmAlert()') try: EmailAlarmAlert(*args) except: logger.exception('Fail calling EmailAlarmAlert()') @staticmethod def make_DBLog(subject, event, badge, detail=''): """ wrapper method to call DBLog.new() on alarm event """ app = create_app() with app.app_context(): DBLog.new(subject=subject, scope="nox", badge=badge, message=event, ip='-', user='-', detail=detail) # --- Push info to web App socket --- def push_socket_event(self, event): self.PUB_STATE.send_string(zmq_socket_config.TOPIC_EVENT + " " + event) logger.debug("Noxalarm send event "+ event) def push_socket_state(self): if DEBUG: logger.debug("Noxalarm
eve
identifier_name
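The four add_transition() calls above amount to a small table of (trigger, source, destination) rows, with the before/after hooks layered on top. A minimal Rust rendering of just the table, as a sketch of the same logic (hooks omitted):

#[derive(Clone, Copy, PartialEq, Debug)]
enum State { Init, Off, On, Alert, WasAlert }

/// One match arm per Machine.add_transition() row above.
fn trigger(state: State, event: &str) -> Option<State> {
    match (state, event) {
        (State::Off, "off_to_on") => Some(State::On),
        (_, "any_to_off") => Some(State::Off), // source '*' in transitions
        (State::On, "on_to_alert") => Some(State::Alert),
        (State::Alert, "alert_to_was_alert") => Some(State::WasAlert),
        _ => None, // invalid trigger for this state
    }
}

fn main() {
    assert_eq!(trigger(State::Off, "off_to_on"), Some(State::On));
    assert_eq!(trigger(State::Alert, "any_to_off"), Some(State::Off));
    assert_eq!(trigger(State::Off, "on_to_alert"), None);
}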
noxAlarmProcess.py
s import EmailAlarmAlert from utils.sms import SmsAlarmAlert from web import create_app from web.models import DBLog """ import socket to thread gateway """ from nox_alarm import zmq_socket_config """ UniPi IO config file """ UNIPI_IO_CONFIG = join(conf_path, 'software', 'nox_unipi_io.json') DEBUG = None # DEBUG = True class NoxAlarm(object): """ NoxAlarm is a state machine that tracks the state of the NX640 alarm. Initialization configures - the inputs (system running, siren) - and the command output (power-on control) Calling run_states() refreshes the input values. run_states() must be called in a while loop at a regular interval (~1 second) When the machine changes state, push_socket_event() publishes the event on a socket. The socket is passed as a parameter at init. An external program (the web app) calls push_socket_state() at regular intervals to get the machine's state (on, off, etc). """ states = ['init' , 'off' , 'on' , 'alert' , 'was_alert' ] colors = ['info' , 'success' , 'primary' , 'warning' , 'warning' ] events = ['init' , 'stop' , 'start' , 'detection' , 'serene_stop'] name = 'Nox' cycle_delay = 1 # While Loop cycle delay # --- Init functions --- def init_socket(self): context = zmq.Context() try: # Socket SUB_COMMAND receives commands (start, stop) from Flask (ThreadExtAlarm) self.SUB_COMMAND = context.socket(zmq.SUB) self.SUB_COMMAND.connect("tcp://localhost:%s" % zmq_socket_config.port_socket_noxalarm_command) self.SUB_COMMAND.setsockopt_string(zmq.SUBSCRIBE, zmq_socket_config.TOPIC_REQUEST) # Socket PUB_STATE sends status updates to Flask (ThreadNoxtAlarm) self.PUB_STATE = context.socket(zmq.PUB) self.PUB_STATE.bind("tcp://*:%s" % zmq_socket_config.port_socket_noxalarm_state) except: msg = 'Failed init ZMQ socket' logger.exception(msg) self.exit(msg, exit_now=True) def init_config(self): try: # load config from json logger.info("Loading config file %s" % (UNIPI_IO_CONFIG)) conf = dict_from_json_file(UNIPI_IO_CONFIG) # declare I/O from config self.in_alert = UnipiInput(conf['in_alert']) self.in_power = UnipiInput(conf['in_power']) self.out_power = UnipiOutput(conf['out_power']) except Exception as e: name = e.__class__.__name__ # name of the exception class, e.g. 'FileNotFoundError' if (name == 'FileNotFoundError'): logger.error('Config file not found %s' % (UNIPI_IO_CONFIG)) elif (name == 'KeyError'): logger.error('config file corrupted') else: # Generic error: log trace logger.exception('Error loading config %s' % (str(sys.exc_info()[0]))) # Whatever the exception, config failed, exit the program msg = 'Failed init Config' self.exit(msg, exit_now=True) def init_statemachine(self): # State Machine self.machine = Machine(model=self, states=NoxAlarm.states, initial='init') # Transitions self.machine.add_transition('off_to_on', 'off', 'on', before='starting', after='push_socket_state') self.machine.add_transition('any_to_off' , '*' , 'off', before='stopping', after='push_socket_state') self.machine.add_transition('on_to_alert', 'on' , 'alert', before='detection', after='push_socket_state') self.machine.add_transition('alert_to_was_alert', 'alert', 'was_alert', before='serene_stop', after='push_socket_state') def init_state(self): """ Set the real state of the system when the program (re)starts. The transition init -> any is done by calling leave_init() manually.
No email/sms alert will be sent """ self.read_inputs() if (self.in_power.value == 1) and (self.in_alert.value == 1): self.state = 'alert' elif (self.in_power.value == 1): self.state = 'on' else: self.state = 'off' self.leave_init() def __init__(self): logger.info('Starting ...') self.run = True # Flag to stop while loop. (on KeyboardInterrupt or SIGINT) # Register signal handler
signal.signal(signal.SIGTERM, self.exit_from_signals) # Supervisor Exit code (15) self.init_socket() self.init_config() self.init_statemachine() self.init_state() # Read inputs and Set state after init def __str__(self): out = '' out += ('Nox State: %s | ' % (self.state)) for input in [self.in_alert, self.in_power]: out += ('%s %s | ' % (input.name, input.value)) return(out) # --- Exit functions (Stop Process) --- def exit_from_signals(self, signal_num, frame): """ Callback called when SIGTERM received from supervisor Do call exit() with detail of the signal_num """ detail = 'signal {}'.format(signal_num) self.exit(detail) def exit(self, detail, exit_now=None): """ Stop the process (log before) 'exit_now': Option to stop now or to set self.run to False (will stop at the end of the current while loop cycle) """ self.make_DBLog('system', 'exit', 'danger', detail=detail) logger.critical('Exit caused by {}'.format(detail)) msg = 'exit' if exit_now: sys.exit() # exit here else: self.run = False # exit after end of current loop # --- Handle commands from web App --- def start_alarm(self): """ Command to start alarm. """ self.out_power.pulse() def stop_alarm(self): """ Command to stop alarm. """ self.out_power.pulse() # --- StateMachine Callbacks (Actions on state change) --- def leave_init(self): """ From state init to any other state This is not a callback (called manually) """ msg = 'init (state: {})'.format(self.state) logger.info(msg) event = 'init' self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog('system', msg, color) def starting(self): event = 'start' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def stopping(self): event = 'stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def detection(self): event = 'detection' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Alert", NoxAlarm.name, event) def serene_stop(self): event = 'serene_stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Info", NoxAlarm.name, event) @staticmethod def make_alert(*args): """ wrapper method to call mail & sms alerts """ try: SmsAlarmAlert(*args) except: logger.exception('Fail calling SmsAlarmAlert()') try: EmailAlarmAlert(*args) except: logger.exception('Fail calling EmailAlarmAlert()') @staticmethod def make_DBLog(subject, event, badge, detail=''): """ wrapper method to call DBLog.new() on alarm event """ app = create_app() with app.app_context(): DBLog.new(subject=subject, scope="nox", badge=badge, message=event, ip='-', user='-', detail=detail) # --- Push info to web App socket --- def push_socket_event(self, event): self.PUB_STATE.send_string(zmq_socket_config.TOPIC_EVENT + " " + event) logger.debug("Noxalarm send event "+ event) def push_socket_state(self): if DEBUG: logger.debug("Noxalarm send state "+
# KeyboardInterrupt (SIGINT) managed directly in main loop
random_line_split
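Both sockets use a whitespace-framed wire format, "<topic> <payload>", which is what lets ZeroMQ's prefix-based subscriptions filter on the topic. Receiving reduces to one split; a sketch (the topic strings here are hypothetical, the real ones live in zmq_socket_config):

/// Split "<topic> <payload>" as published by push_socket_event()/push_socket_state().
fn parse(msg: &str) -> Option<(&str, &str)> {
    msg.split_once(' ')
}

fn main() {
    assert_eq!(parse("noxalarm.state on"), Some(("noxalarm.state", "on")));
    assert_eq!(parse("noxalarm.event detection"), Some(("noxalarm.event", "detection")));
}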
noxAlarmProcess.py
. The socket is passed as a parameter at init. An external program (the web app) calls push_socket_state() at regular intervals to get the machine's state (on, off, etc). """ states = ['init' , 'off' , 'on' , 'alert' , 'was_alert' ] colors = ['info' , 'success' , 'primary' , 'warning' , 'warning' ] events = ['init' , 'stop' , 'start' , 'detection' , 'serene_stop'] name = 'Nox' cycle_delay = 1 # While Loop cycle delay # --- Init functions --- def init_socket(self): context = zmq.Context() try: # Socket SUB_COMMAND receives commands (start, stop) from Flask (ThreadExtAlarm) self.SUB_COMMAND = context.socket(zmq.SUB) self.SUB_COMMAND.connect("tcp://localhost:%s" % zmq_socket_config.port_socket_noxalarm_command) self.SUB_COMMAND.setsockopt_string(zmq.SUBSCRIBE, zmq_socket_config.TOPIC_REQUEST) # Socket PUB_STATE sends status updates to Flask (ThreadNoxtAlarm) self.PUB_STATE = context.socket(zmq.PUB) self.PUB_STATE.bind("tcp://*:%s" % zmq_socket_config.port_socket_noxalarm_state) except: msg = 'Failed init ZMQ socket' logger.exception(msg) self.exit(msg, exit_now=True) def init_config(self): try: # load config from json logger.info("Loading config file %s" % (UNIPI_IO_CONFIG)) conf = dict_from_json_file(UNIPI_IO_CONFIG) # declare I/O from config self.in_alert = UnipiInput(conf['in_alert']) self.in_power = UnipiInput(conf['in_power']) self.out_power = UnipiOutput(conf['out_power']) except Exception as e: name = e.__class__.__name__ # name of the exception class, e.g. 'FileNotFoundError' if (name == 'FileNotFoundError'): logger.error('Config file not found %s' % (UNIPI_IO_CONFIG)) elif (name == 'KeyError'): logger.error('config file corrupted') else: # Generic error: log trace logger.exception('Error loading config %s' % (str(sys.exc_info()[0]))) # Whatever the exception, config failed, exit the program msg = 'Failed init Config' self.exit(msg, exit_now=True) def init_statemachine(self): # State Machine self.machine = Machine(model=self, states=NoxAlarm.states, initial='init') # Transitions self.machine.add_transition('off_to_on', 'off', 'on', before='starting', after='push_socket_state') self.machine.add_transition('any_to_off' , '*' , 'off', before='stopping', after='push_socket_state') self.machine.add_transition('on_to_alert', 'on' , 'alert', before='detection', after='push_socket_state') self.machine.add_transition('alert_to_was_alert', 'alert', 'was_alert', before='serene_stop', after='push_socket_state') def init_state(self): """ Set the real state of the system when the program (re)starts. The transition init -> any is done by calling leave_init() manually. No email/sms alert will be sent """ self.read_inputs() if (self.in_power.value == 1) and (self.in_alert.value == 1): self.state = 'alert' elif (self.in_power.value == 1): self.state = 'on' else: self.state = 'off' self.leave_init() def __init__(self): logger.info('Starting ...') self.run = True # Flag to stop while loop.
(on KeyboardInterrupt or SIGINT) # Register signal handler # KeyboardInterrupt (SIGINT) managed directly in main loop signal.signal(signal.SIGTERM, self.exit_from_signals) # Supervisor Exit code (15) self.init_socket() self.init_config() self.init_statemachine() self.init_state() # Read inputs and Set state after init def __str__(self): out = '' out += ('Nox State: %s | ' % (self.state)) for input in [self.in_alert, self.in_power]: out += ('%s %s | ' % (input.name, input.value)) return(out) # --- Exit functions (Stop Process) --- def exit_from_signals(self, signal_num, frame): """ Callback called when SIGTERM received from supervisor Do call exit() with detail of the signal_num """ detail = 'signal {}'.format(signal_num) self.exit(detail) def exit(self, detail, exit_now=None): """ Stop the process (log before) 'exit_now': Option to stop now or to set self.run to False (will stop at the end of the current while loop cycle) """ self.make_DBLog('system', 'exit', 'danger', detail=detail) logger.critical('Exit caused by {}'.format(detail)) msg = 'exit' if exit_now: sys.exit() # exit here else: self.run = False # exit after end of current loop # --- Handle commands from web App --- def start_alarm(self): """ Command to start alarm. """ self.out_power.pulse() def stop_alarm(self): """ Command to stop alarm. """ self.out_power.pulse() # --- StateMachine Callbacks (Actions on state change) --- def leave_init(self): """ From state init to any other state This is not a callback (called manually) """ msg = 'init (state: {})'.format(self.state) logger.info(msg) event = 'init' self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog('system', msg, color) def starting(self): event = 'start' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def stopping(self): event = 'stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def detection(self): event = 'detection' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Alert", NoxAlarm.name, event) def serene_stop(self): event = 'serene_stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Info", NoxAlarm.name, event) @staticmethod def make_alert(*args): """ wrapper method to call mail & sms alerts """ try: SmsAlarmAlert(*args) except: logger.exception('Fail calling SmsAlarmAlert()') try: EmailAlarmAlert(*args) except: logger.exception('Fail calling EmailAlarmAlert()') @staticmethod def make_DBLog(subject, event, badge, detail=''): """ wrapper method to call DBLog.new() on alarm event """ app = create_app() with app.app_context(): DBLog.new(subject=subject, scope="nox", badge=badge, message=event, ip='-', user='-', detail=detail) # --- Push info to web App socket --- def push_socket_event(self, event): self.PUB_STATE.send_string(zmq_socket_config.TOPIC_EVENT + " " + event) logger.debug("Noxalarm send event "+ event) def push_socket_state(self): if DEBUG: logger.debug("Noxalarm send state "+ self.state) self.PUB_STATE.send_string(zmq_socket_config.TOPIC_STATE + " " + self.state) # --- Read UniPi inputs --- def read_inputs(self): """ Read physical IO 
from Unipi, update class variables. """ self.in_power.read() self.in_alert.read() def run_states(self): """ Process transitions considering UniPi inputs """ if (self.state == "off"): if (self.in_power.value == 1): self.off_to_on() elif self.state == "on": if (self.in_power.value == 0): self.any_to_off() elif (self.in_alert.value == 1): self.on_to_alert() elif self.state == "alert": if (self.in_power.value == 0): self.any_to_off() elif (self.in_alert.value == 0): self.alert_to_was_alert() elif self.state == "was_alert": if (self.in_power.v
alue == 0): self.any_to_off() def receive_r
conditional_block
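run_states() above is effectively a pure function of (current state, power input, alert input) plus transition side effects; extracting the decision tree makes it easy to test without hardware. A sketch:

#[derive(Clone, Copy, PartialEq, Debug)]
enum State { Off, On, Alert, WasAlert }

/// Same decision tree as run_states(), minus the transition side effects.
fn next_state(state: State, power: u8, alert: u8) -> State {
    match state {
        State::Off if power == 1 => State::On,
        State::On if power == 0 => State::Off,
        State::On if alert == 1 => State::Alert,
        State::Alert if power == 0 => State::Off,
        State::Alert if alert == 0 => State::WasAlert,
        State::WasAlert if power == 0 => State::Off,
        s => s, // no input change: stay put
    }
}

fn main() {
    assert_eq!(next_state(State::On, 1, 1), State::Alert);
    assert_eq!(next_state(State::Alert, 1, 0), State::WasAlert);
    assert_eq!(next_state(State::WasAlert, 0, 0), State::Off);
}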
noxAlarmProcess.py
try: # Socket SUB_COMMAND receives commands (start, stop) from Flask (ThreadExtAlarm) self.SUB_COMMAND = context.socket(zmq.SUB) self.SUB_COMMAND.connect("tcp://localhost:%s" % zmq_socket_config.port_socket_noxalarm_command) self.SUB_COMMAND.setsockopt_string(zmq.SUBSCRIBE, zmq_socket_config.TOPIC_REQUEST) # Socket PUB_STATE sends status updates to Flask (ThreadNoxtAlarm) self.PUB_STATE = context.socket(zmq.PUB) self.PUB_STATE.bind("tcp://*:%s" % zmq_socket_config.port_socket_noxalarm_state) except: msg = 'Failed init ZMQ socket' logger.exception(msg) self.exit(msg, exit_now=True) def init_config(self): try: # load config from json logger.info("Loading config file %s" % (UNIPI_IO_CONFIG)) conf = dict_from_json_file(UNIPI_IO_CONFIG) # declare I/O from config self.in_alert = UnipiInput(conf['in_alert']) self.in_power = UnipiInput(conf['in_power']) self.out_power = UnipiOutput(conf['out_power']) except Exception as e: name = e.__class__.__name__ # name of the exception class, e.g. 'FileNotFoundError' if (name == 'FileNotFoundError'): logger.error('Config file not found %s' % (UNIPI_IO_CONFIG)) elif (name == 'KeyError'): logger.error('config file corrupted') else: # Generic error: log trace logger.exception('Error loading config %s' % (str(sys.exc_info()[0]))) # Whatever the exception, config failed, exit the program msg = 'Failed init Config' self.exit(msg, exit_now=True) def init_statemachine(self): # State Machine self.machine = Machine(model=self, states=NoxAlarm.states, initial='init') # Transitions self.machine.add_transition('off_to_on', 'off', 'on', before='starting', after='push_socket_state') self.machine.add_transition('any_to_off' , '*' , 'off', before='stopping', after='push_socket_state') self.machine.add_transition('on_to_alert', 'on' , 'alert', before='detection', after='push_socket_state') self.machine.add_transition('alert_to_was_alert', 'alert', 'was_alert', before='serene_stop', after='push_socket_state') def init_state(self): """ Set the real state of the system when the program (re)starts. The transition init -> any is done by calling leave_init() manually. No email/sms alert will be sent """ self.read_inputs() if (self.in_power.value == 1) and (self.in_alert.value == 1): self.state = 'alert' elif (self.in_power.value == 1): self.state = 'on' else: self.state = 'off' self.leave_init() def __init__(self): logger.info('Starting ...') self.run = True # Flag to stop while loop.
(on KeyboardInterrupt or SIGINT) # Register signal handler # KeyboardInterrupt (SIGINT) managed directly in main loop signal.signal(signal.SIGTERM, self.exit_from_signals) # Supervisor Exit code (15) self.init_socket() self.init_config() self.init_statemachine() self.init_state() # Read inputs and Set state after init def __str__(self): out = '' out += ('Nox State: %s | ' % (self.state)) for input in [self.in_alert, self.in_power]: out += ('%s %s | ' % (input.name, input.value)) return(out) # --- Exit functions (Stop Process) --- def exit_from_signals(self, signal_num, frame): """ Callback called when SIGTERM received from supervisor Do call exit() with detail of the signal_num """ detail = 'signal {}'.format(signal_num) self.exit(detail) def exit(self, detail, exit_now=None): """ Stop the process (log before) 'exit_now': Option to stop now or to set self.run to False (will stop at the end of the current while loop cycle) """ self.make_DBLog('system', 'exit', 'danger', detail=detail) logger.critical('Exit caused by {}'.format(detail)) msg = 'exit' if exit_now: sys.exit() # exit here else: self.run = False # exit after end of current loop # --- Handle commands from web App --- def start_alarm(self): """ Command to start alarm. """ self.out_power.pulse() def stop_alarm(self): """ Command to stop alarm. """ self.out_power.pulse() # --- StateMachine Callbacks (Actions on state change) --- def leave_init(self): """ From state init to any other state This is not a callback (called manually) """ msg = 'init (state: {})'.format(self.state) logger.info(msg) event = 'init' self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog('system', msg, color) def starting(self): event = 'start' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def stopping(self): event = 'stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) def detection(self): event = 'detection' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Alert", NoxAlarm.name, event) def serene_stop(self): event = 'serene_stop' logger.info('state is %s' % (event)) self.push_socket_event(event) color = NoxAlarm.colors[NoxAlarm.events.index(event)] self.make_DBLog("event", event, color) self.make_alert("Info", NoxAlarm.name, event) @staticmethod def make_alert(*args): """ wrapper method to call mail & sms alerts """ try: SmsAlarmAlert(*args) except: logger.exception('Fail calling SmsAlarmAlert()') try: EmailAlarmAlert(*args) except: logger.exception('Fail calling EmailAlarmAlert()') @staticmethod def make_DBLog(subject, event, badge, detail=''): """ wrapper method to call DBLog.new() on alarm event """ app = create_app() with app.app_context(): DBLog.new(subject=subject, scope="nox", badge=badge, message=event, ip='-', user='-', detail=detail) # --- Push info to web App socket --- def push_socket_event(self, event): self.PUB_STATE.send_string(zmq_socket_config.TOPIC_EVENT + " " + event) logger.debug("Noxalarm send event "+ event) def push_socket_state(self): if DEBUG: logger.debug("Noxalarm send state "+ self.state) self.PUB_STATE.send_string(zmq_socket_config.TOPIC_STATE + " " + self.state) # --- Read UniPi inputs --- def read_inputs(self): """ Read physical IO 
from Unipi, update class variables. """ self.in_power.read() self.in_alert.read() def run_states(self): """ Process transitions considering UniPi inputs """ if (self.state == "off"): if (self.in_power.value == 1): self.off_to_on() elif self.state == "on": if (self.in_power.value == 0): self.any_to_off() elif (self.in_alert.value == 1): self.on_to_alert() elif self.state == "alert": if (self.in_power.value == 0): self.any_to_off() elif (self.in_alert.value == 0): self.alert_to_was_alert() elif self.state == "was_alert": if (self.in_power.value == 0): self.any_to_off() def receive_request(self): """ Check if a requ
est is received and process it Request can be Command (start, stop) Request can be "Status update" requested by web app """ try: payload = self.SUB_COMMAND.recv_string(flags=zmq.NOBLOCK) topic, command = payload.split() if (topic == zmq_socket_config.TOPIC_REQUEST): if (command == zmq_socket_config.COMMAND_START): logger.debug("Noxalarm receive COMMAND_START") self.start_alarm() elif (command == zmq_socket_config.COMMAND_STOP): logger.debug("Noxalarm receive COMMAND_STOP") self.stop_alarm() elif (command == zmq_socket_config.STATUS_UPDATE): logger.debug("Noxalarm receive REQUEST_STATUS_UPDATE") self.push_socket_state() # Else if no command received, do nothing except zmq.error.Again:
identifier_body
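The receive_request body above polls its SUB socket with zmq.NOBLOCK and treats zmq.error.Again as "nothing queued yet", which is what keeps the alarm's main loop non-blocking. A minimal sketch of that pattern, assuming an illustrative endpoint and topic string in place of the real zmq_socket_config values:

import zmq

TOPIC_REQUEST = "request"  # assumption: stands in for zmq_socket_config.TOPIC_REQUEST

ctx = zmq.Context.instance()
sub = ctx.socket(zmq.SUB)
sub.connect("tcp://127.0.0.1:5556")  # assumed endpoint, not from the original module
sub.setsockopt_string(zmq.SUBSCRIBE, TOPIC_REQUEST)

def poll_command(sub):
    """Return a pending command string, or None if the queue is empty."""
    try:
        payload = sub.recv_string(flags=zmq.NOBLOCK)
    except zmq.error.Again:
        return None  # no message waiting: the caller's loop just moves on
    topic, command = payload.split()
    return command if topic == TOPIC_REQUEST else None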
account_transform.rs
{ TypedDataField::from_path(bytes_to_path(b"stake")) } /// Account public key field. pub fn field_public_key() -> TypedDataField<ed25519_dalek::PublicKey> { TypedDataField::from_path(bytes_to_path(b"public_key")) } /// Field for a `SendInfo` stored in the sender's data. pub fn field_send(send: Hash<SendInfo>) -> TypedDataField<SendInfo> { let mut path = bytes_to_path(b"send"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// Field for tracking whether a `SendInfo` has been received in the receiver's /// data. pub fn field_received(send: Hash<SendInfo>) -> TypedDataField<bool> { let mut path = bytes_to_path(b"received"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// A context providing operations related to transforming an account (e.g. /// running actions). pub struct AccountTransform<'a, HL: HashLookup> { /// The `HashLookup` used to look up previous account data. pub hl: &'a HL, /// Whether this account is initializing. pub is_initializing: bool, /// The account being transformed. pub this_account: HashCode, /// The hash code of the last main block. pub last_main: Hash<MainBlock>, /// Which fields have been overwritten so far, and their most recent values. pub fields_set: BTreeMap<HexPath, Vec<u8>>, } #[async_trait] impl<'a, HL: HashLookup> HashLookup for AccountTransform<'a, HL> { async fn lookup_bytes(&self, hash: HashCode) -> Result<Vec<u8>, anyhow::Error> { self.hl.lookup_bytes(hash).await } } impl<'a, HL: HashLookup> AccountTransform<'a, HL> { /// Creates a new `AccountTransform`. pub fn new( hl: &'a HL, is_initializing: bool, this_account: HashCode, last_main: Hash<MainBlock>, ) -> AccountTransform<'a, HL> { AccountTransform { hl, is_initializing, this_account, last_main, fields_set: BTreeMap::new(), } } /// Gets the value of a given data field. async fn get_data_field_bytes( &self, acct: HashCode, field_name: &HexPath, ) -> Result<Option<Vec<u8>>, anyhow::Error> { if acct == self.this_account { match self.fields_set.get(field_name) { Some(x) => { return Ok(Some(x.clone())); } None => {} } } let main = self.lookup(self.last_main).await?; if let Some(acct_node) = lookup_account(self, &main.block.body, self.this_account).await? { lookup_data_in_account(self, &acct_node, field_name).await } else { Ok(None) } } /// Sets the value of a given data field. fn set_data_field_bytes( &mut self, field_name: &HexPath, value: Vec<u8>, ) -> Result<(), anyhow::Error> { self.fields_set.insert(field_name.clone(), value); Ok(()) } /// Gets the value of a given typed data field. async fn get_data_field<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<Option<T>, anyhow::Error> { match self.get_data_field_bytes(acct, &field.path).await? { None => Ok(None), Some(bs) => Ok(Some(rmp_serde::from_read(bs.as_slice())?)), } } /// Gets the value of a given typed data field, throwing an error if it is not found. pub async fn get_data_field_or_error<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<T, anyhow::Error> { match self.get_data_field(acct, field).await? { None => bail!("data field not found: {:?}", field.path), Some(x) => Ok(x), } } /// Sets the value of a given typed data field. fn set_data_field<T: Serialize>( &mut self, field: &TypedDataField<T>, value: &T, ) -> Result<(), anyhow::Error> { self.set_data_field_bytes(&field.path, rmp_serde::to_vec_named(value)?) } } /// Causes the current account to pay a fee. 
async fn pay_fee<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, fee: u128, ) -> Result<(), anyhow::Error> { let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < fee { bail!("not enough balance for fee"); } at.set_data_field(&field_balance(), &(bal - fee)) } /// Causes the current account to send. async fn do_send<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, send: &SendInfo, ) -> Result<(), anyhow::Error> { if send.sender != at.this_account { bail!("sender must be sent by this account"); } if send.last_main != at.last_main { bail!("last main of send must be the current last main"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < send.send_amount { bail!("not enough balance for send"); } let send_df = field_send(hash(send)); if at .get_data_field(at.this_account, &send_df) .await? .is_some() { bail!("that was already sent"); } at.set_data_field(&field_balance(), &(bal - send.send_amount))?; at.set_data_field(&send_df, send)?; Ok(()) } /// Causes the current account to receive. async fn do_receive<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, sender: HashCode, send_hash: Hash<SendInfo>, ) -> Result<SendInfo, anyhow::Error> { let send = at .get_data_field_or_error(sender, &field_send(send_hash)) .await?; if hash(&send) != send_hash { bail!("send hashes don't match"); } if send.recipient != at.this_account { bail!("recipient of send doesn't match recipient"); } let received_field = field_received(send_hash); let already_received = at.get_data_field(at.this_account, &received_field).await?; if already_received == Some(true) { bail!("tried to receive the same send twice"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; at.set_data_field(&field_balance(), &(bal + send.send_amount))?; at.set_data_field(&received_field, &true)?; Ok(send) } /// Gets an argument out of action arguments. fn get_arg<T: DeserializeOwned>(args: &Vec<Vec<u8>>, i: usize) -> Result<T, anyhow::Error> { if i >= args.len() { bail!("too few arguments"); } Ok(rmp_serde::from_read(args[i].as_slice())?) } /// Verifies that the argument at a given index is a signature of a modified /// version of the action where the signature itself is replaced with /// an empty vector, and also that the signature's account matches the /// given account. fn verify_signature_argument( acct: HashCode, action: &Action, i: usize, ) -> Result<(), anyhow::Error> { let sig: Signature<Action> = get_arg(&action.args, i)?; if sig.account() != acct { bail!("signature account must equal current account"); } let mut act2 = action.clone(); act2.args[i] = Vec::new(); if !verify_sig(&act2, &sig) { bail!("invalid signature"); } Ok(()) } /// Runs an action in a given `AccountTransform` context. pub async fn run_action<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, action: &Action, ) -> Result<(), anyhow::Error> { if at.last_main != action.last_main { bail!("action last main must equal current last main"); } if action.command == b"send" { if at.is_initializing
let recipient: HashCode = get_arg(&action.args, 0)?; let send_amount: u128 = get_arg(&action.args, 1)?; let initialize_spec: Option<Hash<Vec<u8>>> = get_arg(&action.args, 2)?; let message: Vec<u8> = get_arg(&action.args, 3)?; verify_signature_argument(at.this_account, action, 4)?; pay_fee(at, action.fee).await?; let send = SendInfo { last_main: action.last_main, sender: at.this_account,
{ bail!("send can't initialize an account"); }
conditional_block
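The send path in this sample chains three guards before mutating any state: the fee must be payable (pay_fee), the remaining balance must cover the amount, and the SendInfo hash must not already be recorded (the double-send check in do_send). A hedged Python re-expression of that flow, with plain dicts standing in for the hash-addressed account store:

import hashlib

def send(account, fee, amount, send_info):
    bal = account["balance"]
    if bal < fee:
        raise ValueError("not enough balance for fee")
    bal -= fee                      # pay_fee: debit the fee first
    if bal < amount:
        raise ValueError("not enough balance for send")
    key = hashlib.sha256(repr(send_info).encode()).hexdigest()
    if key in account["sends"]:     # double-send guard keyed by the send's hash
        raise ValueError("that was already sent")
    account["balance"] = bal - amount
    account["sends"][key] = send_info

acct = {"balance": 100, "sends": {}}
send(acct, fee=1, amount=40, send_info=("sender", "recipient", 40))
assert acct["balance"] == 59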
account_transform.rs
pub fn from_path(path: HexPath) -> TypedDataField<T> { TypedDataField { path, phantom: PhantomData, } } } /// Account balance field. pub fn field_balance() -> TypedDataField<u128> { TypedDataField::from_path(bytes_to_path(b"balance")) } /// Account stake field. pub fn field_stake() -> TypedDataField<u128> { TypedDataField::from_path(bytes_to_path(b"stake")) } /// Account public key field. pub fn field_public_key() -> TypedDataField<ed25519_dalek::PublicKey> { TypedDataField::from_path(bytes_to_path(b"public_key")) } /// Field for a `SendInfo` stored in the sender's data. pub fn field_send(send: Hash<SendInfo>) -> TypedDataField<SendInfo> { let mut path = bytes_to_path(b"send"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// Field for tracking whether a `SendInfo` has been received in the receiver's /// data. pub fn field_received(send: Hash<SendInfo>) -> TypedDataField<bool> { let mut path = bytes_to_path(b"received"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// A context providing operations related to transforming an account (e.g. /// running actions). pub struct AccountTransform<'a, HL: HashLookup> { /// The `HashLookup` used to look up previous account data. pub hl: &'a HL, /// Whether this account is initializing. pub is_initializing: bool, /// The account being transformed. pub this_account: HashCode, /// The hash code of the last main block. pub last_main: Hash<MainBlock>, /// Which fields have been overwritten so far, and their most recent values. pub fields_set: BTreeMap<HexPath, Vec<u8>>, } #[async_trait] impl<'a, HL: HashLookup> HashLookup for AccountTransform<'a, HL> { async fn lookup_bytes(&self, hash: HashCode) -> Result<Vec<u8>, anyhow::Error> { self.hl.lookup_bytes(hash).await } } impl<'a, HL: HashLookup> AccountTransform<'a, HL> { /// Creates a new `AccountTransform`. pub fn new( hl: &'a HL, is_initializing: bool, this_account: HashCode, last_main: Hash<MainBlock>, ) -> AccountTransform<'a, HL> { AccountTransform { hl, is_initializing, this_account, last_main, fields_set: BTreeMap::new(), } } /// Gets the value of a given data field. async fn get_data_field_bytes( &self, acct: HashCode, field_name: &HexPath, ) -> Result<Option<Vec<u8>>, anyhow::Error> { if acct == self.this_account { match self.fields_set.get(field_name) { Some(x) => { return Ok(Some(x.clone())); } None => {} } } let main = self.lookup(self.last_main).await?; if let Some(acct_node) = lookup_account(self, &main.block.body, self.this_account).await? { lookup_data_in_account(self, &acct_node, field_name).await } else { Ok(None) } } /// Sets the value of a given data field. fn set_data_field_bytes( &mut self, field_name: &HexPath, value: Vec<u8>, ) -> Result<(), anyhow::Error> { self.fields_set.insert(field_name.clone(), value); Ok(()) } /// Gets the value of a given typed data field. async fn get_data_field<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<Option<T>, anyhow::Error> { match self.get_data_field_bytes(acct, &field.path).await? { None => Ok(None), Some(bs) => Ok(Some(rmp_serde::from_read(bs.as_slice())?)), } } /// Gets the value of a given typed data field, throwing an error if it is not found. pub async fn get_data_field_or_error<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<T, anyhow::Error> { match self.get_data_field(acct, field).await? 
{ None => bail!("data field not found: {:?}", field.path), Some(x) => Ok(x), } } /// Sets the value of a given typed data field. fn set_data_field<T: Serialize>( &mut self, field: &TypedDataField<T>, value: &T, ) -> Result<(), anyhow::Error> { self.set_data_field_bytes(&field.path, rmp_serde::to_vec_named(value)?) } } /// Causes the current account to pay a fee. async fn pay_fee<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, fee: u128, ) -> Result<(), anyhow::Error> { let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < fee { bail!("not enough balance for fee"); } at.set_data_field(&field_balance(), &(bal - fee)) } /// Causes the current account to send. async fn do_send<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, send: &SendInfo, ) -> Result<(), anyhow::Error> { if send.sender != at.this_account { bail!("sender must be sent by this account"); } if send.last_main != at.last_main { bail!("last main of send must be the current last main"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < send.send_amount { bail!("not enough balance for send"); } let send_df = field_send(hash(send)); if at .get_data_field(at.this_account, &send_df) .await? .is_some() { bail!("that was already sent"); } at.set_data_field(&field_balance(), &(bal - send.send_amount))?; at.set_data_field(&send_df, send)?; Ok(()) } /// Causes the current account to receive. async fn do_receive<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, sender: HashCode, send_hash: Hash<SendInfo>, ) -> Result<SendInfo, anyhow::Error> { let send = at .get_data_field_or_error(sender, &field_send(send_hash)) .await?; if hash(&send) != send_hash { bail!("send hashes don't match"); } if send.recipient != at.this_account { bail!("recipient of send doesn't match recipient"); } let received_field = field_received(send_hash); let already_received = at.get_data_field(at.this_account, &received_field).await?; if already_received == Some(true) { bail!("tried to receive the same send twice"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; at.set_data_field(&field_balance(), &(bal + send.send_amount))?; at.set_data_field(&received_field, &true)?; Ok(send) } /// Gets an argument out of action arguments. fn get_arg<T: DeserializeOwned>(args: &Vec<Vec<u8>>, i: usize) -> Result<T, anyhow::Error> { if i >= args.len() { bail!("too few arguments"); } Ok(rmp_serde::from_read(args[i].as_slice())?) } /// Verifies that the argument at a given index is a signature of a modified /// version of the action where the signature itself is replaced with /// an empty vector, and also that the signature's account matches the /// given account. fn verify_signature_argument( acct: HashCode, action: &Action, i: usize, ) -> Result<(), anyhow::Error> { let sig: Signature<Action> = get_arg(&action.args, i)?; if sig.account() != acct { bail!("signature account must equal current account"); } let mut act2 = action.clone(); act2.args[i] = Vec::new(); if !verify_sig(&act2, &sig) { bail!("invalid signature"); } Ok(()) } /// Runs an action in a given `AccountTransform` context. pub async fn run_action<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, action: &Action, ) -> Result<(), anyhow::Error> { if at.last_main != action.last_main { bail!("action last main must equal current last main"); } if action.command == b"send" { if at.is_initializing { bail!("send can't initialize an account"); } let recipient: HashCode = get_arg(&action
impl<T> TypedDataField<T> { /// Creates a `TypedDataField` given a path.
random_line_split
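verify_signature_argument in this sample checks a signature over the action with the signature's own argument slot replaced by an empty vector, so the signature can live inside the very structure it signs. A sketch of that blank-the-slot scheme, using stdlib HMAC as a stand-in for the ed25519 signatures the Rust code actually uses, with an illustrative serialization:

import hmac, hashlib

KEY = b"account-signing-key"  # stand-in for the account's keypair

def sign_action(args, slot):
    blanked = list(args)
    blanked[slot] = b""                      # sign the action *without* the signature itself
    msg = b"|".join(blanked)
    return hmac.new(KEY, msg, hashlib.sha256).digest()

def verify_action(args, slot):
    blanked = list(args)
    sig, blanked[slot] = blanked[slot], b""  # pull the signature out, re-blank the slot
    msg = b"|".join(blanked)
    return hmac.compare_digest(sig, hmac.new(KEY, msg, hashlib.sha256).digest())

args = [b"recipient", b"42", b""]            # slot 2 reserved for the signature
args[2] = sign_action(args, 2)
assert verify_action(args, 2)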
account_transform.rs
{ TypedDataField::from_path(bytes_to_path(b"stake")) } /// Account public key field. pub fn field_public_key() -> TypedDataField<ed25519_dalek::PublicKey> { TypedDataField::from_path(bytes_to_path(b"public_key")) } /// Field for a `SendInfo` stored in the sender's data. pub fn field_send(send: Hash<SendInfo>) -> TypedDataField<SendInfo> { let mut path = bytes_to_path(b"send"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// Field for tracking whether a `SendInfo` has been received in the receiver's /// data. pub fn field_received(send: Hash<SendInfo>) -> TypedDataField<bool>
/// A context providing operations related to transforming an account (e.g. /// running actions). pub struct AccountTransform<'a, HL: HashLookup> { /// The `HashLookup` used to look up previous account data. pub hl: &'a HL, /// Whether this account is initializing. pub is_initializing: bool, /// The account being transformed. pub this_account: HashCode, /// The hash code of the last main block. pub last_main: Hash<MainBlock>, /// Which fields have been overwritten so far, and their most recent values. pub fields_set: BTreeMap<HexPath, Vec<u8>>, } #[async_trait] impl<'a, HL: HashLookup> HashLookup for AccountTransform<'a, HL> { async fn lookup_bytes(&self, hash: HashCode) -> Result<Vec<u8>, anyhow::Error> { self.hl.lookup_bytes(hash).await } } impl<'a, HL: HashLookup> AccountTransform<'a, HL> { /// Creates a new `AccountTransform`. pub fn new( hl: &'a HL, is_initializing: bool, this_account: HashCode, last_main: Hash<MainBlock>, ) -> AccountTransform<'a, HL> { AccountTransform { hl, is_initializing, this_account, last_main, fields_set: BTreeMap::new(), } } /// Gets the value of a given data field. async fn get_data_field_bytes( &self, acct: HashCode, field_name: &HexPath, ) -> Result<Option<Vec<u8>>, anyhow::Error> { if acct == self.this_account { match self.fields_set.get(field_name) { Some(x) => { return Ok(Some(x.clone())); } None => {} } } let main = self.lookup(self.last_main).await?; if let Some(acct_node) = lookup_account(self, &main.block.body, self.this_account).await? { lookup_data_in_account(self, &acct_node, field_name).await } else { Ok(None) } } /// Sets the value of a given data field. fn set_data_field_bytes( &mut self, field_name: &HexPath, value: Vec<u8>, ) -> Result<(), anyhow::Error> { self.fields_set.insert(field_name.clone(), value); Ok(()) } /// Gets the value of a given typed data field. async fn get_data_field<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<Option<T>, anyhow::Error> { match self.get_data_field_bytes(acct, &field.path).await? { None => Ok(None), Some(bs) => Ok(Some(rmp_serde::from_read(bs.as_slice())?)), } } /// Gets the value of a given typed data field, throwing an error if it is not found. pub async fn get_data_field_or_error<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<T, anyhow::Error> { match self.get_data_field(acct, field).await? { None => bail!("data field not found: {:?}", field.path), Some(x) => Ok(x), } } /// Sets the value of a given typed data field. fn set_data_field<T: Serialize>( &mut self, field: &TypedDataField<T>, value: &T, ) -> Result<(), anyhow::Error> { self.set_data_field_bytes(&field.path, rmp_serde::to_vec_named(value)?) } } /// Causes the current account to pay a fee. async fn pay_fee<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, fee: u128, ) -> Result<(), anyhow::Error> { let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < fee { bail!("not enough balance for fee"); } at.set_data_field(&field_balance(), &(bal - fee)) } /// Causes the current account to send. 
async fn do_send<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, send: &SendInfo, ) -> Result<(), anyhow::Error> { if send.sender != at.this_account { bail!("sender must be sent by this account"); } if send.last_main != at.last_main { bail!("last main of send must be the current last main"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < send.send_amount { bail!("not enough balance for send"); } let send_df = field_send(hash(send)); if at .get_data_field(at.this_account, &send_df) .await? .is_some() { bail!("that was already sent"); } at.set_data_field(&field_balance(), &(bal - send.send_amount))?; at.set_data_field(&send_df, send)?; Ok(()) } /// Causes the current account to receive. async fn do_receive<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, sender: HashCode, send_hash: Hash<SendInfo>, ) -> Result<SendInfo, anyhow::Error> { let send = at .get_data_field_or_error(sender, &field_send(send_hash)) .await?; if hash(&send) != send_hash { bail!("send hashes don't match"); } if send.recipient != at.this_account { bail!("recipient of send doesn't match recipient"); } let received_field = field_received(send_hash); let already_received = at.get_data_field(at.this_account, &received_field).await?; if already_received == Some(true) { bail!("tried to receive the same send twice"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; at.set_data_field(&field_balance(), &(bal + send.send_amount))?; at.set_data_field(&received_field, &true)?; Ok(send) } /// Gets an argument out of action arguments. fn get_arg<T: DeserializeOwned>(args: &Vec<Vec<u8>>, i: usize) -> Result<T, anyhow::Error> { if i >= args.len() { bail!("too few arguments"); } Ok(rmp_serde::from_read(args[i].as_slice())?) } /// Verifies that the argument at a given index is a signature of a modified /// version of the action where the signature itself is replaced with /// an empty vector, and also that the signature's account matches the /// given account. fn verify_signature_argument( acct: HashCode, action: &Action, i: usize, ) -> Result<(), anyhow::Error> { let sig: Signature<Action> = get_arg(&action.args, i)?; if sig.account() != acct { bail!("signature account must equal current account"); } let mut act2 = action.clone(); act2.args[i] = Vec::new(); if !verify_sig(&act2, &sig) { bail!("invalid signature"); } Ok(()) } /// Runs an action in a given `AccountTransform` context. pub async fn run_action<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, action: &Action, ) -> Result<(), anyhow::Error> { if at.last_main != action.last_main { bail!("action last main must equal current last main"); } if action.command == b"send" { if at.is_initializing { bail!("send can't initialize an account"); } let recipient: HashCode = get_arg(&action.args, 0)?; let send_amount: u128 = get_arg(&action.args, 1)?; let initialize_spec: Option<Hash<Vec<u8>>> = get_arg(&action.args, 2)?; let message: Vec<u8> = get_arg(&action.args, 3)?; verify_signature_argument(at.this_account, action, 4)?; pay_fee(at, action.fee).await?; let send = SendInfo { last_main: action.last_main, sender: at.this_account,
{ let mut path = bytes_to_path(b"received"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) }
identifier_body
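The field_received middle above builds its storage path by concatenating a fixed byte-string prefix with the send hash, both run through bytes_to_path. Assuming HexPath means one path entry per hex nibble (an inference from the name, not confirmed by this sample), the composition looks like:

def bytes_to_path(bs):
    path = []
    for b in bs:
        path.append(b >> 4)       # high nibble
        path.append(b & 0x0F)     # low nibble
    return path

def field_received(send_hash):
    path = bytes_to_path(b"received")   # fixed field prefix
    path.extend(bytes_to_path(send_hash))  # then the send's hash code
    return path

print(field_received(bytes.fromhex("a1b2"))[:4])   # nibbles of 'r', 'e': [7, 2, 6, 5]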
account_transform.rs
> { TypedDataField::from_path(bytes_to_path(b"stake")) } /// Account public key field. pub fn field_public_key() -> TypedDataField<ed25519_dalek::PublicKey> { TypedDataField::from_path(bytes_to_path(b"public_key")) } /// Field for a `SendInfo` stored in the sender's data. pub fn field_send(send: Hash<SendInfo>) -> TypedDataField<SendInfo> { let mut path = bytes_to_path(b"send"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// Field for tracking whether a `SendInfo` has been received in the receiver's /// data. pub fn field_received(send: Hash<SendInfo>) -> TypedDataField<bool> { let mut path = bytes_to_path(b"received"); path.0.extend(&bytes_to_path(&send.code).0); TypedDataField::from_path(path) } /// A context providing operations related to transforming an account (e.g. /// running actions). pub struct
<'a, HL: HashLookup> { /// The `HashLookup` used to look up previous account data. pub hl: &'a HL, /// Whether this account is initializing. pub is_initializing: bool, /// The account being transformed. pub this_account: HashCode, /// The hash code of the last main block. pub last_main: Hash<MainBlock>, /// Which fields have been overwritten so far, and their most recent values. pub fields_set: BTreeMap<HexPath, Vec<u8>>, } #[async_trait] impl<'a, HL: HashLookup> HashLookup for AccountTransform<'a, HL> { async fn lookup_bytes(&self, hash: HashCode) -> Result<Vec<u8>, anyhow::Error> { self.hl.lookup_bytes(hash).await } } impl<'a, HL: HashLookup> AccountTransform<'a, HL> { /// Creates a new `AccountTransform`. pub fn new( hl: &'a HL, is_initializing: bool, this_account: HashCode, last_main: Hash<MainBlock>, ) -> AccountTransform<'a, HL> { AccountTransform { hl, is_initializing, this_account, last_main, fields_set: BTreeMap::new(), } } /// Gets the value of a given data field. async fn get_data_field_bytes( &self, acct: HashCode, field_name: &HexPath, ) -> Result<Option<Vec<u8>>, anyhow::Error> { if acct == self.this_account { match self.fields_set.get(field_name) { Some(x) => { return Ok(Some(x.clone())); } None => {} } } let main = self.lookup(self.last_main).await?; if let Some(acct_node) = lookup_account(self, &main.block.body, self.this_account).await? { lookup_data_in_account(self, &acct_node, field_name).await } else { Ok(None) } } /// Sets the value of a given data field. fn set_data_field_bytes( &mut self, field_name: &HexPath, value: Vec<u8>, ) -> Result<(), anyhow::Error> { self.fields_set.insert(field_name.clone(), value); Ok(()) } /// Gets the value of a given typed data field. async fn get_data_field<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<Option<T>, anyhow::Error> { match self.get_data_field_bytes(acct, &field.path).await? { None => Ok(None), Some(bs) => Ok(Some(rmp_serde::from_read(bs.as_slice())?)), } } /// Gets the value of a given typed data field, throwing an error if it is not found. pub async fn get_data_field_or_error<T: DeserializeOwned>( &self, acct: HashCode, field: &TypedDataField<T>, ) -> Result<T, anyhow::Error> { match self.get_data_field(acct, field).await? { None => bail!("data field not found: {:?}", field.path), Some(x) => Ok(x), } } /// Sets the value of a given typed data field. fn set_data_field<T: Serialize>( &mut self, field: &TypedDataField<T>, value: &T, ) -> Result<(), anyhow::Error> { self.set_data_field_bytes(&field.path, rmp_serde::to_vec_named(value)?) } } /// Causes the current account to pay a fee. async fn pay_fee<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, fee: u128, ) -> Result<(), anyhow::Error> { let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < fee { bail!("not enough balance for fee"); } at.set_data_field(&field_balance(), &(bal - fee)) } /// Causes the current account to send. async fn do_send<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, send: &SendInfo, ) -> Result<(), anyhow::Error> { if send.sender != at.this_account { bail!("sender must be sent by this account"); } if send.last_main != at.last_main { bail!("last main of send must be the current last main"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; if bal < send.send_amount { bail!("not enough balance for send"); } let send_df = field_send(hash(send)); if at .get_data_field(at.this_account, &send_df) .await? 
.is_some() { bail!("that was already sent"); } at.set_data_field(&field_balance(), &(bal - send.send_amount))?; at.set_data_field(&send_df, send)?; Ok(()) } /// Causes the current account to receive. async fn do_receive<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, sender: HashCode, send_hash: Hash<SendInfo>, ) -> Result<SendInfo, anyhow::Error> { let send = at .get_data_field_or_error(sender, &field_send(send_hash)) .await?; if hash(&send) != send_hash { bail!("send hashes don't match"); } if send.recipient != at.this_account { bail!("recipient of send doesn't match recipient"); } let received_field = field_received(send_hash); let already_received = at.get_data_field(at.this_account, &received_field).await?; if already_received == Some(true) { bail!("tried to receive the same send twice"); } let bal = at .get_data_field_or_error(at.this_account, &field_balance()) .await?; at.set_data_field(&field_balance(), &(bal + send.send_amount))?; at.set_data_field(&received_field, &true)?; Ok(send) } /// Gets an argument out of action arguments. fn get_arg<T: DeserializeOwned>(args: &Vec<Vec<u8>>, i: usize) -> Result<T, anyhow::Error> { if i >= args.len() { bail!("too few arguments"); } Ok(rmp_serde::from_read(args[i].as_slice())?) } /// Verifies that the argument at a given index is a signature of a modified /// version of the action where the signature itself is replaced with /// an empty vector, and also that the signature's account matches the /// given account. fn verify_signature_argument( acct: HashCode, action: &Action, i: usize, ) -> Result<(), anyhow::Error> { let sig: Signature<Action> = get_arg(&action.args, i)?; if sig.account() != acct { bail!("signature account must equal current account"); } let mut act2 = action.clone(); act2.args[i] = Vec::new(); if !verify_sig(&act2, &sig) { bail!("invalid signature"); } Ok(()) } /// Runs an action in a given `AccountTransform` context. pub async fn run_action<'a, HL: HashLookup>( at: &mut AccountTransform<'a, HL>, action: &Action, ) -> Result<(), anyhow::Error> { if at.last_main != action.last_main { bail!("action last main must equal current last main"); } if action.command == b"send" { if at.is_initializing { bail!("send can't initialize an account"); } let recipient: HashCode = get_arg(&action.args, 0)?; let send_amount: u128 = get_arg(&action.args, 1)?; let initialize_spec: Option<Hash<Vec<u8>>> = get_arg(&action.args, 2)?; let message: Vec<u8> = get_arg(&action.args, 3)?; verify_signature_argument(at.this_account, action, 4)?; pay_fee(at, action.fee).await?; let send = SendInfo { last_main: action.last_main, sender: at.this_account,
AccountTransform
identifier_name
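AccountTransform, the identifier this sample asks for, is essentially a read-your-writes overlay: get_data_field_bytes consults fields_set before falling back to the state committed at last_main, and set_data_field_bytes only ever touches the local map. A minimal sketch of that overlay with a dict as the committed store:

class TransformOverlay:
    def __init__(self, committed):
        self.committed = committed     # state as of the last main block
        self.fields_set = {}           # uncommitted writes, keyed by field path

    def get(self, path):
        if path in self.fields_set:    # prefer our own pending write
            return self.fields_set[path]
        return self.committed.get(path)

    def set(self, path, value):
        self.fields_set[path] = value  # never mutates the committed store

t = TransformOverlay({"balance": 100})
t.set("balance", 60)
assert t.get("balance") == 60          # the transform sees its own write
assert t.committed["balance"] == 100   # committed state is untouched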
runsex_final_VIDEO_Aug2018.py
(pos_ver, pos_hor, img, lado): #if verbose: print 'Radius %i' % radius counts=np.sum(img[pos_ver : pos_ver + lado, pos_hor : pos_hor + lado]) numpix=lado**2 return counts,numpix def get_error_model(img,seg,apmin,apmax,numap): hdufits_ima = fits.open(img) imag_data = hdufits_ima[0].data ver_max,hor_max = imag_data.shape hdufits_seg = fits.open(seg) segm_data = hdufits_seg[0].data filtered_segm_data=ndimage.gaussian_filter(segm_data, 2) segm_mask = (filtered_segm_data > 0) matrix_used = np.zeros(np.shape(segm_data)) pix_count=[] n=0 while n < 3001: #segm_mask = (segm_data > 0) random_center = np.random.random_integers(300 ,np.amin(np.array([ver_max,hor_max]))-300, 2) if (all((~segm_mask[random_center[0], random_center[1]]).flat)) and (np.sum(matrix_used[random_center[0], random_center[1]])==0): pix_count.append(imag_data[random_center[0]][random_center[1]]) n += 1 matrix_used[random_center[0], random_center[1]]=22 pix_count=np.array(pix_count) #pix_count=pix_count[np.where((pix_count<(np.mean(pix_count)+3.5*np.std(pix_count))) & (pix_count>(np.mean(pix_count)-3.5*np.std(pix_count))))] sigma1=np.std(pix_count) #plt.hist(pix_count,50) #plt.savefig(img+'_sigma1_hist.png') #plt.close('all') apertures=np.linspace(apmin,apmax,num=numap) median=[] mean=[] std=[] npix=[] for i in xrange(len(apertures)): median0,mean0,std0,npix0=empty_apertures(img, seg, apertures[i]) median.append(median0) mean.append(mean0) std.append(std0) npix.append(npix0) npix=np.array(npix) npix=npix[np.where(npix>0)] median=np.array(median)[np.where(npix>0)] mean=np.array(mean)[np.where(npix>0)] std=np.array(std)[np.where(npix>0)] N=np.sqrt(npix) x=np.log10(N) y=np.log10(std)-np.log10(sigma1) coefficients=np.polyfit(x,y,1) alpha=10**(coefficients[1]) beta=coefficients[0] #plt.plot(N,std,'r*') #plt.plot(N,sigma1*alpha*(N**beta),'b-') #plt.yscale('log') #plt.savefig(img+'_model.png') #plt.close('all') return (sigma1,alpha,beta) ################################################################### #cargamos la lista que contiene los nombres de los archivos #datos=np.loadtxt('lista_datos.txt',dtype='str').transpose() #definimos el seeing al cual se cambiaran todas las imagenes: sigma0=2.95 #valor del seeing al cual se convertiran las imagenes ################################################################### #se genera la lista que contiene las ganancias y saturaciones para cada chip. GAIN=[3.66,4.25,3.95,4.16,4.24,4.11,3.84,4.22,4.53,3.97,4.62,3.95,5.67,4.78,3.99,4.96] SAT=[33000.0,32000.0,33000.0,32000.0,24000.0,36000.0,35000.0,33000.0,35000.0,35000.0,37000.0,34000.0,33000.0,35000.0,34000.0,34000.0] SAT=np.array(SAT) SAT=SAT*0.9 ################################################################### #se crea un archivo que contiene informacion de cada imagen, como el dia juliano, el zp y el error en el zp. 
arch2=open("../stat/datos_cat_"+field+"_"+filt+".txt","w") arch3=open("../stat/failed_cat_"+field+"_"+filt+".txt","w") #arch2.writelines("imagen \t chip \t JD \t zp \t errzp \t time \t extinct \t air \n") ################################################################### #separamos los chips en distintos fits, extraemos la info de los headers y corremos sextractor for i in xrange(len(list_stacks)): #for i in range(389,len(datos)): print list_img_name[i] arch = fits.open(list_stacks[i]) #arch=pf.open(datos[i]+'_'+filt+'.fits') arch_conf=fits.open(list_stacks_conf[i]) #arch_conf=pf.open(datos[i]+'_conf_'+filt+'.fits') head0=arch[0].header tim=head0['EXPTIME'] air_e=head0['HIERARCH ESO TEL AIRM END'] air_s=head0['HIERARCH ESO TEL AIRM START'] air=(air_e+air_s)*0.5 print list_img_name[i], tim for j in range(1,17): try: chip=arch[j].data head=arch[j].header seeing=head['SEEING'] print seeing if (seeing>0) and (seeing<sigma0) : try: sigma=(1/(np.sqrt(8*np.log(2))))*np.sqrt((sigma0**2)-(seeing**2)) b_chip=ndimage.gaussian_filter(chip, sigma) #aux=head['HIERARCH ESO DET CHIP PXSPACE'] #aux=float(aux) #head['HIERARCH ESO DET CHIP PXSPACE']=0 fits.writeto("temp/"+list_img_name[i]+'_'+str(j)+'_'+filt+'.fits',data=b_chip,header=head,output_verify='fix') chip_conf=arch_conf[j].data head_conf=arch_conf[j].header b_chip_conf=ndimage.gaussian_filter(chip_conf, sigma) #auxc=head_conf['HIERARCH ESO DET CHIP PXSPACE'] #auxc=float(auxc) #head_conf['HIERARCH ESO DET CHIP PXSPACE']=auxc fits.writeto("temp/"+list_img_name[i]+'_conf_'+str(j)+'_'+filt+'.fits',data=b_chip_conf,header=head_conf,output_verify='fix') filter_image='gauss_2.5_5x5.conv' seeing=seeing*0.339 seeing0=sigma0*0.339 zp=head['MAGZPT']+2.5*np.log10(tim) errzp=head['MAGZRR'] jd=head['MJD-OBS'] ext=head['EXTINCT'] z=j-1 #gain=GAIN[z]#*time gain=4.19 sat=SAT[z] #print gain #cmd1="sex "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".fits -c "+defa+"/vistapp.sex -CATALOG_TYPE FITS_1.0 -CATALOG_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".cat.fits"+" -PARAMETERS_NAME "+defa+"/paula4.param -FILTER_NAME "+defa+"/"+filter_image+" -MAG_ZEROPOINT "+str(zp)+" -WEIGHT_IMAGE "+"temp/"+list_img_name[i]+"_conf_"+str(j)+"_"+filt+".fits -WEIGHT_TYPE MAP_WEIGHT -SATUR_LEVEL "+str(sat)+" -GAIN "+str(gain)+" -SEEING_FWHM "+str(seeing0)+" -STARNNW_NAME "+defa+"/default.nnw -CHECKIMAGE_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".seg -CHECKIMAGE_TYPE SEGMENTATION -PHOT_APERTURES 4,6,9,20,30" cmd1="sex "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".fits -c "+defa+"/vistapp.sex -CATALOG_TYPE FITS_1.0 -CATALOG_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".cat.fits"+" -PARAMETERS_NAME "+defa+"/paula4.param -FILTER_NAME "+defa+"/"+filter_image+" -MAG_ZEROPOINT "+str(zp)+" -WEIGHT_IMAGE "+"temp/"+list_img_name[i]+"_conf_"+str(j)+"_"+filt+".fits -WEIGHT_TYPE MAP_WEIGHT -SATUR_LEVEL "+str(sat)+"
get_flux
identifier_name
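get_flux, the identifier targeted here, is plain box photometry: sum a lado x lado window anchored at (pos_ver, pos_hor) and report lado**2 pixels. A small usage sketch on synthetic data:

import numpy as np

def get_flux(pos_ver, pos_hor, img, lado):
    counts = np.sum(img[pos_ver:pos_ver + lado, pos_hor:pos_hor + lado])
    numpix = lado ** 2
    return counts, numpix

img = np.ones((100, 100))
counts, numpix = get_flux(10, 20, img, 5)
assert counts == 25.0 and numpix == 25

Note that the slice is silently clipped at the array edge while numpix still assumes a full box, which is presumably why the callers only draw centers at least 300-350 px inside the frame.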
runsex_final_VIDEO_Aug2018.py
empty_ap=np.array(empty_ap) #plt.hist(empty_ap,50) #plt.savefig(image+'_hist_'+str(lado)+'.png') #plt.close('all') empty_ap2=empty_ap[np.where((empty_ap<(np.mean(empty_ap)+3*np.std(empty_ap))) & (empty_ap>(np.mean(empty_ap)-3*np.std(empty_ap))))] return (np.median(empty_ap2),np.mean(empty_ap2),np.std(empty_ap2),npix) def get_flux(pos_ver, pos_hor, img, lado): #if verbose: print 'Radius %i' % radius counts=np.sum(img[pos_ver : pos_ver + lado, pos_hor : pos_hor + lado]) numpix=lado**2 return counts,numpix def get_error_model(img,seg,apmin,apmax,numap): hdufits_ima = fits.open(img) imag_data = hdufits_ima[0].data ver_max,hor_max = imag_data.shape hdufits_seg = fits.open(seg) segm_data = hdufits_seg[0].data filtered_segm_data=ndimage.gaussian_filter(segm_data, 2) segm_mask = (filtered_segm_data > 0) matrix_used = np.zeros(np.shape(segm_data)) pix_count=[] n=0 while n < 3001: #segm_mask = (segm_data > 0) random_center = np.random.random_integers(300 ,np.amin(np.array([ver_max,hor_max]))-300, 2) if (all((~segm_mask[random_center[0], random_center[1]]).flat)) and (np.sum(matrix_used[random_center[0], random_center[1]])==0): pix_count.append(imag_data[random_center[0]][random_center[1]]) n += 1 matrix_used[random_center[0], random_center[1]]=22 pix_count=np.array(pix_count) #pix_count=pix_count[np.where((pix_count<(np.mean(pix_count)+3.5*np.std(pix_count))) & (pix_count>(np.mean(pix_count)-3.5*np.std(pix_count))))] sigma1=np.std(pix_count) #plt.hist(pix_count,50) #plt.savefig(img+'_sigma1_hist.png') #plt.close('all') apertures=np.linspace(apmin,apmax,num=numap) median=[] mean=[] std=[] npix=[] for i in xrange(len(apertures)): median0,mean0,std0,npix0=empty_apertures(img, seg, apertures[i]) median.append(median0) mean.append(mean0) std.append(std0) npix.append(npix0) npix=np.array(npix) npix=npix[np.where(npix>0)] median=np.array(median)[np.where(npix>0)] mean=np.array(mean)[np.where(npix>0)] std=np.array(std)[np.where(npix>0)] N=np.sqrt(npix) x=np.log10(N) y=np.log10(std)-np.log10(sigma1) coefficients=np.polyfit(x,y,1) alpha=10**(coefficients[1]) beta=coefficients[0] #plt.plot(N,std,'r*') #plt.plot(N,sigma1*alpha*(N**beta),'b-') #plt.yscale('log') #plt.savefig(img+'_model.png') #plt.close('all') return (sigma1,alpha,beta) ################################################################### #cargamos la lista que contiene los nombres de los archivos #datos=np.loadtxt('lista_datos.txt',dtype='str').transpose() #definimos el seeing al cual se cambiaran todas las imagenes: sigma0=2.95 #valor del seeing al cual se convertiran las imagenes ################################################################### #se genera la lista que contiene las ganancias y saturaciones para cada chip. GAIN=[3.66,4.25,3.95,4.16,4.24,4.11,3.84,4.22,4.53,3.97,4.62,3.95,5.67,4.78,3.99,4.96] SAT=[33000.0,32000.0,33000.0,32000.0,24000.0,36000.0,35000.0,33000.0,35000.0,35000.0,37000.0,34000.0,33000.0,35000.0,34000.0,34000.0] SAT=np.array(SAT) SAT=SAT*0.9 ################################################################### #se crea un archivo que contiene informacion de cada imagen, como el dia juliano, el zp y el error en el zp. 
arch2=open("../stat/datos_cat_"+field+"_"+filt+".txt","w") arch3=open("../stat/failed_cat_"+field+"_"+filt+".txt","w") #arch2.writelines("imagen \t chip \t JD \t zp \t errzp \t time \t extinct \t air \n") ################################################################### #separamos los chips en distintos fits, extraemos la info de los headers y corremos sextractor for i in xrange(len(list_stacks)): #for i in range(389,len(datos)): print list_img_name[i] arch = fits.open(list_stacks[i]) #arch=pf.open(datos[i]+'_'+filt+'.fits') arch_conf=fits.open(list_stacks_conf[i]) #arch_conf=pf.open(datos[i]+'_conf_'+filt+'.fits') head0=arch[0].header tim=head0['EXPTIME'] air_e=head0['HIERARCH ESO TEL AIRM END'] air_s=head0['HIERARCH ESO TEL AIRM START'] air=(air_e+air_s)*0.5 print list_img_name[i], tim for j in range(1,17): try: chip=arch[j].data head=arch[j].header seeing=head['SEEING'] print seeing if (seeing>0) and (seeing<sigma0) : try: sigma=(1/(np.sqrt(8*np.log(2))))*np.sqrt((sigma0**2)-(seeing**2)) b_chip=ndimage.gaussian_filter(chip, sigma) #aux=head['HIERARCH ESO DET CHIP PXSPACE'] #aux=float(aux) #head['HIERARCH ESO DET CHIP PXSPACE']=0 fits.writeto("temp/"+list_img_name[i]+'_'+str(j)+'_'+filt+'.fits',data=b_chip,header=head,output_verify='fix') chip_conf=arch_conf[j].data head_conf=arch_conf[j].header b_chip_conf=ndimage.gaussian_filter(chip_conf, sigma) #auxc=head_conf['HIERARCH ESO DET CHIP PXSPACE'] #auxc=float(auxc) #head_conf['HIERARCH ESO DET CHIP PXSPACE']=auxc fits.writeto("temp/"+list_img_name[i]+'_conf_'+str(j)+'_'+filt+'.fits',data=b_chip_conf,header=head_conf,output_verify='fix') filter_image='gauss_2.5_5x5.conv' seeing=seeing*0.339 seeing0=sigma0*0.339 zp=head['MAGZPT']+2.5*np.log10(tim) errzp=head['MAGZRR'] jd=head['MJD-OBS'] ext=head['EXTINCT'] z=j-1 #gain=GAIN[z]#*time gain=4.19 sat=SAT[z] #print gain #cmd1="sex "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".fits -c "+defa+"/vistapp.sex -CATALOG_TYPE FITS_1.0 -CATALOG_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".cat.fits"+" -PARAMETERS_NAME "+defa+"/paula4.param -FILTER_NAME "+defa+"/"+filter_image+" -MAG_ZEROPOINT "+str(zp)+" -WEIGHT_IMAGE "+"temp/"+list_img_name[i]+"_conf_"+str(j)+"_"+filt+".fits -WEIGHT_TYPE MAP_WEIGHT -SATUR_LEVEL "+str(sat)+" -GAIN "+str(gain)+" -SEEING_FWHM "+str(seeing0)+" -STARNNW_NAME "+defa+"/default.nnw -CHECKIMAGE_NAME "+"temp/"+list_img
counts,npix=get_flux(random_center[0], random_center[1], imag_data, int(lado)) empty_ap.append(counts) n += 1 matrix_used[random_center[0]:random_center[0]+lado, random_center[1]:random_center[1] + lado ]=22
conditional_block
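The conditional block in this sample is the acceptance branch of a rejection sampler: a random box position is kept only if it overlaps neither the (smoothed) segmentation mask nor any previously accepted box, which the script tracks by stamping used pixels with a sentinel value. A compact sketch of that sampler with illustrative sizes:

import numpy as np

rng = np.random.default_rng(0)
segm_mask = np.zeros((500, 500), dtype=bool)   # True where detected sources live
used = np.zeros_like(segm_mask)                # pixels claimed by accepted boxes
lado, wanted, boxes = 5, 100, []

while len(boxes) < wanted:
    v, h = rng.integers(50, 450, size=2)       # stay away from the frame edges
    window_free = not segm_mask[v:v + lado, h:h + lado].any()
    not_reused = used[v:v + lado, h:h + lado].sum() == 0
    if window_free and not_reused:
        boxes.append((v, h))
        used[v:v + lado, h:h + lado] = True    # claim the box so it can't repeat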
runsex_final_VIDEO_Aug2018.py
00.0,33000.0,35000.0,34000.0,34000.0] SAT=np.array(SAT) SAT=SAT*0.9 ################################################################### #se crea un archivo que contiene informacion de cada imagen, como el dia juliano, el zp y el error en el zp. arch2=open("../stat/datos_cat_"+field+"_"+filt+".txt","w") arch3=open("../stat/failed_cat_"+field+"_"+filt+".txt","w") #arch2.writelines("imagen \t chip \t JD \t zp \t errzp \t time \t extinct \t air \n") ################################################################### #separamos los chips en distintos fits, extraemos la info de los headers y corremos sextractor for i in xrange(len(list_stacks)): #for i in range(389,len(datos)): print list_img_name[i] arch = fits.open(list_stacks[i]) #arch=pf.open(datos[i]+'_'+filt+'.fits') arch_conf=fits.open(list_stacks_conf[i]) #arch_conf=pf.open(datos[i]+'_conf_'+filt+'.fits') head0=arch[0].header tim=head0['EXPTIME'] air_e=head0['HIERARCH ESO TEL AIRM END'] air_s=head0['HIERARCH ESO TEL AIRM START'] air=(air_e+air_s)*0.5 print list_img_name[i], tim for j in range(1,17): try: chip=arch[j].data head=arch[j].header seeing=head['SEEING'] print seeing if (seeing>0) and (seeing<sigma0) : try: sigma=(1/(np.sqrt(8*np.log(2))))*np.sqrt((sigma0**2)-(seeing**2)) b_chip=ndimage.gaussian_filter(chip, sigma) #aux=head['HIERARCH ESO DET CHIP PXSPACE'] #aux=float(aux) #head['HIERARCH ESO DET CHIP PXSPACE']=0 fits.writeto("temp/"+list_img_name[i]+'_'+str(j)+'_'+filt+'.fits',data=b_chip,header=head,output_verify='fix') chip_conf=arch_conf[j].data head_conf=arch_conf[j].header b_chip_conf=ndimage.gaussian_filter(chip_conf, sigma) #auxc=head_conf['HIERARCH ESO DET CHIP PXSPACE'] #auxc=float(auxc) #head_conf['HIERARCH ESO DET CHIP PXSPACE']=auxc fits.writeto("temp/"+list_img_name[i]+'_conf_'+str(j)+'_'+filt+'.fits',data=b_chip_conf,header=head_conf,output_verify='fix') filter_image='gauss_2.5_5x5.conv' seeing=seeing*0.339 seeing0=sigma0*0.339 zp=head['MAGZPT']+2.5*np.log10(tim) errzp=head['MAGZRR'] jd=head['MJD-OBS'] ext=head['EXTINCT'] z=j-1 #gain=GAIN[z]#*time gain=4.19 sat=SAT[z] #print gain #cmd1="sex "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".fits -c "+defa+"/vistapp.sex -CATALOG_TYPE FITS_1.0 -CATALOG_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".cat.fits"+" -PARAMETERS_NAME "+defa+"/paula4.param -FILTER_NAME "+defa+"/"+filter_image+" -MAG_ZEROPOINT "+str(zp)+" -WEIGHT_IMAGE "+"temp/"+list_img_name[i]+"_conf_"+str(j)+"_"+filt+".fits -WEIGHT_TYPE MAP_WEIGHT -SATUR_LEVEL "+str(sat)+" -GAIN "+str(gain)+" -SEEING_FWHM "+str(seeing0)+" -STARNNW_NAME "+defa+"/default.nnw -CHECKIMAGE_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".seg -CHECKIMAGE_TYPE SEGMENTATION -PHOT_APERTURES 4,6,9,20,30" cmd1="sex "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".fits -c "+defa+"/vistapp.sex -CATALOG_TYPE FITS_1.0 -CATALOG_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".cat.fits"+" -PARAMETERS_NAME "+defa+"/paula4.param -FILTER_NAME "+defa+"/"+filter_image+" -MAG_ZEROPOINT "+str(zp)+" -WEIGHT_IMAGE "+"temp/"+list_img_name[i]+"_conf_"+str(j)+"_"+filt+".fits -WEIGHT_TYPE MAP_WEIGHT -SATUR_LEVEL "+str(sat)+" -GAIN "+str(gain)+" -SEEING_FWHM "+str(seeing0)+" -STARNNW_NAME "+defa+"/default.nnw -CHECKIMAGE_NAME "+"temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".seg,temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".subs -CHECKIMAGE_TYPE SEGMENTATION,-BACKGROUND -PHOT_APERTURES 4,6,9,20,30" os.system(cmd1) hdufits_cat = fits.open("temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".cat.fits") cata = hdufits_cat[1].data err_1= 
cata['FLUXERR_APER'][:,0] err_2= cata['FLUXERR_APER'][:,1] err_3= cata['FLUXERR_APER'][:,2] err_4= cata['FLUXERR_APER'][:,3] err_5= cata['FLUXERR_APER'][:,4] flux_1= cata['FLUX_APER'][:,0] flux_2= cata['FLUX_APER'][:,1] flux_3= cata['FLUX_APER'][:,2] flux_4= cata['FLUX_APER'][:,3] flux_5= cata['FLUX_APER'][:,4] flag= cata['FLAGS'] #sigma1,alpha,beta=get_error_model("temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".fits", "temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".seg",1,7,6) sigma1,alpha,beta=get_error_model("temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".subs", "temp/"+list_img_name[i]+"_"+str(j)+"_"+filt+".seg",2,10,9) print sigma1,alpha,beta npix1=((4*0.5)**2)*np.pi sigma_phot_1=np.sqrt(((sigma1**2)*(alpha**2)*(npix1**beta))+(flux_1/gain)) npix2=((6*0.5)**2)*np.pi sigma_phot_2=np.sqrt(((sigma1**2)*(alpha**2)*(npix2**beta))+(flux_2/gain)) #print np.amin(np.sqrt((((alpha**2)*(npix2**beta))+(flux_2/((sigma1**2)*gain)))/(npix2+(flux_2/((sigma1**2)*gain))))) #npix3=get_npix(int(9*0.5)) #print npix3 npix3=((9*0.5)**2)*np.pi #print npix3 #sigma_phot_3=np.sqrt((err_3**2)*((((alpha**2)*(npix3**beta))+(flux_3/((sigma1**2)*gain)))/(npix3+(flux_3/((sigma1**2)*gain))))) sigma_phot_3=np.sqrt(((sigma1**2)*(alpha**2)*(npix3**beta))+(flux_3/gain)) #npix4=get_npix(int(20*0.5)) #print npix4 npix4=((20*0.5)**2)*np.pi #print npix4 #sigma_phot_4=np.sqrt((err_4**2)*((((alpha**2)*(npix4**beta))+(flux_4/((sigma1**2)*gain)))/(npix4+(flux_4/((sigma1**2)*gain))))) sigma_phot_4=np.sqrt(((sigma1**2)*(alpha**2)*(npix4**beta))+(flux_4/gain)) orig_cols=cata.columns c1=fits.Column(name='FLUXERR_OP',format='E',array=sigma_phot_1) c2=fits.Column(name='FLUXERR_2',format='E',array=sigma_phot_2) c3=fits.Column(name='FLUXERR_3',format='E',array=sigma_phot_3) c4=fits.Column(name='FLUXERR_7',format='E',array=sigma_phot_4) c5=fits.Column(name='FLUX_2',format='E',array=flux_2)
random_line_split
runsex_final_VIDEO_Aug2018.py
random_center = np.random.random_integers(350 ,np.amin(np.array([ver_max,hor_max]))-350, 2) #segm_mask = (segm_data > 0) if (all((~segm_mask[random_center[0]:random_center[0]+lado, random_center[1]:random_center[1] + lado]).flat)) and (np.sum(matrix_used[random_center[0]:random_center[0]+lado, random_center[1]:random_center[1] + lado])==0): counts,npix=get_flux(random_center[0], random_center[1], imag_data, int(lado)) empty_ap.append(counts) n += 1 matrix_used[random_center[0]:random_center[0]+lado, random_center[1]:random_center[1] + lado ]=22 empty_ap=np.array(empty_ap) #plt.hist(empty_ap,50) #plt.savefig(image+'_hist_'+str(lado)+'.png') #plt.close('all') empty_ap2=empty_ap[np.where((empty_ap<(np.mean(empty_ap)+3*np.std(empty_ap))) & (empty_ap>(np.mean(empty_ap)-3*np.std(empty_ap))))] return (np.median(empty_ap2),np.mean(empty_ap2),np.std(empty_ap2),npix) def get_flux(pos_ver, pos_hor, img, lado): #if verbose: print 'Radius %i' % radius counts=np.sum(img[pos_ver : pos_ver + lado, pos_hor : pos_hor + lado]) numpix=lado**2 return counts,numpix def get_error_model(img,seg,apmin,apmax,numap): hdufits_ima = fits.open(img) imag_data = hdufits_ima[0].data ver_max,hor_max = imag_data.shape hdufits_seg = fits.open(seg) segm_data = hdufits_seg[0].data filtered_segm_data=ndimage.gaussian_filter(segm_data, 2) segm_mask = (filtered_segm_data > 0) matrix_used = np.zeros(np.shape(segm_data)) pix_count=[] n=0 while n < 3001: #segm_mask = (segm_data > 0) random_center = np.random.random_integers(300 ,np.amin(np.array([ver_max,hor_max]))-300, 2) if (all((~segm_mask[random_center[0], random_center[1]]).flat)) and (np.sum(matrix_used[random_center[0], random_center[1]])==0): pix_count.append(imag_data[random_center[0]][random_center[1]]) n += 1 matrix_used[random_center[0], random_center[1]]=22 pix_count=np.array(pix_count) #pix_count=pix_count[np.where((pix_count<(np.mean(pix_count)+3.5*np.std(pix_count))) & (pix_count>(np.mean(pix_count)-3.5*np.std(pix_count))))] sigma1=np.std(pix_count) #plt.hist(pix_count,50) #plt.savefig(img+'_sigma1_hist.png') #plt.close('all') apertures=np.linspace(apmin,apmax,num=numap) median=[] mean=[] std=[] npix=[] for i in xrange(len(apertures)): median0,mean0,std0,npix0=empty_apertures(img, seg, apertures[i]) median.append(median0) mean.append(mean0) std.append(std0) npix.append(npix0) npix=np.array(npix) npix=npix[np.where(npix>0)] median=np.array(median)[np.where(npix>0)] mean=np.array(mean)[np.where(npix>0)] std=np.array(std)[np.where(npix>0)] N=np.sqrt(npix) x=np.log10(N) y=np.log10(std)-np.log10(sigma1) coefficients=np.polyfit(x,y,1) alpha=10**(coefficients[1]) beta=coefficients[0] #plt.plot(N,std,'r*') #plt.plot(N,sigma1*alpha*(N**beta),'b-') #plt.yscale('log') #plt.savefig(img+'_model.png') #plt.close('all') return (sigma1,alpha,beta) ################################################################### #cargamos la lista que contiene los nombres de los archivos #datos=np.loadtxt('lista_datos.txt',dtype='str').transpose() #definimos el seeing al cual se cambiaran todas las imagenes: sigma0=2.95 #valor del seeing al cual se convertiran las imagenes ################################################################### #se genera la lista que contiene las ganancias y saturaciones para cada chip. 
GAIN=[3.66,4.25,3.95,4.16,4.24,4.11,3.84,4.22,4.53,3.97,4.62,3.95,5.67,4.78,3.99,4.96] SAT=[33000.0,32000.0,33000.0,32000.0,24000.0,36000.0,35000.0,33000.0,35000.0,35000.0,37000.0,34000.0,33000.0,35000.0,34000.0,34000.0] SAT=np.array(SAT) SAT=SAT*0.9 ################################################################### #se crea un archivo que contiene informacion de cada imagen, como el dia juliano, el zp y el error en el zp. arch2=open("../stat/datos_cat_"+field+"_"+filt+".txt","w") arch3=open("../stat/failed_cat_"+field+"_"+filt+".txt","w") #arch2.writelines("imagen \t chip \t JD \t zp \t errzp \t time \t extinct \t air \n") ################################################################### #separamos los chips en distintos fits, extraemos la info de los headers y corremos sextractor for i in xrange(len(list_stacks)): #for i in range(389,len(datos)): print list_img_name[i] arch = fits.open(list_stacks[i]) #arch=pf.open(datos[i]+'_'+filt+'.fits') arch_conf=fits.open(list_stacks_conf[i]) #arch_conf=pf.open(datos[i]+'_conf_'+filt+'.fits') head0=arch[0].header tim=head0['EXPTIME'] air_e=head0['HIERARCH ESO TEL AIRM END'] air_s=head0['HIERARCH ESO TEL AIRM START'] air=(air_e+air_s)*0.5 print list_img_name[i], tim for j in range(1,17): try: chip=arch[j].data head=arch[j].header seeing=head['SEEING'] print seeing if (seeing>0) and (seeing<sigma0) : try: sigma=(1/(np.sqrt(8*np.log(2))))*np.sqrt((sigma0**2)-(seeing**2)) b_chip=ndimage.gaussian_filter(chip, sigma) #aux=head['HIERARCH ESO DET CHIP PXSPACE'] #aux=float(aux) #head['HIERARCH ESO DET CHIP PXSPACE']=0 fits.writeto("temp/"+list_img_name[i]+'_'+str(j)+'_'+filt+'.fits',data=b_chip,header=head,output_verify='fix') chip_conf=arch_conf[j].data head_conf=arch_conf[j].header b_chip_conf=ndimage.gaussian_filter(chip_conf, sigma) #auxc=head_conf['HIERARCH ESO DET CHIP PXSPACE'] #auxc=float(auxc) #head_conf['HIERARCH ESO DET CHIP PXSPACE']=auxc fits.writeto("temp/"+list_img_name[i]+'_conf_'+str(j)+'_'+filt+'.fits',data=b_chip_conf,header=head_conf,output_verify='fix') filter_image='gauss_2.5_5x5.conv' seeing=seeing*0.339 seeing0=sigma0*0.339 z
hdufits_ima = fits.open(image) imag_data = hdufits_ima[0].data ver_max,hor_max = imag_data.shape hdufits_seg = fits.open(seg) segm_data = hdufits_seg[0].data filtered_segm_data=ndimage.gaussian_filter(segm_data, 2) segm_mask = (filtered_segm_data > 0) #segm_mask = (segm_data > 0) n = 0 empty_ap = [] matrix_used = np.zeros(np.shape(segm_data)) while n < 2001:
identifier_body
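Upstream of the empty_apertures body filled in here, the script homogenizes the PSF: Gaussian FWHMs add in quadrature under convolution, so smoothing with a kernel of sigma = sqrt(sigma0**2 - seeing**2) / sqrt(8 * ln 2) degrades a chip from FWHM `seeing` to the target FWHM sigma0, both in pixels (the sqrt(8 ln 2) factor converts FWHM to Gaussian sigma). A sketch on a synthetic chip:

import numpy as np
from scipy import ndimage

sigma0, seeing = 2.95, 2.0                     # target and measured FWHM, pixels
kernel_sigma = np.sqrt(sigma0**2 - seeing**2) / np.sqrt(8 * np.log(2))

chip = np.random.default_rng(1).normal(size=(64, 64))  # stand-in chip data
b_chip = ndimage.gaussian_filter(chip, kernel_sigma)   # now at FWHM sigma0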
github.go
Assignees(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Assignees) == 0 { return nil } _, _, err := retry(ctx, func() (*github.Issue, *github.Response, error) { return g.ghClient.Issues.AddAssignees(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Assignees) }) return err } func (g *Github) addLabels(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Labels) == 0 { return nil } _, _, err := retry(ctx, func() ([]*github.Label, *github.Response, error) { return g.ghClient.Issues.AddLabelsToIssue(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Labels) }) return err } // GetPullRequests gets all pull requests of with a specific branch func (g *Github) GetPullRequests(ctx context.Context, branchName string) ([]scm.PullRequest, error) { repos, err := g.getRepositories(ctx) if err != nil { return nil, err } // github limits the amount of data which can be handled by the graphql api // data needs to be chunked into multiple requests batches := chunkSlice(repos, 50) var pullRequests []scm.PullRequest for _, repos := range batches { result, err := g.getPullRequests(ctx, branchName, repos) if err != nil { return pullRequests, fmt.Errorf("failed to get pull request batch: %w", err) } pullRequests = append(pullRequests, result...) } return pullRequests, nil } func (g *Github) getPullRequests(ctx context.Context, branchName string, repos []*github.Repository) ([]scm.PullRequest, error) { // The fragment is all the data needed from every repository const fragment = `fragment repoProperties on Repository { pullRequests(headRefName: $branchName, last: 1) { nodes { number headRefName closed url merged baseRepository { name owner { login } } headRepository { name owner { login } } commits(last: 1) { nodes { commit { statusCheckRollup { state } } } } } } }` // Prepare data for compiling the query. // Each repository will get its own variables ($ownerX, $repoX) and be returned // via and alias repoX repoParameters := make([]string, len(repos)) repoQueries := make([]string, len(repos)) queryVariables := map[string]interface{}{ "branchName": branchName, } for i, repo := range repos { repoParameters[i] = fmt.Sprintf("$owner%[1]d: String!, $repo%[1]d: String!", i) repoQueries[i] = fmt.Sprintf("repo%[1]d: repository(owner: $owner%[1]d, name: $repo%[1]d) { ...repoProperties }", i) queryVariables[fmt.Sprintf("owner%d", i)] = repo.GetOwner().GetLogin() queryVariables[fmt.Sprintf("repo%d", i)] = repo.GetName() } // Create the final query query := fmt.Sprintf(` %s query ($branchName: String!, %s) { %s }`, fragment, strings.Join(repoParameters, ", "), strings.Join(repoQueries, "\n"), ) result := map[string]graphqlRepo{} err := g.makeGraphQLRequest(ctx, query, queryVariables, &result) if err != nil { return nil, err } // Fetch the repo based on name instead of looping through the map since that will // guarantee the same ordering as the original repository list prs := []scm.PullRequest{} for i := range repos { repo, ok := result[fmt.Sprintf("repo%d", i)] if !ok { return nil, fmt.Errorf("could not find repo%d", i) } if len(repo.PullRequests.Nodes) != 1 { continue } pr := repo.PullRequests.Nodes[0] // The graphql API does not have a way at query time to filter out the owner of the head branch // of a PR. Therefore, we have to filter out any repo that does not match the head owner. 
headOwner, err := g.headOwner(ctx, pr.BaseRepository.Owner.Login) if err != nil { return nil, err } if pr.HeadRepository.Owner.Login != headOwner { continue } prs = append(prs, convertGraphQLPullRequest(pr)) } return prs, nil } func (g *Github) loggedInUser(ctx context.Context) (string, error) { g.userMutex.Lock() defer g.userMutex.Unlock() if g.user != "" { return g.user, nil } user, _, err := retry(ctx, func() (*github.User, *github.Response, error) { return g.ghClient.Users.Get(ctx, "") }) if err != nil { return "", err } g.user = user.GetLogin() return g.user, nil } // headOwner returns the owner of the repository from which any pullrequest will be made // This is normally the owner of the original repository, but if a fork has been made // it will be a different owner func (g *Github) headOwner(ctx context.Context, repoOwner string) (string, error) { if !g.Fork { return repoOwner, nil } if g.ForkOwner != "" { return g.ForkOwner, nil } return g.loggedInUser(ctx) } // GetOpenPullRequest gets a pull request for one specific repository func (g *Github) GetOpenPullRequest(ctx context.Context, repo scm.Repository, branchName string) (scm.PullRequest, error) { r := repo.(repository) headOwner, err := g.headOwner(ctx, r.ownerName) if err != nil { return nil, err } prs, _, err := retry(ctx, func() ([]*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.List(ctx, headOwner, r.name, &github.PullRequestListOptions{ Head: fmt.Sprintf("%s:%s", headOwner, branchName), State: "open", ListOptions: github.ListOptions{ PerPage: 1, }, }) }) if err != nil { return nil, fmt.Errorf("failed to get open pull requests: %w", err) } if len(prs) == 0 { return nil, nil } return convertPullRequest(prs[0]), nil } // MergePullRequest merges a pull request func (g *Github) MergePullRequest(ctx context.Context, pullReq scm.PullRequest) error { pr := pullReq.(pullRequest) g.modLock() defer g.modUnlock() // We need to fetch the repo again since no AllowXMerge is present in listings of repositories repo, _, err := retry(ctx, func() (*github.Repository, *github.Response, error) { return g.ghClient.Repositories.Get(ctx, pr.ownerName, pr.repoName) }) if err != nil { return err } // Filter out all merge types to only the allowed ones, but keep the order of the ones left mergeTypes := scm.MergeTypeIntersection(g.MergeTypes, repoMergeTypes(repo)) if len(mergeTypes) == 0 { return errors.New("none of the configured merge types was permitted") } _, _, err = retry(ctx, func() (*github.PullRequestMergeResult, *github.Response, error) { return g.ghClient.PullRequests.Merge(ctx, pr.ownerName, pr.repoName, pr.number, "", &github.PullRequestOptions{ MergeMethod: mergeTypeGhName[mergeTypes[0]], }) }) if err != nil { return err } _, err = retryWithoutReturn(ctx, func() (*github.Response, error) { return g.ghClient.Git.DeleteRef(ctx, pr.prOwnerName, pr.prRepoName, fmt.Sprintf("heads/%s", pr.branchName)) }) // Ignore errors about the reference not existing since it may be the case that GitHub has already deleted the branch if err != nil && !strings.Contains(err.Error(), "Reference does not exist") { return err } return nil } // ClosePullRequest closes a pull request func (g *Github) ClosePullRequest(ctx context.Context, pullReq scm.PullRequest) error { pr := pullReq.(pullRequest) g.modLock() defer g.modUnlock() _, _, err := retry(ctx, func() (*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.Edit(ctx, pr.ownerName, pr.repoName, pr.number, &github.PullRequest{ State: 
&[]string{"closed"}[0], }) }) if err != nil
{ return err }
conditional_block
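The Go sample above batches repositories with `chunkSlice(repos, 50)` before querying, because GitHub's GraphQL API limits how much data one request can carry, but the helper itself is outside this excerpt. Below is a minimal sketch of such a helper; the generic signature and the handling of the short final chunk are assumptions, not the project's actual code.

```go
package main

import "fmt"

// chunkSlice splits a slice into consecutive chunks of at most chunkSize
// elements; the last chunk may be shorter. This is a sketch matching the
// chunkSlice(repos, 50) call site above; the real implementation may differ.
func chunkSlice[T any](items []T, chunkSize int) [][]T {
	var chunks [][]T
	for chunkSize < len(items) {
		chunks = append(chunks, items[:chunkSize])
		items = items[chunkSize:]
	}
	if len(items) > 0 {
		chunks = append(chunks, items)
	}
	return chunks
}

func main() {
	batches := chunkSlice([]int{1, 2, 3, 4, 5}, 2)
	fmt.Println(batches) // [[1 2] [3 4] [5]]
}
```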
github.go
([]*github.Repository, *github.Response, error) { return g.ghClient.Repositories.List(ctx, user, &github.RepositoryListOptions{ ListOptions: github.ListOptions{ Page: i, PerPage: 100, }, }) }) if err != nil { return nil, err } repos = append(repos, rr...) if len(rr) != 100 { break } i++ } return repos, nil } func (g *Github) getRepository(ctx context.Context, repoRef RepositoryReference) (*github.Repository, error) { repo, _, err := retry(ctx, func() (*github.Repository, *github.Response, error) { return g.ghClient.Repositories.Get(ctx, repoRef.OwnerName, repoRef.Name) }) if err != nil { return nil, err } return repo, nil } // CreatePullRequest creates a pull request func (g *Github) CreatePullRequest(ctx context.Context, repo scm.Repository, prRepo scm.Repository, newPR scm.NewPullRequest) (scm.PullRequest, error) { r := repo.(repository) prR := prRepo.(repository) g.modLock() defer g.modUnlock() pr, err := g.createPullRequest(ctx, r, prR, newPR) if err != nil { return nil, err } if err := g.addReviewers(ctx, r, newPR, pr); err != nil { return nil, err } if err := g.addAssignees(ctx, r, newPR, pr); err != nil { return nil, err } if err := g.addLabels(ctx, r, newPR, pr); err != nil { return nil, err } return convertPullRequest(pr), nil } func (g *Github) createPullRequest(ctx context.Context, repo repository, prRepo repository, newPR scm.NewPullRequest) (*github.PullRequest, error) { head := fmt.Sprintf("%s:%s", prRepo.ownerName, newPR.Head) pr, _, err := retry(ctx, func() (*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.Create(ctx, repo.ownerName, repo.name, &github.NewPullRequest{ Title: &newPR.Title, Body: &newPR.Body, Head: &head, Base: &newPR.Base, Draft: &newPR.Draft, }) }) return pr, err } func (g *Github) addReviewers(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Reviewers) == 0 && len(newPR.TeamReviewers) == 0 { return nil } _, _, err := retry(ctx, func() (*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.RequestReviewers(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), github.ReviewersRequest{ Reviewers: newPR.Reviewers, TeamReviewers: newPR.TeamReviewers, }) }) return err } func (g *Github) addAssignees(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Assignees) == 0 { return nil } _, _, err := retry(ctx, func() (*github.Issue, *github.Response, error) { return g.ghClient.Issues.AddAssignees(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Assignees) }) return err } func (g *Github) addLabels(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Labels) == 0 { return nil } _, _, err := retry(ctx, func() ([]*github.Label, *github.Response, error) { return g.ghClient.Issues.AddLabelsToIssue(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Labels) }) return err } // GetPullRequests gets all pull requests of with a specific branch func (g *Github) GetPullRequests(ctx context.Context, branchName string) ([]scm.PullRequest, error) { repos, err := g.getRepositories(ctx) if err != nil { return nil, err } // github limits the amount of data which can be handled by the graphql api // data needs to be chunked into multiple requests batches := chunkSlice(repos, 50) var pullRequests []scm.PullRequest for _, repos := range batches { result, err := g.getPullRequests(ctx, branchName, repos) if err != nil { 
return pullRequests, fmt.Errorf("failed to get pull request batch: %w", err) } pullRequests = append(pullRequests, result...) } return pullRequests, nil } func (g *Github) getPullRequests(ctx context.Context, branchName string, repos []*github.Repository) ([]scm.PullRequest, error) { // The fragment is all the data needed from every repository const fragment = `fragment repoProperties on Repository { pullRequests(headRefName: $branchName, last: 1) { nodes { number headRefName closed url merged baseRepository { name owner { login } } headRepository { name owner { login } } commits(last: 1) { nodes { commit { statusCheckRollup { state } } } } } } }` // Prepare data for compiling the query. // Each repository will get its own variables ($ownerX, $repoX) and be returned // via and alias repoX repoParameters := make([]string, len(repos)) repoQueries := make([]string, len(repos)) queryVariables := map[string]interface{}{ "branchName": branchName, } for i, repo := range repos { repoParameters[i] = fmt.Sprintf("$owner%[1]d: String!, $repo%[1]d: String!", i) repoQueries[i] = fmt.Sprintf("repo%[1]d: repository(owner: $owner%[1]d, name: $repo%[1]d) { ...repoProperties }", i) queryVariables[fmt.Sprintf("owner%d", i)] = repo.GetOwner().GetLogin() queryVariables[fmt.Sprintf("repo%d", i)] = repo.GetName() } // Create the final query query := fmt.Sprintf(` %s query ($branchName: String!, %s) { %s }`, fragment, strings.Join(repoParameters, ", "), strings.Join(repoQueries, "\n"), ) result := map[string]graphqlRepo{} err := g.makeGraphQLRequest(ctx, query, queryVariables, &result) if err != nil { return nil, err } // Fetch the repo based on name instead of looping through the map since that will // guarantee the same ordering as the original repository list prs := []scm.PullRequest{} for i := range repos { repo, ok := result[fmt.Sprintf("repo%d", i)] if !ok { return nil, fmt.Errorf("could not find repo%d", i) } if len(repo.PullRequests.Nodes) != 1 { continue } pr := repo.PullRequests.Nodes[0] // The graphql API does not have a way at query time to filter out the owner of the head branch // of a PR. Therefore, we have to filter out any repo that does not match the head owner. headOwner, err := g.headOwner(ctx, pr.BaseRepository.Owner.Login) if err != nil { return nil, err } if pr.HeadRepository.Owner.Login != headOwner { continue } prs = append(prs, convertGraphQLPullRequest(pr)) } return prs, nil } func (g *Github) loggedInUser(ctx context.Context) (string, error) { g.userMutex.Lock() defer g.userMutex.Unlock() if g.user != "" { return g.user, nil } user, _, err := retry(ctx, func() (*github.User, *github.Response, error) { return g.ghClient.Users.Get(ctx, "") }) if err != nil { return "", err } g.user = user.GetLogin() return g.user, nil } // headOwner returns the owner of the repository from which any pullrequest will be made // This is normally the owner of the original repository, but if a fork has been made // it will be a different owner func (g *Github) headOwner(ctx context.Context, repoOwner string) (string, error) { if !g.Fork { return repoOwner, nil } if g.ForkOwner != "" { return g.ForkOwner, nil } return g.loggedInUser(ctx) } // GetOpenPullRequest gets a pull request for one specific repository func (g *Github)
GetOpenPullRequest
identifier_name
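Every GitHub API call in these samples is wrapped in a `retry(ctx, func() (T, *github.Response, error))` helper whose implementation is not shown. A plausible sketch follows, assuming a fixed attempt count and linear backoff; the response type is simplified to `interface{}` to keep the sketch self-contained, and the real helper presumably also inspects the `*github.Response` for rate-limit information.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// retry is a sketch of the wrapper used throughout the sample: it runs fn,
// returning on success, and otherwise retries a fixed number of times with a
// linearly growing backoff, bailing out early if the context is cancelled.
func retry[T any](ctx context.Context, fn func() (T, interface{}, error)) (T, interface{}, error) {
	var (
		val  T
		resp interface{}
		err  error
	)
	for attempt := 0; attempt < 3; attempt++ {
		val, resp, err = fn()
		if err == nil {
			return val, resp, nil
		}
		select {
		case <-ctx.Done():
			return val, resp, ctx.Err()
		case <-time.After(time.Duration(attempt+1) * time.Second):
			// Back off before the next attempt.
		}
	}
	return val, resp, err
}

func main() {
	calls := 0
	v, _, err := retry(context.Background(), func() (string, interface{}, error) {
		calls++
		if calls < 2 {
			return "", nil, errors.New("transient")
		}
		return "ok", nil, nil
	})
	fmt.Println(v, err) // ok <nil>
}
```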
github.go
log.Debug("Skipping repository since it's a fork") continue } if g.checkPermissions { switch { case !permissions["pull"]: log.Debug("Skipping repository since the token does not have pull permissions") continue case !g.Fork && !g.ReadOnly && !permissions["push"]: log.Debug("Skipping repository since the token does not have push permissions and the run will not fork") continue } } newRepo, err := g.convertRepo(r)
repos = append(repos, newRepo) } return repos, nil } func (g *Github) getRepositories(ctx context.Context) ([]*github.Repository, error) { allRepos := []*github.Repository{} for _, org := range g.Organizations { repos, err := g.getOrganizationRepositories(ctx, org) if err != nil { return nil, errors.Wrapf(err, "could not get organization repositories for %s", org) } allRepos = append(allRepos, repos...) } for _, user := range g.Users { repos, err := g.getUserRepositories(ctx, user) if err != nil { return nil, errors.Wrapf(err, "could not get user repositories for %s", user) } allRepos = append(allRepos, repos...) } for _, repoRef := range g.Repositories { repo, err := g.getRepository(ctx, repoRef) if err != nil { return nil, errors.Wrapf(err, "could not get information about %s", repoRef.String()) } allRepos = append(allRepos, repo) } // Remove duplicate repos repoMap := map[string]*github.Repository{} for _, repo := range allRepos { repoMap[repo.GetFullName()] = repo } allRepos = make([]*github.Repository, 0, len(repoMap)) for _, repo := range repoMap { if repo.GetArchived() || repo.GetDisabled() { continue } allRepos = append(allRepos, repo) } sort.Slice(allRepos, func(i, j int) bool { return allRepos[i].GetCreatedAt().Before(allRepos[j].GetCreatedAt().Time) }) return allRepos, nil } func (g *Github) getOrganizationRepositories(ctx context.Context, orgName string) ([]*github.Repository, error) { var repos []*github.Repository i := 1 for { rr, _, err := retry(ctx, func() ([]*github.Repository, *github.Response, error) { return g.ghClient.Repositories.ListByOrg(ctx, orgName, &github.RepositoryListByOrgOptions{ ListOptions: github.ListOptions{ Page: i, PerPage: 100, }, }) }) if err != nil { return nil, err } repos = append(repos, rr...) if len(rr) != 100 { break } i++ } return repos, nil } func (g *Github) getUserRepositories(ctx context.Context, user string) ([]*github.Repository, error) { var repos []*github.Repository i := 1 for { rr, _, err := retry(ctx, func() ([]*github.Repository, *github.Response, error) { return g.ghClient.Repositories.List(ctx, user, &github.RepositoryListOptions{ ListOptions: github.ListOptions{ Page: i, PerPage: 100, }, }) }) if err != nil { return nil, err } repos = append(repos, rr...) 
if len(rr) != 100 { break } i++ } return repos, nil } func (g *Github) getRepository(ctx context.Context, repoRef RepositoryReference) (*github.Repository, error) { repo, _, err := retry(ctx, func() (*github.Repository, *github.Response, error) { return g.ghClient.Repositories.Get(ctx, repoRef.OwnerName, repoRef.Name) }) if err != nil { return nil, err } return repo, nil } // CreatePullRequest creates a pull request func (g *Github) CreatePullRequest(ctx context.Context, repo scm.Repository, prRepo scm.Repository, newPR scm.NewPullRequest) (scm.PullRequest, error) { r := repo.(repository) prR := prRepo.(repository) g.modLock() defer g.modUnlock() pr, err := g.createPullRequest(ctx, r, prR, newPR) if err != nil { return nil, err } if err := g.addReviewers(ctx, r, newPR, pr); err != nil { return nil, err } if err := g.addAssignees(ctx, r, newPR, pr); err != nil { return nil, err } if err := g.addLabels(ctx, r, newPR, pr); err != nil { return nil, err } return convertPullRequest(pr), nil } func (g *Github) createPullRequest(ctx context.Context, repo repository, prRepo repository, newPR scm.NewPullRequest) (*github.PullRequest, error) { head := fmt.Sprintf("%s:%s", prRepo.ownerName, newPR.Head) pr, _, err := retry(ctx, func() (*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.Create(ctx, repo.ownerName, repo.name, &github.NewPullRequest{ Title: &newPR.Title, Body: &newPR.Body, Head: &head, Base: &newPR.Base, Draft: &newPR.Draft, }) }) return pr, err } func (g *Github) addReviewers(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Reviewers) == 0 && len(newPR.TeamReviewers) == 0 { return nil } _, _, err := retry(ctx, func() (*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.RequestReviewers(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), github.ReviewersRequest{ Reviewers: newPR.Reviewers, TeamReviewers: newPR.TeamReviewers, }) }) return err } func (g *Github) addAssignees(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Assignees) == 0 { return nil } _, _, err := retry(ctx, func() (*github.Issue, *github.Response, error) { return g.ghClient.Issues.AddAssignees(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Assignees) }) return err } func (g *Github) addLabels(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Labels) == 0 { return nil } _, _, err := retry(ctx, func() ([]*github.Label, *github.Response, error) { return g.ghClient.Issues.AddLabelsToIssue(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Labels) }) return err } // GetPullRequests gets all pull requests of with a specific branch func (g *Github) GetPullRequests(ctx context.Context, branchName string) ([]scm.PullRequest, error) { repos, err := g.getRepositories(ctx) if err != nil { return nil, err } // github limits the amount of data which can be handled by the graphql api // data needs to be chunked into multiple requests batches := chunkSlice(repos, 50) var pullRequests []scm.PullRequest for _, repos := range batches { result, err := g.getPullRequests(ctx, branchName, repos) if err != nil { return pullRequests, fmt.Errorf("failed to get pull request batch: %w", err) } pullRequests = append(pullRequests, result...) 
} return pullRequests, nil } func (g *Github) getPullRequests(ctx context.Context, branchName string, repos []*github.Repository) ([]scm.PullRequest, error) { // The fragment is all the data needed from every repository const fragment = `fragment repoProperties on Repository { pullRequests(headRefName: $branchName, last: 1) { nodes { number headRefName closed url merged baseRepository { name owner { login } } headRepository { name owner { login } } commits(last: 1) { nodes { commit { statusCheckRollup { state } } } } } } }` // Prepare data for compiling the query. // Each repository will get its own variables ($ownerX, $repoX) and be returned // via and alias repoX
if err != nil { return nil, err }
random_line_split
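`MergePullRequest` above narrows the configured merge types with `scm.MergeTypeIntersection(g.MergeTypes, repoMergeTypes(repo))`, keeping the order of the configured list so that the first surviving entry becomes the merge method. A sketch of that order-preserving intersection, with `MergeType` standing in for the real scm type:

```go
package main

import "fmt"

// MergeType stands in for the scm.MergeType used in the sample.
type MergeType string

// mergeTypeIntersection keeps every entry of configured that also appears in
// allowed, preserving the order of configured so the first element can be
// used as the preferred merge method, as the sample does.
func mergeTypeIntersection(configured, allowed []MergeType) []MergeType {
	allowedSet := make(map[MergeType]bool, len(allowed))
	for _, t := range allowed {
		allowedSet[t] = true
	}
	var result []MergeType
	for _, t := range configured {
		if allowedSet[t] {
			result = append(result, t)
		}
	}
	return result
}

func main() {
	configured := []MergeType{"squash", "rebase", "merge"}
	allowed := []MergeType{"merge", "squash"}
	fmt.Println(mergeTypeIntersection(configured, allowed)) // [squash merge]
}
```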
github.go
*github.Response, error) { return g.ghClient.Issues.AddAssignees(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Assignees) }) return err } func (g *Github) addLabels(ctx context.Context, repo repository, newPR scm.NewPullRequest, createdPR *github.PullRequest) error { if len(newPR.Labels) == 0 { return nil } _, _, err := retry(ctx, func() ([]*github.Label, *github.Response, error) { return g.ghClient.Issues.AddLabelsToIssue(ctx, repo.ownerName, repo.name, createdPR.GetNumber(), newPR.Labels) }) return err } // GetPullRequests gets all pull requests of with a specific branch func (g *Github) GetPullRequests(ctx context.Context, branchName string) ([]scm.PullRequest, error) { repos, err := g.getRepositories(ctx) if err != nil { return nil, err } // github limits the amount of data which can be handled by the graphql api // data needs to be chunked into multiple requests batches := chunkSlice(repos, 50) var pullRequests []scm.PullRequest for _, repos := range batches { result, err := g.getPullRequests(ctx, branchName, repos) if err != nil { return pullRequests, fmt.Errorf("failed to get pull request batch: %w", err) } pullRequests = append(pullRequests, result...) } return pullRequests, nil } func (g *Github) getPullRequests(ctx context.Context, branchName string, repos []*github.Repository) ([]scm.PullRequest, error) { // The fragment is all the data needed from every repository const fragment = `fragment repoProperties on Repository { pullRequests(headRefName: $branchName, last: 1) { nodes { number headRefName closed url merged baseRepository { name owner { login } } headRepository { name owner { login } } commits(last: 1) { nodes { commit { statusCheckRollup { state } } } } } } }` // Prepare data for compiling the query. // Each repository will get its own variables ($ownerX, $repoX) and be returned // via and alias repoX repoParameters := make([]string, len(repos)) repoQueries := make([]string, len(repos)) queryVariables := map[string]interface{}{ "branchName": branchName, } for i, repo := range repos { repoParameters[i] = fmt.Sprintf("$owner%[1]d: String!, $repo%[1]d: String!", i) repoQueries[i] = fmt.Sprintf("repo%[1]d: repository(owner: $owner%[1]d, name: $repo%[1]d) { ...repoProperties }", i) queryVariables[fmt.Sprintf("owner%d", i)] = repo.GetOwner().GetLogin() queryVariables[fmt.Sprintf("repo%d", i)] = repo.GetName() } // Create the final query query := fmt.Sprintf(` %s query ($branchName: String!, %s) { %s }`, fragment, strings.Join(repoParameters, ", "), strings.Join(repoQueries, "\n"), ) result := map[string]graphqlRepo{} err := g.makeGraphQLRequest(ctx, query, queryVariables, &result) if err != nil { return nil, err } // Fetch the repo based on name instead of looping through the map since that will // guarantee the same ordering as the original repository list prs := []scm.PullRequest{} for i := range repos { repo, ok := result[fmt.Sprintf("repo%d", i)] if !ok { return nil, fmt.Errorf("could not find repo%d", i) } if len(repo.PullRequests.Nodes) != 1 { continue } pr := repo.PullRequests.Nodes[0] // The graphql API does not have a way at query time to filter out the owner of the head branch // of a PR. Therefore, we have to filter out any repo that does not match the head owner. 
headOwner, err := g.headOwner(ctx, pr.BaseRepository.Owner.Login) if err != nil { return nil, err } if pr.HeadRepository.Owner.Login != headOwner { continue } prs = append(prs, convertGraphQLPullRequest(pr)) } return prs, nil } func (g *Github) loggedInUser(ctx context.Context) (string, error) { g.userMutex.Lock() defer g.userMutex.Unlock() if g.user != "" { return g.user, nil } user, _, err := retry(ctx, func() (*github.User, *github.Response, error) { return g.ghClient.Users.Get(ctx, "") }) if err != nil { return "", err } g.user = user.GetLogin() return g.user, nil } // headOwner returns the owner of the repository from which any pullrequest will be made // This is normally the owner of the original repository, but if a fork has been made // it will be a different owner func (g *Github) headOwner(ctx context.Context, repoOwner string) (string, error) { if !g.Fork { return repoOwner, nil } if g.ForkOwner != "" { return g.ForkOwner, nil } return g.loggedInUser(ctx) } // GetOpenPullRequest gets a pull request for one specific repository func (g *Github) GetOpenPullRequest(ctx context.Context, repo scm.Repository, branchName string) (scm.PullRequest, error) { r := repo.(repository) headOwner, err := g.headOwner(ctx, r.ownerName) if err != nil { return nil, err } prs, _, err := retry(ctx, func() ([]*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.List(ctx, headOwner, r.name, &github.PullRequestListOptions{ Head: fmt.Sprintf("%s:%s", headOwner, branchName), State: "open", ListOptions: github.ListOptions{ PerPage: 1, }, }) }) if err != nil { return nil, fmt.Errorf("failed to get open pull requests: %w", err) } if len(prs) == 0 { return nil, nil } return convertPullRequest(prs[0]), nil } // MergePullRequest merges a pull request func (g *Github) MergePullRequest(ctx context.Context, pullReq scm.PullRequest) error { pr := pullReq.(pullRequest) g.modLock() defer g.modUnlock() // We need to fetch the repo again since no AllowXMerge is present in listings of repositories repo, _, err := retry(ctx, func() (*github.Repository, *github.Response, error) { return g.ghClient.Repositories.Get(ctx, pr.ownerName, pr.repoName) }) if err != nil { return err } // Filter out all merge types to only the allowed ones, but keep the order of the ones left mergeTypes := scm.MergeTypeIntersection(g.MergeTypes, repoMergeTypes(repo)) if len(mergeTypes) == 0 { return errors.New("none of the configured merge types was permitted") } _, _, err = retry(ctx, func() (*github.PullRequestMergeResult, *github.Response, error) { return g.ghClient.PullRequests.Merge(ctx, pr.ownerName, pr.repoName, pr.number, "", &github.PullRequestOptions{ MergeMethod: mergeTypeGhName[mergeTypes[0]], }) }) if err != nil { return err } _, err = retryWithoutReturn(ctx, func() (*github.Response, error) { return g.ghClient.Git.DeleteRef(ctx, pr.prOwnerName, pr.prRepoName, fmt.Sprintf("heads/%s", pr.branchName)) }) // Ignore errors about the reference not existing since it may be the case that GitHub has already deleted the branch if err != nil && !strings.Contains(err.Error(), "Reference does not exist") { return err } return nil } // ClosePullRequest closes a pull request func (g *Github) ClosePullRequest(ctx context.Context, pullReq scm.PullRequest) error
{ pr := pullReq.(pullRequest) g.modLock() defer g.modUnlock() _, _, err := retry(ctx, func() (*github.PullRequest, *github.Response, error) { return g.ghClient.PullRequests.Edit(ctx, pr.ownerName, pr.repoName, pr.number, &github.PullRequest{ State: &[]string{"closed"}[0], }) }) if err != nil { return err } _, err = retryWithoutReturn(ctx, func() (*github.Response, error) { return g.ghClient.Git.DeleteRef(ctx, pr.prOwnerName, pr.prRepoName, fmt.Sprintf("heads/%s", pr.branchName)) }) return err }
identifier_body
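The batched query result above is decoded into a `map[string]graphqlRepo`, keyed by the `repoN` aliases, and later navigated as `repo.PullRequests.Nodes[0]`, `pr.BaseRepository.Owner.Login`, and so on. The struct itself is not part of this excerpt; the shape below is inferred from the GraphQL fragment and those field accesses, so the names and JSON tags are assumptions.

```go
package main

// repoOwner mirrors the owner { login } selection in the fragment.
type repoOwner struct {
	Login string `json:"login"`
}

// repoRef mirrors the baseRepository / headRepository selections.
type repoRef struct {
	Name  string    `json:"name"`
	Owner repoOwner `json:"owner"`
}

// graphqlPR carries one pull request node, including the state of the last
// commit's status checks via statusCheckRollup.
type graphqlPR struct {
	Number         int     `json:"number"`
	HeadRefName    string  `json:"headRefName"`
	Closed         bool    `json:"closed"`
	URL            string  `json:"url"`
	Merged         bool    `json:"merged"`
	BaseRepository repoRef `json:"baseRepository"`
	HeadRepository repoRef `json:"headRepository"`
	Commits        struct {
		Nodes []struct {
			Commit struct {
				StatusCheckRollup struct {
					State string `json:"state"`
				} `json:"statusCheckRollup"`
			} `json:"commit"`
		} `json:"nodes"`
	} `json:"commits"`
}

// graphqlRepo is the per-alias result: at most one PR on the queried branch.
type graphqlRepo struct {
	PullRequests struct {
		Nodes []graphqlPR `json:"nodes"`
	} `json:"pullRequests"`
}

func main() {}
```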
server.rs
::error::ErrorExt; use mz_ore::halt; use mz_ore::metrics::MetricsRegistry; use mz_ore::tracing::TracingHandle; use mz_persist_client::cache::PersistClientCache; use mz_service::client::{GenericClient, Partitionable, Partitioned}; use mz_service::local::LocalClient; use timely::communication::initialize::WorkerGuards; use timely::execute::execute_from; use timely::WorkerConfig; use tokio::runtime::Handle; use tokio::sync::mpsc; use tracing::{info, warn}; use crate::communication::initialize_networking; type PartitionedClient<C, R, A> = Partitioned<LocalClient<C, R, A>, C, R>; /// Configures a cluster server. #[derive(Debug)] pub struct ClusterConfig { /// Metrics registry through which dataflow metrics will be reported. pub metrics_registry: MetricsRegistry, /// `persist` client cache. pub persist_clients: Arc<PersistClientCache>, /// A process-global handle to tracing configuration. pub tracing_handle: Arc<TracingHandle>, } /// A client managing access to the local portion of a Timely cluster pub struct ClusterClient<Client, Worker, C, R> where Worker: crate::types::AsRunnableWorker<C, R>, { /// The actual client to talk to the cluster inner: Option<Client>, /// The running timely instance timely_container: TimelyContainerRef<C, R, Worker::Activatable>, /// Handle to the persist infrastructure. persist_clients: Arc<PersistClientCache>, /// The handle to the Tokio runtime. tokio_handle: tokio::runtime::Handle, /// A process-global handle to tracing configuration. tracing_handle: Arc<TracingHandle>, worker: Worker, } /// Metadata about timely workers in this process. pub struct TimelyContainer<C, R, A> { /// The current timely config in use config: TimelyConfig, /// Channels over which to send endpoints for wiring up a new Client client_txs: Vec< crossbeam_channel::Sender<( crossbeam_channel::Receiver<C>, mpsc::UnboundedSender<R>, crossbeam_channel::Sender<A>, )>, >, /// Thread guards that keep worker threads alive _worker_guards: WorkerGuards<()>, } /// Threadsafe reference to an optional TimelyContainer pub type TimelyContainerRef<C, R, A> = Arc<tokio::sync::Mutex<Option<TimelyContainer<C, R, A>>>>; /// Initiates a timely dataflow computation, processing cluster commands. 
pub fn serve<Worker, C, R>( config: ClusterConfig, worker_config: Worker, ) -> Result< ( TimelyContainerRef<C, R, Worker::Activatable>, impl Fn() -> Box<ClusterClient<PartitionedClient<C, R, Worker::Activatable>, Worker, C, R>>, ), Error, > where C: Send + 'static, R: Send + 'static, (C, R): Partitionable<C, R>, Worker: crate::types::AsRunnableWorker<C, R> + Clone + Send + Sync + 'static, { let tokio_executor = tokio::runtime::Handle::current(); let timely_container = Arc::new(tokio::sync::Mutex::new(None)); let client_builder = { let timely_container = Arc::clone(&timely_container); move || { let worker_config = worker_config.clone(); let client = ClusterClient::new( Arc::clone(&timely_container), Arc::clone(&config.persist_clients), tokio_executor.clone(), Arc::clone(&config.tracing_handle), worker_config, ); let client = Box::new(client); client } }; Ok((timely_container, client_builder)) } impl<Worker, C, R> ClusterClient<PartitionedClient<C, R, Worker::Activatable>, Worker, C, R> where C: Send + 'static, R: Send + 'static, (C, R): Partitionable<C, R>, Worker: crate::types::AsRunnableWorker<C, R> + Clone + Send + Sync + 'static, { fn new( timely_container: TimelyContainerRef<C, R, Worker::Activatable>, persist_clients: Arc<PersistClientCache>, tokio_handle: tokio::runtime::Handle, tracing_handle: Arc<TracingHandle>, worker_config: Worker, ) -> Self { Self { timely_container, inner: None, persist_clients, tokio_handle, tracing_handle,
async fn build_timely( user_worker_config: Worker, config: TimelyConfig, epoch: ClusterStartupEpoch, persist_clients: Arc<PersistClientCache>, tracing_handle: Arc<TracingHandle>, tokio_executor: Handle, ) -> Result<TimelyContainer<C, R, Worker::Activatable>, Error> { info!("Building timely container with config {config:?}"); let (client_txs, client_rxs): (Vec<_>, Vec<_>) = (0..config.workers) .map(|_| crossbeam_channel::unbounded()) .unzip(); let client_rxs: Mutex<Vec<_>> = Mutex::new(client_rxs.into_iter().map(Some).collect()); let (builders, other) = initialize_networking( config.workers, config.process, config.addresses.clone(), epoch, ) .await?; let mut worker_config = WorkerConfig::default(); differential_dataflow::configure( &mut worker_config, &differential_dataflow::Config { idle_merge_effort: Some(isize::cast_from(config.idle_arrangement_merge_effort)), }, ); let worker_guards = execute_from(builders, other, worker_config, move |timely_worker| { let timely_worker_index = timely_worker.index(); let _tokio_guard = tokio_executor.enter(); let client_rx = client_rxs.lock().unwrap()[timely_worker_index % config.workers] .take() .unwrap(); let persist_clients = Arc::clone(&persist_clients); let user_worker_config = user_worker_config.clone(); let tracing_handle = Arc::clone(&tracing_handle); Worker::build_and_run( user_worker_config, timely_worker, client_rx, persist_clients, tracing_handle, ) }) .map_err(|e| anyhow!("{e}"))?; Ok(TimelyContainer { config, client_txs, _worker_guards: worker_guards, }) } async fn build( &mut self, config: TimelyConfig, epoch: ClusterStartupEpoch, ) -> Result<(), Error> { let workers = config.workers; // Check if we can reuse the existing timely instance. // We currently do not support reinstantiating timely, we simply panic if another config is // requested. This code must panic before dropping the worker guards contained in // timely_container. As we don't terminate timely workers, the thread join would hang // forever, possibly creating a fair share of confusion in the orchestrator. 
let persist_clients = Arc::clone(&self.persist_clients); let handle = self.tokio_handle.clone(); let tracing_handle = Arc::clone(&self.tracing_handle); let worker_config = self.worker.clone(); let mut timely_lock = self.timely_container.lock().await; let timely = match timely_lock.take() { Some(existing) => { if config != existing.config { halt!( "new timely configuration does not match existing timely configuration:\n{:?}\nvs\n{:?}", config, existing.config, ); } info!("Timely already initialized; re-using.",); existing } None => { let build_timely_result = Self::build_timely( worker_config, config, epoch, persist_clients, tracing_handle, handle, ) .await; match build_timely_result { Err(e) => { warn!("timely initialization failed: {}", e.display_with_causes()); return Err(e); } Ok(ok) => ok, } } }; let (command_txs, command_rxs): (Vec<_>, Vec<_>) = (0..workers).map(|_| crossbeam_channel::unbounded()).unzip(); let (response_txs, response_rxs): (Vec<_>, Vec<_>) = (0..workers).map(|_| mpsc::unbounded_channel()).unzip(); let activators = timely .client_txs .iter() .zip(command_rxs) .zip(response_txs) .map(|((client_tx, cmd_rx), resp_tx)| { let (activator_tx, activator_rx) = crossbeam_channel::unbounded(); client_tx .send((cmd_rx, resp_tx, activator_tx)) .expect("worker should not drop first"); activator_rx.recv().unwrap() }) .collect(); *timely_lock = Some(timely); self.inner = Some(LocalClient::new_partitioned( response_rxs, command_txs, activators, )); Ok(()) } } impl<Client: Debug, Worker: crate::types::AsRunnableWorker<C, R>,
worker: worker_config, } }
random_line_split
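The Rust server above hands each timely worker a `(cmd_rx, resp_tx, activator_tx)` endpoint triple over a dedicated crossbeam channel, so a client can be wired up after the workers are already running. Below is a rough Go analogue of that wiring pattern, not the Materialize code itself: goroutines and channels stand in for timely threads and crossbeam, and all names are invented for illustration.

```go
package main

import (
	"fmt"
	"sync"
)

// endpoints is the analogue of the (cmd_rx, resp_tx) pair each worker
// receives over its client channel in the Rust sample.
type endpoints struct {
	commands  <-chan string
	responses chan<- string
}

func main() {
	const workers = 3

	// One endpoint channel per worker, mirroring client_txs in the sample.
	clientChans := make([]chan endpoints, workers)
	for i := range clientChans {
		clientChans[i] = make(chan endpoints, 1)
	}

	var wg sync.WaitGroup
	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func(idx int) {
			defer wg.Done()
			// Each worker blocks until a client wires it up.
			ep := <-clientChans[idx]
			for cmd := range ep.commands {
				ep.responses <- fmt.Sprintf("worker %d handled %q", idx, cmd)
			}
		}(i)
	}

	// The client side: create per-worker command channels plus a shared
	// response channel and send the endpoints to every worker, much like the
	// partitioned client does when a new client connects.
	responses := make(chan string, workers)
	commandChans := make([]chan string, workers)
	for i := 0; i < workers; i++ {
		commandChans[i] = make(chan string, 1)
		clientChans[i] <- endpoints{commands: commandChans[i], responses: responses}
		commandChans[i] <- "create-dataflow"
		close(commandChans[i])
	}

	for i := 0; i < workers; i++ {
		fmt.Println(<-responses)
	}
	wg.Wait()
}
```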
server.rs
::error::ErrorExt; use mz_ore::halt; use mz_ore::metrics::MetricsRegistry; use mz_ore::tracing::TracingHandle; use mz_persist_client::cache::PersistClientCache; use mz_service::client::{GenericClient, Partitionable, Partitioned}; use mz_service::local::LocalClient; use timely::communication::initialize::WorkerGuards; use timely::execute::execute_from; use timely::WorkerConfig; use tokio::runtime::Handle; use tokio::sync::mpsc; use tracing::{info, warn}; use crate::communication::initialize_networking; type PartitionedClient<C, R, A> = Partitioned<LocalClient<C, R, A>, C, R>; /// Configures a cluster server. #[derive(Debug)] pub struct ClusterConfig { /// Metrics registry through which dataflow metrics will be reported. pub metrics_registry: MetricsRegistry, /// `persist` client cache. pub persist_clients: Arc<PersistClientCache>, /// A process-global handle to tracing configuration. pub tracing_handle: Arc<TracingHandle>, } /// A client managing access to the local portion of a Timely cluster pub struct ClusterClient<Client, Worker, C, R> where Worker: crate::types::AsRunnableWorker<C, R>, { /// The actual client to talk to the cluster inner: Option<Client>, /// The running timely instance timely_container: TimelyContainerRef<C, R, Worker::Activatable>, /// Handle to the persist infrastructure. persist_clients: Arc<PersistClientCache>, /// The handle to the Tokio runtime. tokio_handle: tokio::runtime::Handle, /// A process-global handle to tracing configuration. tracing_handle: Arc<TracingHandle>, worker: Worker, } /// Metadata about timely workers in this process. pub struct TimelyContainer<C, R, A> { /// The current timely config in use config: TimelyConfig, /// Channels over which to send endpoints for wiring up a new Client client_txs: Vec< crossbeam_channel::Sender<( crossbeam_channel::Receiver<C>, mpsc::UnboundedSender<R>, crossbeam_channel::Sender<A>, )>, >, /// Thread guards that keep worker threads alive _worker_guards: WorkerGuards<()>, } /// Threadsafe reference to an optional TimelyContainer pub type TimelyContainerRef<C, R, A> = Arc<tokio::sync::Mutex<Option<TimelyContainer<C, R, A>>>>; /// Initiates a timely dataflow computation, processing cluster commands. pub fn
<Worker, C, R>( config: ClusterConfig, worker_config: Worker, ) -> Result< ( TimelyContainerRef<C, R, Worker::Activatable>, impl Fn() -> Box<ClusterClient<PartitionedClient<C, R, Worker::Activatable>, Worker, C, R>>, ), Error, > where C: Send + 'static, R: Send + 'static, (C, R): Partitionable<C, R>, Worker: crate::types::AsRunnableWorker<C, R> + Clone + Send + Sync + 'static, { let tokio_executor = tokio::runtime::Handle::current(); let timely_container = Arc::new(tokio::sync::Mutex::new(None)); let client_builder = { let timely_container = Arc::clone(&timely_container); move || { let worker_config = worker_config.clone(); let client = ClusterClient::new( Arc::clone(&timely_container), Arc::clone(&config.persist_clients), tokio_executor.clone(), Arc::clone(&config.tracing_handle), worker_config, ); let client = Box::new(client); client } }; Ok((timely_container, client_builder)) } impl<Worker, C, R> ClusterClient<PartitionedClient<C, R, Worker::Activatable>, Worker, C, R> where C: Send + 'static, R: Send + 'static, (C, R): Partitionable<C, R>, Worker: crate::types::AsRunnableWorker<C, R> + Clone + Send + Sync + 'static, { fn new( timely_container: TimelyContainerRef<C, R, Worker::Activatable>, persist_clients: Arc<PersistClientCache>, tokio_handle: tokio::runtime::Handle, tracing_handle: Arc<TracingHandle>, worker_config: Worker, ) -> Self { Self { timely_container, inner: None, persist_clients, tokio_handle, tracing_handle, worker: worker_config, } } async fn build_timely( user_worker_config: Worker, config: TimelyConfig, epoch: ClusterStartupEpoch, persist_clients: Arc<PersistClientCache>, tracing_handle: Arc<TracingHandle>, tokio_executor: Handle, ) -> Result<TimelyContainer<C, R, Worker::Activatable>, Error> { info!("Building timely container with config {config:?}"); let (client_txs, client_rxs): (Vec<_>, Vec<_>) = (0..config.workers) .map(|_| crossbeam_channel::unbounded()) .unzip(); let client_rxs: Mutex<Vec<_>> = Mutex::new(client_rxs.into_iter().map(Some).collect()); let (builders, other) = initialize_networking( config.workers, config.process, config.addresses.clone(), epoch, ) .await?; let mut worker_config = WorkerConfig::default(); differential_dataflow::configure( &mut worker_config, &differential_dataflow::Config { idle_merge_effort: Some(isize::cast_from(config.idle_arrangement_merge_effort)), }, ); let worker_guards = execute_from(builders, other, worker_config, move |timely_worker| { let timely_worker_index = timely_worker.index(); let _tokio_guard = tokio_executor.enter(); let client_rx = client_rxs.lock().unwrap()[timely_worker_index % config.workers] .take() .unwrap(); let persist_clients = Arc::clone(&persist_clients); let user_worker_config = user_worker_config.clone(); let tracing_handle = Arc::clone(&tracing_handle); Worker::build_and_run( user_worker_config, timely_worker, client_rx, persist_clients, tracing_handle, ) }) .map_err(|e| anyhow!("{e}"))?; Ok(TimelyContainer { config, client_txs, _worker_guards: worker_guards, }) } async fn build( &mut self, config: TimelyConfig, epoch: ClusterStartupEpoch, ) -> Result<(), Error> { let workers = config.workers; // Check if we can reuse the existing timely instance. // We currently do not support reinstantiating timely, we simply panic if another config is // requested. This code must panic before dropping the worker guards contained in // timely_container. As we don't terminate timely workers, the thread join would hang // forever, possibly creating a fair share of confusion in the orchestrator. 
let persist_clients = Arc::clone(&self.persist_clients); let handle = self.tokio_handle.clone(); let tracing_handle = Arc::clone(&self.tracing_handle); let worker_config = self.worker.clone(); let mut timely_lock = self.timely_container.lock().await; let timely = match timely_lock.take() { Some(existing) => { if config != existing.config { halt!( "new timely configuration does not match existing timely configuration:\n{:?}\nvs\n{:?}", config, existing.config, ); } info!("Timely already initialized; re-using.",); existing } None => { let build_timely_result = Self::build_timely( worker_config, config, epoch, persist_clients, tracing_handle, handle, ) .await; match build_timely_result { Err(e) => { warn!("timely initialization failed: {}", e.display_with_causes()); return Err(e); } Ok(ok) => ok, } } }; let (command_txs, command_rxs): (Vec<_>, Vec<_>) = (0..workers).map(|_| crossbeam_channel::unbounded()).unzip(); let (response_txs, response_rxs): (Vec<_>, Vec<_>) = (0..workers).map(|_| mpsc::unbounded_channel()).unzip(); let activators = timely .client_txs .iter() .zip(command_rxs) .zip(response_txs) .map(|((client_tx, cmd_rx), resp_tx)| { let (activator_tx, activator_rx) = crossbeam_channel::unbounded(); client_tx .send((cmd_rx, resp_tx, activator_tx)) .expect("worker should not drop first"); activator_rx.recv().unwrap() }) .collect(); *timely_lock = Some(timely); self.inner = Some(LocalClient::new_partitioned( response_rxs, command_txs, activators, )); Ok(()) } } impl<Client: Debug, Worker: crate::types::AsRunnableWorker<C, R
serve
identifier_name
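`ClusterClient::build` above reuses the existing timely container when the requested `TimelyConfig` matches, and deliberately halts the process on a mismatch, since reinstantiating timely is unsupported and dropping the worker guards would hang the thread join. A compact Go analogue of that guarded reuse-or-build decision follows; the types and names are illustrative, with `log.Fatalf` playing the role of `halt!`.

```go
package main

import (
	"fmt"
	"log"
	"sync"
)

// Config stands in for TimelyConfig; it is comparable so == works.
type Config struct {
	Workers int
}

// Container stands in for TimelyContainer.
type Container struct {
	cfg Config
}

// containerRef mirrors the Arc<Mutex<Option<TimelyContainer>>> reference.
type containerRef struct {
	mu sync.Mutex
	c  *Container
}

// ensure reuses the existing container when the requested config matches,
// aborts the process on a mismatch (the analogue of halt! in the sample),
// and builds a new container otherwise.
func (r *containerRef) ensure(cfg Config) *Container {
	r.mu.Lock()
	defer r.mu.Unlock()
	if r.c != nil {
		if r.c.cfg != cfg {
			log.Fatalf("new config %+v does not match existing config %+v", cfg, r.c.cfg)
		}
		fmt.Println("container already initialized; re-using")
		return r.c
	}
	r.c = &Container{cfg: cfg}
	fmt.Println("built new container")
	return r.c
}

func main() {
	ref := &containerRef{}
	ref.ensure(Config{Workers: 4}) // built new container
	ref.ensure(Config{Workers: 4}) // re-used
}
```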
model.ts
this.cid = _.uniqueId(this.cidPrefix); if (options.parse) attrs = this.parse(attrs, options) || <A> {}; let defaults = _.result(this, 'defaults'); attrs = _.defaults(_.extend({}, defaults, attrs), defaults); this.set(attrs, options); } // Attributes protected attributes: A = <A> {}; protected defaults: A; protected _previousAttributes: A; protected changed: A = <A> {}; public $attributes = new Proxy(this.attributes, { get: (target, property, receiver) => this.get(property), set: (target, property, value, receiver) => this.set(property, value) }) url() : string { let base = super.url(); if (this.isNew()) return base; let id = this.get(this.idAttribute); return base.replace(/[^\/]$/, '$&/') + encodeURIComponent(id); } get(attr: string) : any { return this.attributes[attr]; } // Get the HTML-escaped value of an attribute. escape(attr) { return _.escape(this.get(attr)); } // Returns `true` if the attribute contains a value that is not null // or undefined. has(attr) { return this.get(attr) != null; } // Special-cased proxy to underscore's `_.matches` method. matches(attrs) { return !!_.iteratee(attrs, this)(this.attributes); } set(key: any, val: any, options: any = {}) : Model<A> { if (key == null) return this; // Handle both `"key", value` and `{key: value}` -style arguments. var attrs; if (typeof key === 'object') { attrs = key; options = val; } else { (attrs = {})[key] = val; } // Run validation. if (!this._validate(attrs, options)) return this; // Extract attributes and options. var unset = options.unset; var silent = options.silent; var changes = []; var changing = this._changing; this._changing = true; if (!changing) { this._previousAttributes = _.clone(this.attributes); this.changed = <A> {}; } var current = this.attributes; var changed = this.changed; var prev = this._previousAttributes; // For each `set` attribute, update or delete the current value. for (var attr in attrs) { val = attrs[attr]; if (!_.isEqual(current[attr], val)) changes.push(attr); if (!_.isEqual(prev[attr], val)) { changed[attr] = val; } else { delete changed[attr]; } unset ? delete current[attr] : current[attr] = val; } // Update the `id`. if (this.idAttribute in attrs) this.id = this.get(this.idAttribute); // Trigger all relevant attribute changes. if (!silent) { if (changes.length) this._pending = options; for (var i = 0; i < changes.length; i++) { this.event$.emit(<IEvent> { topic: 'change:' + changes[i], emitter: this, payload: current[changes[i]], options: options}); } } // You might be wondering why there's a `while` loop here. Changes can // be recursively nested within `"change"` events. if (changing) return this; if (!silent) { while (this._pending) { options = this._pending; this._pending = false; this.event$.emit(<IEvent> { topic: 'change', emitter: this, options: options }); } } this._pending = false; this._changing = false; return this; } // Return a copy of the model's `attributes` object. toJSON (options?: any) { return _.clone(this.attributes); } // Remove an attribute from the model, firing `"change"`. `unset` is a noop // if the attribute doesn't exist. unset(attr, options) { return this.set(attr, void 0, _.extend({}, options, {unset: true})); } // Clear all attributes on the model, firing `"change"`. clear(options) { var attrs = {}; for (var key in this.attributes) attrs[key] = void 0; return this.set(attrs, _.extend({}, options, {unset: true})); } // Determine if the model has changed since the last `"change"` event. // If you specify an attribute name, determine if that attribute has changed. 
hasChanged(attr?) : boolean { if (attr == null) return !_.isEmpty(this.changed); return _.has(this.changed, attr); } // Return an object containing all the attributes that have changed, or // false if there are no changed attributes. Useful for determining what // parts of a view need to be updated and/or what attributes need to be // persisted to the server. Unset attributes will be set to undefined. // You can also pass an attributes object to diff against the model, // determining if there *would be* a change. changedAttributes(diff) { if (!diff) return this.hasChanged() ? _.clone(this.changed) : false; var old = this._changing ? this._previousAttributes : this.attributes; var changed = {}; for (var attr in diff) { var val = diff[attr]; if (_.isEqual(old[attr], val)) continue; changed[attr] = val; } return _.size(changed) ? changed : false; } // Get the previous value of an attribute, recorded at the time the last // `"change"` event was fired. previous(attr) { if (attr == null || !this._previousAttributes) return null; return this._previousAttributes[attr]; } // Get all of the attributes of the model at the time of the previous // `"change"` event. previousAttributes() { return _.clone(this._previousAttributes); } // Fetch the model from the server, merging the response with the model's // local attributes. Any changed attributes will trigger a "change" event. fetch(options: any = {}) : Observable<Model<A>> { options = _.extend({parse: true}, options); let obs$ = this.sync('read', options); obs$.subscribe( resp => { var serverAttrs = options.parse ? this.parse(resp, options) : resp; if (!this.set(serverAttrs, options)) return false; }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); return obs$; } // Set a hash of model attributes, and sync the model to the server. // If the server returns an attributes hash that differs, the model's // state will be `set` again. save(key, val, options?) : Observable<Model<A>> { // Handle both `"key", value` and `{key: value}` -style arguments. let attrs; if (key == null || typeof key === 'object') { attrs = key; options = val; } else { (attrs = {})[key] = val; } options = _.extend({validate: true, parse: true}, options); let wait = options.wait; // If we're not waiting and attributes exist, save acts as // `set(attr).save(null, opts)` with validation. Otherwise, check if // the model will be valid when the attributes, if any, are set. if (attrs && !wait) { if (!this.set(attrs, options)) return Observable.empty<Model<A>>(); } else if (!this._validate(attrs, options)) { return Observable.empty<Model<A>>(); } let attributes = this.attributes; // Set temporary attributes if `{wait: true}` to properly find new ids. if (attrs && wait) this.attributes = _.extend({}, attributes, attrs); let method = this.isNew() ? 'create' : (options.patch ? 'patch' : 'update'); if (method === 'patch' && !options.attrs) options.attrs = attrs; let obs$ = this.sync(method, options); // After a successful server-side save, the client is (optionally) // updated with the server-side state. obs$.subscribe( resp => { // Ensure attributes are restored during synchronous saves. this.attributes = attributes; var serverAttrs = options.parse ? this.parse(resp, options) : resp; if (wait) serverAttrs = _.extend({}, attrs, serverAttrs); if (serverAttrs && !this.set(serverAttrs, options)) return false; }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); // Restore attributes. 
this.attributes = attributes; return obs$; } // Destroy this model on the server if it was already persisted. // If `wait: true` is passed, waits for the
super(options); // For clearing status when a model is destroyed on a collection this.event$.filter(e => e.topic == 'destroy') .subscribe(e => this._resetStatus());
random_line_split
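The TypeScript `set()` above diffs incoming attributes against both the current values and the `_previousAttributes` snapshot, records the result in `changed`, and emits one `change:attr` event per modified key. A stripped-down Go analogue of that bookkeeping follows; it assumes comparable attribute values, whereas the original uses `_.isEqual` for deep comparison, and the `Model` type here is invented for illustration.

```go
package main

import "fmt"

// Model is a tiny analogue of the attribute store in the TypeScript sample:
// Set records which attributes actually changed relative to the previous
// snapshot and emits one event per changed key.
type Model struct {
	attributes map[string]interface{}
	previous   map[string]interface{}
	changed    map[string]interface{}
}

func NewModel() *Model {
	return &Model{
		attributes: map[string]interface{}{},
		previous:   map[string]interface{}{},
		changed:    map[string]interface{}{},
	}
}

// Set mirrors the diffing logic in the sample's set(): snapshot the previous
// attributes, compare each incoming value against current and previous, then
// fire one change event per genuinely changed key.
func (m *Model) Set(attrs map[string]interface{}) {
	m.previous = map[string]interface{}{}
	for k, v := range m.attributes {
		m.previous[k] = v
	}
	m.changed = map[string]interface{}{}

	var changes []string
	for k, v := range attrs {
		if m.attributes[k] != v {
			changes = append(changes, k)
		}
		if m.previous[k] != v {
			m.changed[k] = v
		}
		m.attributes[k] = v
	}
	for _, k := range changes {
		fmt.Printf("change:%s -> %v\n", k, m.attributes[k])
	}
}

func main() {
	m := NewModel()
	m.Set(map[string]interface{}{"name": "ada"}) // change:name -> ada
	m.Set(map[string]interface{}{"name": "ada"}) // no event
}
```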
model.ts
return base.replace(/[^\/]$/, '$&/') + encodeURIComponent(id); } get(attr: string) : any { return this.attributes[attr]; } // Get the HTML-escaped value of an attribute. escape(attr) { return _.escape(this.get(attr)); } // Returns `true` if the attribute contains a value that is not null // or undefined. has(attr) { return this.get(attr) != null; } // Special-cased proxy to underscore's `_.matches` method. matches(attrs) { return !!_.iteratee(attrs, this)(this.attributes); } set(key: any, val: any, options: any = {}) : Model<A> { if (key == null) return this; // Handle both `"key", value` and `{key: value}` -style arguments. var attrs; if (typeof key === 'object') { attrs = key; options = val; } else { (attrs = {})[key] = val; } // Run validation. if (!this._validate(attrs, options)) return this; // Extract attributes and options. var unset = options.unset; var silent = options.silent; var changes = []; var changing = this._changing; this._changing = true; if (!changing) { this._previousAttributes = _.clone(this.attributes); this.changed = <A> {}; } var current = this.attributes; var changed = this.changed; var prev = this._previousAttributes; // For each `set` attribute, update or delete the current value. for (var attr in attrs) { val = attrs[attr]; if (!_.isEqual(current[attr], val)) changes.push(attr); if (!_.isEqual(prev[attr], val)) { changed[attr] = val; } else { delete changed[attr]; } unset ? delete current[attr] : current[attr] = val; } // Update the `id`. if (this.idAttribute in attrs) this.id = this.get(this.idAttribute); // Trigger all relevant attribute changes. if (!silent) { if (changes.length) this._pending = options; for (var i = 0; i < changes.length; i++) { this.event$.emit(<IEvent> { topic: 'change:' + changes[i], emitter: this, payload: current[changes[i]], options: options}); } } // You might be wondering why there's a `while` loop here. Changes can // be recursively nested within `"change"` events. if (changing) return this; if (!silent) { while (this._pending) { options = this._pending; this._pending = false; this.event$.emit(<IEvent> { topic: 'change', emitter: this, options: options }); } } this._pending = false; this._changing = false; return this; } // Return a copy of the model's `attributes` object. toJSON (options?: any) { return _.clone(this.attributes); } // Remove an attribute from the model, firing `"change"`. `unset` is a noop // if the attribute doesn't exist. unset(attr, options) { return this.set(attr, void 0, _.extend({}, options, {unset: true})); } // Clear all attributes on the model, firing `"change"`. clear(options) { var attrs = {}; for (var key in this.attributes) attrs[key] = void 0; return this.set(attrs, _.extend({}, options, {unset: true})); } // Determine if the model has changed since the last `"change"` event. // If you specify an attribute name, determine if that attribute has changed. hasChanged(attr?) : boolean { if (attr == null) return !_.isEmpty(this.changed); return _.has(this.changed, attr); } // Return an object containing all the attributes that have changed, or // false if there are no changed attributes. Useful for determining what // parts of a view need to be updated and/or what attributes need to be // persisted to the server. Unset attributes will be set to undefined. // You can also pass an attributes object to diff against the model, // determining if there *would be* a change. changedAttributes(diff) { if (!diff) return this.hasChanged() ? _.clone(this.changed) : false; var old = this._changing ? 
this._previousAttributes : this.attributes; var changed = {}; for (var attr in diff) { var val = diff[attr]; if (_.isEqual(old[attr], val)) continue; changed[attr] = val; } return _.size(changed) ? changed : false; } // Get the previous value of an attribute, recorded at the time the last // `"change"` event was fired. previous(attr) { if (attr == null || !this._previousAttributes) return null; return this._previousAttributes[attr]; } // Get all of the attributes of the model at the time of the previous // `"change"` event. previousAttributes() { return _.clone(this._previousAttributes); } // Fetch the model from the server, merging the response with the model's // local attributes. Any changed attributes will trigger a "change" event. fetch(options: any = {}) : Observable<Model<A>> { options = _.extend({parse: true}, options); let obs$ = this.sync('read', options); obs$.subscribe( resp => { var serverAttrs = options.parse ? this.parse(resp, options) : resp; if (!this.set(serverAttrs, options)) return false; }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); return obs$; } // Set a hash of model attributes, and sync the model to the server. // If the server returns an attributes hash that differs, the model's // state will be `set` again. save(key, val, options?) : Observable<Model<A>> { // Handle both `"key", value` and `{key: value}` -style arguments. let attrs; if (key == null || typeof key === 'object') { attrs = key; options = val; } else { (attrs = {})[key] = val; } options = _.extend({validate: true, parse: true}, options); let wait = options.wait; // If we're not waiting and attributes exist, save acts as // `set(attr).save(null, opts)` with validation. Otherwise, check if // the model will be valid when the attributes, if any, are set. if (attrs && !wait) { if (!this.set(attrs, options)) return Observable.empty<Model<A>>(); } else if (!this._validate(attrs, options)) { return Observable.empty<Model<A>>(); } let attributes = this.attributes; // Set temporary attributes if `{wait: true}` to properly find new ids. if (attrs && wait) this.attributes = _.extend({}, attributes, attrs); let method = this.isNew() ? 'create' : (options.patch ? 'patch' : 'update'); if (method === 'patch' && !options.attrs) options.attrs = attrs; let obs$ = this.sync(method, options); // After a successful server-side save, the client is (optionally) // updated with the server-side state. obs$.subscribe( resp => { // Ensure attributes are restored during synchronous saves. this.attributes = attributes; var serverAttrs = options.parse ? this.parse(resp, options) : resp; if (wait) serverAttrs = _.extend({}, attrs, serverAttrs); if (serverAttrs && !this.set(serverAttrs, options)) return false; }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); // Restore attributes. this.attributes = attributes; return obs$; } // Destroy this model on the server if it was already persisted. // If `wait: true` is passed, waits for the server to respond before removal. destroy(options) : Observable<Model<A>> { options = options ? 
_.clone(options) : {}; var wait = options.wait; var destroy = () => { this.event$.emit(<IEvent>{ topic: 'destroy', emitter: this, payload: this.service, options: options}); } var obs$ = Observable.empty<Model<A>>(); if (!this.isNew()) obs$ = this.sync('delete', options); obs$.subscribe( resp => { if (wait) destroy(); }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); if (!wait) destroy(); return obs$; } // **parse** converts a response into the hash of attributes to be `set` on // the model. The default implementation is just to pass the response along. parse(resp, options?) : A
{ return resp; }
identifier_body
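`changedAttributes(diff)` above answers whether applying a hash *would* change the model: it compares each proposed value against the old one and returns only the genuine differences, or `false` when there are none. The same idea in Go, returning `nil` instead of `false`; the function and names are illustrative.

```go
package main

import "fmt"

// changedAttributes is an analogue of the sample's changedAttributes(diff):
// given old attribute values and a proposed diff, it returns only the keys
// whose values would actually change, or nil when nothing would.
func changedAttributes(old, diff map[string]interface{}) map[string]interface{} {
	changed := map[string]interface{}{}
	for k, v := range diff {
		if old[k] == v {
			continue // value identical; not a change
		}
		changed[k] = v
	}
	if len(changed) == 0 {
		return nil
	}
	return changed
}

func main() {
	old := map[string]interface{}{"title": "draft", "count": 1}
	diff := map[string]interface{}{"title": "draft", "count": 2}
	fmt.Println(changedAttributes(old, diff)) // map[count:2]
}
```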
model.ts
{}; let defaults = _.result(this, 'defaults'); attrs = _.defaults(_.extend({}, defaults, attrs), defaults); this.set(attrs, options); } // Attributes protected attributes: A = <A> {}; protected defaults: A; protected _previousAttributes: A; protected changed: A = <A> {}; public $attributes = new Proxy(this.attributes, { get: (target, property, receiver) => this.get(property), set: (target, property, value, receiver) => this.set(property, value) }) url() : string { let base = super.url(); if (this.isNew()) return base; let id = this.get(this.idAttribute); return base.replace(/[^\/]$/, '$&/') + encodeURIComponent(id); } get(attr: string) : any { return this.attributes[attr]; } // Get the HTML-escaped value of an attribute. escape(attr) { return _.escape(this.get(attr)); } // Returns `true` if the attribute contains a value that is not null // or undefined. has(attr) { return this.get(attr) != null; } // Special-cased proxy to underscore's `_.matches` method. matches(attrs) { return !!_.iteratee(attrs, this)(this.attributes); } set(key: any, val: any, options: any = {}) : Model<A> { if (key == null) return this; // Handle both `"key", value` and `{key: value}` -style arguments. var attrs; if (typeof key === 'object')
else { (attrs = {})[key] = val; } // Run validation. if (!this._validate(attrs, options)) return this; // Extract attributes and options. var unset = options.unset; var silent = options.silent; var changes = []; var changing = this._changing; this._changing = true; if (!changing) { this._previousAttributes = _.clone(this.attributes); this.changed = <A> {}; } var current = this.attributes; var changed = this.changed; var prev = this._previousAttributes; // For each `set` attribute, update or delete the current value. for (var attr in attrs) { val = attrs[attr]; if (!_.isEqual(current[attr], val)) changes.push(attr); if (!_.isEqual(prev[attr], val)) { changed[attr] = val; } else { delete changed[attr]; } unset ? delete current[attr] : current[attr] = val; } // Update the `id`. if (this.idAttribute in attrs) this.id = this.get(this.idAttribute); // Trigger all relevant attribute changes. if (!silent) { if (changes.length) this._pending = options; for (var i = 0; i < changes.length; i++) { this.event$.emit(<IEvent> { topic: 'change:' + changes[i], emitter: this, payload: current[changes[i]], options: options}); } } // You might be wondering why there's a `while` loop here. Changes can // be recursively nested within `"change"` events. if (changing) return this; if (!silent) { while (this._pending) { options = this._pending; this._pending = false; this.event$.emit(<IEvent> { topic: 'change', emitter: this, options: options }); } } this._pending = false; this._changing = false; return this; } // Return a copy of the model's `attributes` object. toJSON (options?: any) { return _.clone(this.attributes); } // Remove an attribute from the model, firing `"change"`. `unset` is a noop // if the attribute doesn't exist. unset(attr, options) { return this.set(attr, void 0, _.extend({}, options, {unset: true})); } // Clear all attributes on the model, firing `"change"`. clear(options) { var attrs = {}; for (var key in this.attributes) attrs[key] = void 0; return this.set(attrs, _.extend({}, options, {unset: true})); } // Determine if the model has changed since the last `"change"` event. // If you specify an attribute name, determine if that attribute has changed. hasChanged(attr?) : boolean { if (attr == null) return !_.isEmpty(this.changed); return _.has(this.changed, attr); } // Return an object containing all the attributes that have changed, or // false if there are no changed attributes. Useful for determining what // parts of a view need to be updated and/or what attributes need to be // persisted to the server. Unset attributes will be set to undefined. // You can also pass an attributes object to diff against the model, // determining if there *would be* a change. changedAttributes(diff) { if (!diff) return this.hasChanged() ? _.clone(this.changed) : false; var old = this._changing ? this._previousAttributes : this.attributes; var changed = {}; for (var attr in diff) { var val = diff[attr]; if (_.isEqual(old[attr], val)) continue; changed[attr] = val; } return _.size(changed) ? changed : false; } // Get the previous value of an attribute, recorded at the time the last // `"change"` event was fired. previous(attr) { if (attr == null || !this._previousAttributes) return null; return this._previousAttributes[attr]; } // Get all of the attributes of the model at the time of the previous // `"change"` event. previousAttributes() { return _.clone(this._previousAttributes); } // Fetch the model from the server, merging the response with the model's // local attributes. 
Any changed attributes will trigger a "change" event. fetch(options: any = {}) : Observable<Model<A>> { options = _.extend({parse: true}, options); let obs$ = this.sync('read', options); obs$.subscribe( resp => { var serverAttrs = options.parse ? this.parse(resp, options) : resp; if (!this.set(serverAttrs, options)) return false; }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); return obs$; } // Set a hash of model attributes, and sync the model to the server. // If the server returns an attributes hash that differs, the model's // state will be `set` again. save(key, val, options?) : Observable<Model<A>> { // Handle both `"key", value` and `{key: value}` -style arguments. let attrs; if (key == null || typeof key === 'object') { attrs = key; options = val; } else { (attrs = {})[key] = val; } options = _.extend({validate: true, parse: true}, options); let wait = options.wait; // If we're not waiting and attributes exist, save acts as // `set(attr).save(null, opts)` with validation. Otherwise, check if // the model will be valid when the attributes, if any, are set. if (attrs && !wait) { if (!this.set(attrs, options)) return Observable.empty<Model<A>>(); } else if (!this._validate(attrs, options)) { return Observable.empty<Model<A>>(); } let attributes = this.attributes; // Set temporary attributes if `{wait: true}` to properly find new ids. if (attrs && wait) this.attributes = _.extend({}, attributes, attrs); let method = this.isNew() ? 'create' : (options.patch ? 'patch' : 'update'); if (method === 'patch' && !options.attrs) options.attrs = attrs; let obs$ = this.sync(method, options); // After a successful server-side save, the client is (optionally) // updated with the server-side state. obs$.subscribe( resp => { // Ensure attributes are restored during synchronous saves. this.attributes = attributes; var serverAttrs = options.parse ? this.parse(resp, options) : resp; if (wait) serverAttrs = _.extend({}, attrs, serverAttrs); if (serverAttrs && !this.set(serverAttrs, options)) return false; }, err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options}) ); // Restore attributes. this.attributes = attributes; return obs$; } // Destroy this model on the server if it was already persisted. // If `wait: true` is passed, waits for the server to respond before removal. destroy(options) : Observable<Model<A>> { options = options ? _.clone(options) : {}; var wait = options.wait; var destroy = () => { this.event$.emit(<IEvent>{ topic: 'destroy', emitter: this, payload: this.service
{ attrs = key; options = val; }
conditional_block
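The record's elided middle above, `{ attrs = key; options = val; }`, is the object-form branch of the `"key", value` / `{key: value}` argument dispatch that `set` and `save` share. Pulled out on its own, the idiom looks like the generic sketch below (an illustration, not code from `model.ts`):

// Generic sketch of the two-form argument dispatch -- illustration only.
function normalizeArgs(key: any, val?: any, options: any = {}) {
  let attrs: { [k: string]: any };
  if (key == null || typeof key === 'object') {
    attrs = key;     // called as fn({a: 1}, options): first arg is the hash,
    options = val;   // so the second positional argument is really `options`
  } else {
    (attrs = {})[key] = val; // called as fn('a', 1, options)
  }
  return { attrs, options };
}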
model.ts
) != null; }

// Special-cased proxy to underscore's `_.matches` method.
matches(attrs) {
  return !!_.iteratee(attrs, this)(this.attributes);
}

set(key: any, val: any, options: any = {}) : Model<A> {
  if (key == null) return this;

  // Handle both `"key", value` and `{key: value}` -style arguments.
  var attrs;
  if (typeof key === 'object') {
    attrs = key;
    options = val;
  } else {
    (attrs = {})[key] = val;
  }

  // ... the remainder of set() is verbatim as in the listing above ...
}

// toJSON, unset, clear, hasChanged, changedAttributes, previous,
// previousAttributes, fetch, and save are likewise verbatim duplicates of
// the listing above and are not repeated here.

// Destroy this model on the server if it was already persisted.
// If `wait: true` is passed, waits for the server to respond before removal.
destroy(options) : Observable<Model<A>> {
  options = options ? _.clone(options) : {};
  var wait = options.wait;
  var destroy = () => {
    this.event$.emit(<IEvent>{ topic: 'destroy', emitter: this, payload: this.service, options: options});
  }

  var obs$ = Observable.empty<Model<A>>();
  if (!this.isNew()) obs$ = this.sync('delete', options);
  obs$.subscribe(
    resp => { if (wait) destroy(); },
    err => this.event$.emit(<IEvent> { topic: 'error', emitter: this, payload: err, options: options})
  );
  if (!wait) destroy();
  return obs$;
}

// **parse** converts a response into the hash of attributes to be `set` on
// the model. The default implementation is just to pass the response along.
parse(resp, options?) : A {
  return resp;
}

validate(attrs, options) { }

// Create a new model with identical attributes to this one.
clone() : Model<A> {
  return this.service.createModel(this.attributes);
}

// A model is new if it has never been saved to the server, and lacks an id.
isNew() : boolean {
  return !this.has(this.idAttribute);
}

// Check if the model is currently in a valid state.
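`save` and `destroy` both honour a `wait` option that defers local mutation until the server round trip completes. A usage sketch, where `todo` is an assumed, already-persisted model instance:

// Hypothetical persisted model -- illustration only.
declare const todo: Model<{ id?: number; done?: boolean }>;

// Without wait: attributes are set() locally first, then synced ('update'
// here, since the model has an id and is therefore not new).
todo.save({ done: true });

// With wait: attributes are merged only after the server responds, so a
// failed request leaves local state untouched.
todo.save({ done: true }, { wait: true }).subscribe(
  resp => console.log('persisted', todo.toJSON()),
  err  => console.error('save failed, model left unchanged', err)
);

// destroy() emits 'destroy' immediately unless {wait: true} is passed, in
// which case the event waits for the server's confirmation.
todo.destroy({ wait: true });

One caveat worth noting: these methods subscribe to the observable returned by `sync` and also return it to the caller, so whether a second subscription re-issues the request depends on whether `sync` (not shown in this excerpt) returns a hot or shared observable.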
isValid
identifier_name
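The empty `validate(attrs, options)` hook in the listing above is meant to be overridden; by Backbone convention, `_validate` (not shown in this excerpt) invokes it from `set` and `save` and rejects the change when it returns a truthy error value. A subclass sketch under that assumption; the class name, attribute shape, and rule are illustrative:

// Hypothetical subclass -- the _validate wiring is inferred from Backbone
// convention rather than shown in this excerpt.
class UserModel extends Model<{ id?: number; email?: string }> {
  // Returning a truthy value marks the change invalid; returning
  // undefined lets set()/save() proceed.
  validate(attrs: any, options: any) {
    if (attrs && attrs.email != null && attrs.email.indexOf('@') < 0) {
      return 'email must contain "@"';
    }
  }
}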