file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-12.1k) | suffix (large_string, lengths 0-12k) | middle (large_string, lengths 0-7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
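Each row below follows the fill-in-the-middle (FIM) layout: `prefix`, `middle`, and `suffix` are contiguous slices of the file named in `file_name`, so concatenating them in that order reproduces the original source, and `fim_type` records which of the four split strategies carved out the hole (`random_line_split`, `identifier_name`, `identifier_body`, or `conditional_block`). A minimal sketch of that invariant follows; the example row is fabricated for illustration and does not come from the table.

```python
# Minimal sketch: reassembling one FIM row into its source file.
# Column names match the table header above; the example row itself is
# hypothetical and only demonstrates the prefix + middle + suffix invariant.

def reconstruct(row: dict) -> str:
    """Concatenating prefix, middle, and suffix yields the original file."""
    return row["prefix"] + row["middle"] + row["suffix"]

example = {
    "file_name": "hello.py",  # hypothetical row, not taken from the table
    "prefix": "def greet():\n    ",
    "middle": 'print("hello")',
    "suffix": "\ngreet()\n",
    "fim_type": "random_line_split",
}

assert reconstruct(example) == 'def greet():\n    print("hello")\ngreet()\n'
```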
iadfa.py | from fsa import *
from nameGenerator import *
class IncrementalAdfa(Dfa):
"""This class is an Acyclic Deterministic Finite State Automaton
constructed by a list of words.
"""
def __init__(self, words, nameGenerator = None, sorted = False):
if nameGenerator is None:
nameGenerator = IndexNameGenerator()
self.nameGenerator = nameGenerator
if sorted:
self.createFromSortedListOfWords(words)
else:
self.createFromArbitraryListOfWords(words)
def getCommonPrefix(self, word):
stateName = self.startState
index = 0
nextStateName = stateName
while nextStateName is not None:
symbol = word[index]
stateName = nextStateName
if symbol in self.states[stateName].transitions:
nextStateName = self.states[stateName].transitions[symbol]
index += 1
else:
nextStateName = None
return (stateName, word[index:])
def hasChildren(self, stateName):
okay = False
if [s for s in list(self.states[stateName].transitions.values()) if s]:
okay = True
return okay
def addSuffix(self, stateName, currentSuffix):
lastState = stateName
while len(currentSuffix) > 0:
newStateName = self.nameGenerator.generate()
symbol = currentSuffix[0]
currentSuffix = currentSuffix[1:]
self.states[stateName].transitions[symbol] = newStateName
self.states[newStateName] = State(newStateName)
stateName = newStateName
self.finalStates.append(stateName)
def markedAsRegistered(self, stateName):
return stateName in self.register
def markAsRegistered(self, stateName):
self.register[stateName] = True
def equivalentRegisteredState(self, stateName):
equivalentState = None
for state in list(self.register.keys()):
if self.areEquivalents(state, stateName):
equivalentState = state
return equivalentState
def lastChild(self, stateName):
|
def replaceOrRegister(self, stateName):
#childName = self.finalStates[-1]
childName, lastSymbol = self.lastChild(stateName)
if not self.markedAsRegistered(childName):
if self.hasChildren(childName):
self.replaceOrRegister(childName)
equivalentState = self.equivalentRegisteredState(childName)
if equivalentState is not None:
self.deleteBranch(childName)
self.states[stateName].transitions[lastSymbol] = equivalentState
else:
self.markAsRegistered(childName)
def deleteBranch(self, child):
childs = [child]
while len(childs) > 0:
nextChilds = []
for child in childs:
nextChilds += [s for s in list(self.states[child].transitions.values()) if not self.markedAsRegistered(s)]
self.states.pop(child)
if child in self.finalStates:
self.finalStates.remove(child)
childs = nextChilds
def createFromSortedListOfWords(self, words):
self.register = {}
self.finalStates = []
self.startState = self.nameGenerator.generate()
self.states = {self.startState : State(self.startState)}
lastWord = None
for word in words:
if word.endswith('\n'):
word = word[:-1]
lastStateName, currentSuffix = self.getCommonPrefix(word)
if self.hasChildren(lastStateName):
self.replaceOrRegister(lastStateName)
self.addSuffix(lastStateName, currentSuffix)
self.replaceOrRegister(self.startState)
def createFromArbitraryListOfWords(self, words):
self.register = {}
self.finalStates = []
self.startState = self.nameGenerator.generate()
self.states = {self.startState : State(self.startState)}
| input = list(self.states[stateName].transitions.keys())
input.sort()
return (self.states[stateName].transitions[input[-1]], input[-1]) | identifier_body |
getTemp.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re, os, time
# function: read and parse sensor data file
def read_sensor(path):
value = "U"
try:
f = open(path, "r")
line = f.readline()
if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
line = f.readline()
m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
if m:
value = str(round(float(m.group(2)) / 1000.0,1))
f.close()
except (IOError), e:
print time.strftime("%x %X"), "Error reading", path, ": ", e
return value
# define pathes to 1-wire sensor data
pathes = ( | # path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
# print read_sensor(path)
# time.sleep(30)
flag = 1
temp = 0
temp2 = 0
while (flag):
temp2 = temp
temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
if temp2 != temp:
print temp
time.sleep(11) | "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
)
# read sensor data
#for path in pathes: | random_line_split |
getTemp.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re, os, time
# function: read and parse sensor data file
def | (path):
value = "U"
try:
f = open(path, "r")
line = f.readline()
if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
line = f.readline()
m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
if m:
value = str(round(float(m.group(2)) / 1000.0,1))
f.close()
except (IOError), e:
print time.strftime("%x %X"), "Error reading", path, ": ", e
return value
# define pathes to 1-wire sensor data
pathes = (
"/sys/bus/w1/devices/28-0314640daeff/w1_slave"
)
# read sensor data
#for path in pathes:
# path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
# print read_sensor(path)
# time.sleep(30)
flag = 1
temp = 0
temp2 = 0
while (flag):
temp2 = temp
temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
if temp2 != temp:
print temp
time.sleep(11)
| read_sensor | identifier_name |
getTemp.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re, os, time
# function: read and parse sensor data file
def read_sensor(path):
|
# define pathes to 1-wire sensor data
pathes = (
"/sys/bus/w1/devices/28-0314640daeff/w1_slave"
)
# read sensor data
#for path in pathes:
# path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
# print read_sensor(path)
# time.sleep(30)
flag = 1
temp = 0
temp2 = 0
while (flag):
temp2 = temp
temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
if temp2 != temp:
print temp
time.sleep(11)
| value = "U"
try:
f = open(path, "r")
line = f.readline()
if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
line = f.readline()
m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
if m:
value = str(round(float(m.group(2)) / 1000.0,1))
f.close()
except (IOError), e:
print time.strftime("%x %X"), "Error reading", path, ": ", e
return value | identifier_body |
getTemp.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re, os, time
# function: read and parse sensor data file
def read_sensor(path):
value = "U"
try:
f = open(path, "r")
line = f.readline()
if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
line = f.readline()
m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
if m:
value = str(round(float(m.group(2)) / 1000.0,1))
f.close()
except (IOError), e:
print time.strftime("%x %X"), "Error reading", path, ": ", e
return value
# define pathes to 1-wire sensor data
pathes = (
"/sys/bus/w1/devices/28-0314640daeff/w1_slave"
)
# read sensor data
#for path in pathes:
# path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
# print read_sensor(path)
# time.sleep(30)
flag = 1
temp = 0
temp2 = 0
while (flag):
| temp2 = temp
temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
if temp2 != temp:
print temp
time.sleep(11) | conditional_block |
|
collection.rs | //! This module contains code to parse all supported collection formats.
use std::fs::File;
use std::io::Read;
use crate::level::*;
use crate::util::*;
enum FileFormat {
Ascii,
Xml,
}
/// A collection of levels. This type contains logic for parsing a collection file. Other than
/// that, it is simply a list of Levels together with some metadata.
#[derive(Debug)]
pub struct Collection {
/// The full name of the collection.
name: String,
/// The name of the file containing the level collection.
short_name: String,
description: Option<String>,
number_of_levels: usize,
/// All levels of this collection. This variable is only written to when loading the
/// collection.
levels: Vec<Level>,
}
impl Collection {
#[cfg(test)]
pub fn from_levels(name: &str, levels: &[Level]) -> Collection {
Collection {
name: name.into(),
short_name: name.into(),
description: None,
number_of_levels: levels.len(),
levels: levels.into(),
}
}
/// Load a level set with the given name, whatever the format might be.
pub fn parse(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, true)
}
/// Figure out title, description, number of levels, etc. of a collection without parsing each
/// level.
pub fn parse_metadata(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, false)
}
fn parse_helper(short_name: &str, parse_levels: bool) -> Result<Collection, SokobanError> {
let mut level_path = ASSETS.clone();
level_path.push("levels");
level_path.push(short_name);
let (level_file, file_format) = {
level_path.set_extension("slc");
if let Ok(f) = File::open(&level_path) {
(f, FileFormat::Xml)
} else {
level_path.set_extension("lvl");
match File::open(level_path) {
Ok(f) => (f, FileFormat::Ascii),
Err(e) => return Err(SokobanError::from(e)),
}
}
};
Ok(match file_format {
FileFormat::Ascii => Collection::parse_lvl(short_name, level_file, parse_levels)?,
FileFormat::Xml => Collection::parse_xml(short_name, level_file, parse_levels)?,
})
}
/// Load a file containing a bunch of levels separated by an empty line, i.e. the usual ASCII
/// format.
fn parse_lvl(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
#[cfg(unix)]
const EMPTY_LINE: &str = "\n\n";
#[cfg(windows)]
const EMPTY_LINE: &str = "\r\n\r\n";
let eol = |c| c == '\n' || c == '\r'; |
let level_strings: Vec<_> = content
.split(EMPTY_LINE)
.map(|x| x.trim_matches(&eol))
.filter(|x| !x.is_empty())
.collect();
let name = level_strings[0].lines().next().unwrap();
let description = level_strings[0]
.splitn(1, &eol)
.last()
.map(|x| x.trim().to_owned());
// Parse the individual levels
let (num, levels) = {
if parse_levels {
let lvls = level_strings[1..]
.iter()
.enumerate()
.map(|(i, l)| Level::parse(i, l.trim_matches(&eol)))
.collect::<Result<Vec<_>, _>>()?;
(lvls.len(), lvls)
} else {
(level_strings.len() - 1, vec![])
}
};
Ok(Collection {
name: name.to_string(),
short_name: short_name.to_string(),
description,
number_of_levels: num,
levels,
})
}
/// Load a level set in the XML-based .slc format.
fn parse_xml(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
use quick_xml::events::Event;
use quick_xml::Reader;
enum State {
Nothing,
Title,
Description,
Email,
Url,
Line,
}
let file = ::std::io::BufReader::new(file);
let mut reader = Reader::from_reader(file);
let mut state = State::Nothing;
// Collection attributes
let mut title = String::new();
let mut description = String::new();
let mut email = String::new();
let mut url = String::new();
let mut levels = vec![];
// Level attributes
let mut num = 0;
let mut level_lines = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => match e.name() {
b"Title" => {
state = State::Title;
title.clear();
}
b"Description" => state = State::Description,
b"Email" => state = State::Email,
b"Url" => state = State::Url,
b"Level" => level_lines.clear(),
b"L" => state = State::Line,
_ => {}
},
Ok(Event::End(e)) => match e.name() {
b"Title" | b"Description" | b"Email" | b"Url" => state = State::Nothing,
b"Level" => {
if parse_levels {
levels.push(Level::parse(num, &level_lines)?);
}
num += 1;
}
b"L" => {
state = State::Nothing;
level_lines.push('\n');
}
_ => {}
},
Ok(Event::Text(ref e)) => match state {
State::Nothing => {}
State::Line if !parse_levels => {}
_ => {
let s = e.unescape_and_decode(&reader).unwrap();
match state {
State::Title => title.push_str(&s),
State::Description => description.push_str(&s),
State::Email => email.push_str(&s),
State::Url => url.push_str(&s),
State::Line => level_lines.push_str(&s),
_ => unreachable!(),
}
}
},
Ok(Event::Eof) => break,
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => {}
}
}
Ok(Collection {
name: title,
short_name: short_name.to_string(),
description: if description.is_empty() {
None
} else {
Some(description)
},
number_of_levels: num,
levels,
})
}
// Accessor methods
pub fn name(&self) -> &str {
&self.name
}
pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn description(&self) -> Option<&str> {
match self.description {
Some(ref x) => Some(&x),
None => None,
}
}
pub fn first_level(&self) -> &Level {
&self.levels[0]
}
/// Get all levels.
pub fn levels(&self) -> &[Level] {
self.levels.as_ref()
}
pub fn number_of_levels(&self) -> usize {
self.number_of_levels
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn load_test_collections() {
assert!(Collection::parse("test_2").is_ok());
assert!(Collection::parse_metadata("test_2").is_ok());
assert!(Collection::parse("test3iuntrenutineaniutea").is_err());
assert!(Collection::parse_metadata("test3iuntrenutineaniutea").is_err());
}
} | let mut file = file;
// Read the collection’s file
let mut content = "".to_string();
file.read_to_string(&mut content)?; | random_line_split |
collection.rs | //! This module contains code to parse all supported collection formats.
use std::fs::File;
use std::io::Read;
use crate::level::*;
use crate::util::*;
enum FileFormat {
Ascii,
Xml,
}
/// A collection of levels. This type contains logic for parsing a collection file. Other than
/// that, it is simply a list of Levels together with some metadata.
#[derive(Debug)]
pub struct Collection {
/// The full name of the collection.
name: String,
/// The name of the file containing the level collection.
short_name: String,
description: Option<String>,
number_of_levels: usize,
/// All levels of this collection. This variable is only written to when loading the
/// collection.
levels: Vec<Level>,
}
impl Collection {
#[cfg(test)]
pub fn from_levels(name: &str, levels: &[Level]) -> Collection {
Collection {
name: name.into(),
short_name: name.into(),
description: None,
number_of_levels: levels.len(),
levels: levels.into(),
}
}
/// Load a level set with the given name, whatever the format might be.
pub fn parse(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, true)
}
/// Figure out title, description, number of levels, etc. of a collection without parsing each
/// level.
pub fn parse_metadata(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, false)
}
fn parse_helper(short_name: &str, parse_levels: bool) -> Result<Collection, SokobanError> {
let mut level_path = ASSETS.clone();
level_path.push("levels");
level_path.push(short_name);
let (level_file, file_format) = {
level_path.set_extension("slc");
if let Ok(f) = File::open(&level_path) {
(f, FileFormat::Xml)
} else {
level_path.set_extension("lvl");
match File::open(level_path) {
Ok(f) => (f, FileFormat::Ascii),
Err(e) => return Err(SokobanError::from(e)),
}
}
};
Ok(match file_format {
FileFormat::Ascii => Collection::parse_lvl(short_name, level_file, parse_levels)?,
FileFormat::Xml => Collection::parse_xml(short_name, level_file, parse_levels)?,
})
}
/// Load a file containing a bunch of levels separated by an empty line, i.e. the usual ASCII
/// format.
fn parse_lvl(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
#[cfg(unix)]
const EMPTY_LINE: &str = "\n\n";
#[cfg(windows)]
const EMPTY_LINE: &str = "\r\n\r\n";
let eol = |c| c == '\n' || c == '\r';
let mut file = file;
// Read the collection’s file
let mut content = "".to_string();
file.read_to_string(&mut content)?;
let level_strings: Vec<_> = content
.split(EMPTY_LINE)
.map(|x| x.trim_matches(&eol))
.filter(|x| !x.is_empty())
.collect();
let name = level_strings[0].lines().next().unwrap();
let description = level_strings[0]
.splitn(1, &eol)
.last()
.map(|x| x.trim().to_owned());
// Parse the individual levels
let (num, levels) = {
if parse_levels {
let lvls = level_strings[1..]
.iter()
.enumerate()
.map(|(i, l)| Level::parse(i, l.trim_matches(&eol)))
.collect::<Result<Vec<_>, _>>()?;
(lvls.len(), lvls)
} else {
(level_strings.len() - 1, vec![])
}
};
Ok(Collection {
name: name.to_string(),
short_name: short_name.to_string(),
description,
number_of_levels: num,
levels,
})
}
/// Load a level set in the XML-based .slc format.
fn parse_xml(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
use quick_xml::events::Event;
use quick_xml::Reader;
enum State {
Nothing,
Title,
Description,
Email,
Url,
Line,
}
let file = ::std::io::BufReader::new(file);
let mut reader = Reader::from_reader(file);
let mut state = State::Nothing;
// Collection attributes
let mut title = String::new();
let mut description = String::new();
let mut email = String::new();
let mut url = String::new();
let mut levels = vec![];
// Level attributes
let mut num = 0;
let mut level_lines = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => match e.name() {
b"Title" => {
state = State::Title;
title.clear();
}
b"Description" => state = State::Description,
b"Email" => state = State::Email,
b"Url" => state = State::Url,
b"Level" => level_lines.clear(),
b"L" => state = State::Line,
_ => {}
},
Ok(Event::End(e)) => match e.name() {
b"Title" | b"Description" | b"Email" | b"Url" => state = State::Nothing,
b"Level" => {
if parse_levels {
levels.push(Level::parse(num, &level_lines)?);
}
num += 1;
}
b"L" => {
state = State::Nothing;
level_lines.push('\n');
}
_ => {}
},
Ok(Event::Text(ref e)) => match state {
State::Nothing => {}
State::Line if !parse_levels => {}
_ => {
let s = e.unescape_and_decode(&reader).unwrap();
match state {
State::Title => title.push_str(&s),
State::Description => description.push_str(&s),
State::Email => email.push_str(&s),
State::Url => url.push_str(&s),
State::Line => level_lines.push_str(&s),
_ => unreachable!(),
}
}
},
Ok(Event::Eof) => break,
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => {}
}
}
Ok(Collection {
name: title,
short_name: short_name.to_string(),
description: if description.is_empty() {
None
} else {
Some(description)
},
number_of_levels: num,
levels,
})
}
// Accessor methods
pub fn name(&self) -> &str {
&self.name
}
pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn de | self) -> Option<&str> {
match self.description {
Some(ref x) => Some(&x),
None => None,
}
}
pub fn first_level(&self) -> &Level {
&self.levels[0]
}
/// Get all levels.
pub fn levels(&self) -> &[Level] {
self.levels.as_ref()
}
pub fn number_of_levels(&self) -> usize {
self.number_of_levels
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn load_test_collections() {
assert!(Collection::parse("test_2").is_ok());
assert!(Collection::parse_metadata("test_2").is_ok());
assert!(Collection::parse("test3iuntrenutineaniutea").is_err());
assert!(Collection::parse_metadata("test3iuntrenutineaniutea").is_err());
}
}
| scription(& | identifier_name |
collection.rs | //! This module contains code to parse all supported collection formats.
use std::fs::File;
use std::io::Read;
use crate::level::*;
use crate::util::*;
enum FileFormat {
Ascii,
Xml,
}
/// A collection of levels. This type contains logic for parsing a collection file. Other than
/// that, it is simply a list of Levels together with some metadata.
#[derive(Debug)]
pub struct Collection {
/// The full name of the collection.
name: String,
/// The name of the file containing the level collection.
short_name: String,
description: Option<String>,
number_of_levels: usize,
/// All levels of this collection. This variable is only written to when loading the
/// collection.
levels: Vec<Level>,
}
impl Collection {
#[cfg(test)]
pub fn from_levels(name: &str, levels: &[Level]) -> Collection {
Collection {
name: name.into(),
short_name: name.into(),
description: None,
number_of_levels: levels.len(),
levels: levels.into(),
}
}
/// Load a level set with the given name, whatever the format might be.
pub fn parse(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, true)
}
/// Figure out title, description, number of levels, etc. of a collection without parsing each
/// level.
pub fn parse_metadata(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, false)
}
fn parse_helper(short_name: &str, parse_levels: bool) -> Result<Collection, SokobanError> {
let mut level_path = ASSETS.clone();
level_path.push("levels");
level_path.push(short_name);
let (level_file, file_format) = {
level_path.set_extension("slc");
if let Ok(f) = File::open(&level_path) {
(f, FileFormat::Xml)
} else {
level_path.set_extension("lvl");
match File::open(level_path) {
Ok(f) => (f, FileFormat::Ascii),
Err(e) => return Err(SokobanError::from(e)),
}
}
};
Ok(match file_format {
FileFormat::Ascii => Collection::parse_lvl(short_name, level_file, parse_levels)?,
FileFormat::Xml => Collection::parse_xml(short_name, level_file, parse_levels)?,
})
}
/// Load a file containing a bunch of levels separated by an empty line, i.e. the usual ASCII
/// format.
fn parse_lvl(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
#[cfg(unix)]
const EMPTY_LINE: &str = "\n\n";
#[cfg(windows)]
const EMPTY_LINE: &str = "\r\n\r\n";
let eol = |c| c == '\n' || c == '\r';
let mut file = file;
// Read the collection’s file
let mut content = "".to_string();
file.read_to_string(&mut content)?;
let level_strings: Vec<_> = content
.split(EMPTY_LINE)
.map(|x| x.trim_matches(&eol))
.filter(|x| !x.is_empty())
.collect();
let name = level_strings[0].lines().next().unwrap();
let description = level_strings[0]
.splitn(1, &eol)
.last()
.map(|x| x.trim().to_owned());
// Parse the individual levels
let (num, levels) = {
if parse_levels {
let lvls = level_strings[1..]
.iter()
.enumerate()
.map(|(i, l)| Level::parse(i, l.trim_matches(&eol)))
.collect::<Result<Vec<_>, _>>()?;
(lvls.len(), lvls)
} else {
(level_strings.len() - 1, vec![])
}
};
Ok(Collection {
name: name.to_string(),
short_name: short_name.to_string(),
description,
number_of_levels: num,
levels,
})
}
/// Load a level set in the XML-based .slc format.
fn parse_xml(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
use quick_xml::events::Event;
use quick_xml::Reader;
enum State {
Nothing,
Title,
Description,
Email,
Url,
Line,
}
let file = ::std::io::BufReader::new(file);
let mut reader = Reader::from_reader(file);
let mut state = State::Nothing;
// Collection attributes
let mut title = String::new();
let mut description = String::new();
let mut email = String::new();
let mut url = String::new();
let mut levels = vec![];
// Level attributes
let mut num = 0;
let mut level_lines = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => match e.name() {
b"Title" => {
state = State::Title;
title.clear();
}
b"Description" => state = State::Description,
b"Email" => state = State::Email,
b"Url" => state = State::Url,
b"Level" => level_lines.clear(),
b"L" => state = State::Line,
_ => {}
},
Ok(Event::End(e)) => match e.name() {
b"Title" | b"Description" | b"Email" | b"Url" => state = State::Nothing,
b"Level" => {
if parse_levels {
levels.push(Level::parse(num, &level_lines)?);
}
num += 1;
}
b"L" => {
state = State::Nothing;
level_lines.push('\n');
}
_ => {} | },
Ok(Event::Text(ref e)) => match state {
State::Nothing => {}
State::Line if !parse_levels => {}
_ => {
let s = e.unescape_and_decode(&reader).unwrap();
match state {
State::Title => title.push_str(&s),
State::Description => description.push_str(&s),
State::Email => email.push_str(&s),
State::Url => url.push_str(&s),
State::Line => level_lines.push_str(&s),
_ => unreachable!(),
}
}
},
Ok(Event::Eof) => break,
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => {}
}
}
Ok(Collection {
name: title,
short_name: short_name.to_string(),
description: if description.is_empty() {
None
} else {
Some(description)
},
number_of_levels: num,
levels,
})
}
// Accessor methods
pub fn name(&self) -> &str {
&self.name
}
pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn description(&self) -> Option<&str> {
match self.description {
Some(ref x) => Some(&x),
None => None,
}
}
pub fn first_level(&self) -> &Level {
&self.levels[0]
}
/// Get all levels.
pub fn levels(&self) -> &[Level] {
self.levels.as_ref()
}
pub fn number_of_levels(&self) -> usize {
self.number_of_levels
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn load_test_collections() {
assert!(Collection::parse("test_2").is_ok());
assert!(Collection::parse_metadata("test_2").is_ok());
assert!(Collection::parse("test3iuntrenutineaniutea").is_err());
assert!(Collection::parse_metadata("test3iuntrenutineaniutea").is_err());
}
}
| conditional_block |
|
translate.py | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Translation GUI
$Id: translate.py 26889 2004-08-04 04:00:36Z pruggera $
"""
__docformat__ = 'restructuredtext'
from zope.app.i18n.browser import BaseView
class Translate(BaseView):
def getMessages(self):
"""Get messages"""
filter = self.request.get('filter', '%')
messages = []
for msg_id in self.context.getMessageIds(filter):
messages.append((msg_id, len(messages)))
return messages
def getTranslation(self, msgid, target_lang):
return self.context.translate(msgid, target_language=target_lang)
def getEditLanguages(self):
'''get the languages that are selected for editing'''
languages = self.request.cookies.get('edit_languages', '')
return filter(None, languages.split(','))
def editMessage(self):
msg_id = self.request['msg_id']
for language in self.getEditLanguages():
msg = self.request['msg_lang_%s' %language]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def editMessages(self):
# Handle new Messages
for count in range(5):
msg_id = self.request.get('new-msg_id-%i' %count, '')
if msg_id:
for language in self.getEditLanguages():
msg = self.request.get('new-%s-%i' %(language, count),
msg_id)
self.context.addMessage(msg_id, msg, language)
# Handle edited Messages
keys = filter(lambda k: k.startswith('edit-msg_id-'),
self.request.keys())
keys = map(lambda k: k[12:], keys)
for key in keys:
msg_id = self.request['edit-msg_id-'+key]
for language in self.getEditLanguages():
msg = self.request['edit-%s-%s' %(language, key)]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def deleteMessages(self, message_ids):
for id in message_ids:
msgid = self.request.form['edit-msg_id-%s' %id]
for language in self.context.getAvailableLanguages():
# Sometimes we edit a language, but no translation exists...
try:
self.context.deleteMessage(msgid, language)
except KeyError:
pass
return self.request.response.redirect(self.request.URL[-1])
def addLanguage(self, language):
self.context.addLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
def changeEditLanguages(self, languages=[]):
self.request.response.setCookie('edit_languages',
','.join(languages))
return self.request.response.redirect(self.request.URL[-1])
def changeFilter(self):
filter = self.request.get('filter', '%')
self.request.response.setCookie('filter', filter)
return self.request.response.redirect(self.request.URL[-1])
def deleteLanguages(self, languages): | for language in languages:
self.context.deleteLanguage(language)
return self.request.response.redirect(self.request.URL[-1]) | random_line_split |
|
translate.py | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Translation GUI
$Id: translate.py 26889 2004-08-04 04:00:36Z pruggera $
"""
__docformat__ = 'restructuredtext'
from zope.app.i18n.browser import BaseView
class Translate(BaseView):
def getMessages(self):
"""Get messages"""
filter = self.request.get('filter', '%')
messages = []
for msg_id in self.context.getMessageIds(filter):
messages.append((msg_id, len(messages)))
return messages
def getTranslation(self, msgid, target_lang):
return self.context.translate(msgid, target_language=target_lang)
def getEditLanguages(self):
'''get the languages that are selected for editing'''
languages = self.request.cookies.get('edit_languages', '')
return filter(None, languages.split(','))
def editMessage(self):
|
def editMessages(self):
# Handle new Messages
for count in range(5):
msg_id = self.request.get('new-msg_id-%i' %count, '')
if msg_id:
for language in self.getEditLanguages():
msg = self.request.get('new-%s-%i' %(language, count),
msg_id)
self.context.addMessage(msg_id, msg, language)
# Handle edited Messages
keys = filter(lambda k: k.startswith('edit-msg_id-'),
self.request.keys())
keys = map(lambda k: k[12:], keys)
for key in keys:
msg_id = self.request['edit-msg_id-'+key]
for language in self.getEditLanguages():
msg = self.request['edit-%s-%s' %(language, key)]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def deleteMessages(self, message_ids):
for id in message_ids:
msgid = self.request.form['edit-msg_id-%s' %id]
for language in self.context.getAvailableLanguages():
# Sometimes we edit a language, but no translation exists...
try:
self.context.deleteMessage(msgid, language)
except KeyError:
pass
return self.request.response.redirect(self.request.URL[-1])
def addLanguage(self, language):
self.context.addLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
def changeEditLanguages(self, languages=[]):
self.request.response.setCookie('edit_languages',
','.join(languages))
return self.request.response.redirect(self.request.URL[-1])
def changeFilter(self):
filter = self.request.get('filter', '%')
self.request.response.setCookie('filter', filter)
return self.request.response.redirect(self.request.URL[-1])
def deleteLanguages(self, languages):
for language in languages:
self.context.deleteLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
| msg_id = self.request['msg_id']
for language in self.getEditLanguages():
msg = self.request['msg_lang_%s' %language]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1]) | identifier_body |
translate.py | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Translation GUI
$Id: translate.py 26889 2004-08-04 04:00:36Z pruggera $
"""
__docformat__ = 'restructuredtext'
from zope.app.i18n.browser import BaseView
class Translate(BaseView):
def getMessages(self):
"""Get messages"""
filter = self.request.get('filter', '%')
messages = []
for msg_id in self.context.getMessageIds(filter):
messages.append((msg_id, len(messages)))
return messages
def getTranslation(self, msgid, target_lang):
return self.context.translate(msgid, target_language=target_lang)
def getEditLanguages(self):
'''get the languages that are selected for editing'''
languages = self.request.cookies.get('edit_languages', '')
return filter(None, languages.split(','))
def editMessage(self):
msg_id = self.request['msg_id']
for language in self.getEditLanguages():
msg = self.request['msg_lang_%s' %language]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def editMessages(self):
# Handle new Messages
for count in range(5):
msg_id = self.request.get('new-msg_id-%i' %count, '')
if msg_id:
|
# Handle edited Messages
keys = filter(lambda k: k.startswith('edit-msg_id-'),
self.request.keys())
keys = map(lambda k: k[12:], keys)
for key in keys:
msg_id = self.request['edit-msg_id-'+key]
for language in self.getEditLanguages():
msg = self.request['edit-%s-%s' %(language, key)]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def deleteMessages(self, message_ids):
for id in message_ids:
msgid = self.request.form['edit-msg_id-%s' %id]
for language in self.context.getAvailableLanguages():
# Sometimes we edit a language, but no translation exists...
try:
self.context.deleteMessage(msgid, language)
except KeyError:
pass
return self.request.response.redirect(self.request.URL[-1])
def addLanguage(self, language):
self.context.addLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
def changeEditLanguages(self, languages=[]):
self.request.response.setCookie('edit_languages',
','.join(languages))
return self.request.response.redirect(self.request.URL[-1])
def changeFilter(self):
filter = self.request.get('filter', '%')
self.request.response.setCookie('filter', filter)
return self.request.response.redirect(self.request.URL[-1])
def deleteLanguages(self, languages):
for language in languages:
self.context.deleteLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
| for language in self.getEditLanguages():
msg = self.request.get('new-%s-%i' %(language, count),
msg_id)
self.context.addMessage(msg_id, msg, language) | conditional_block |
translate.py | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Translation GUI
$Id: translate.py 26889 2004-08-04 04:00:36Z pruggera $
"""
__docformat__ = 'restructuredtext'
from zope.app.i18n.browser import BaseView
class | (BaseView):
def getMessages(self):
"""Get messages"""
filter = self.request.get('filter', '%')
messages = []
for msg_id in self.context.getMessageIds(filter):
messages.append((msg_id, len(messages)))
return messages
def getTranslation(self, msgid, target_lang):
return self.context.translate(msgid, target_language=target_lang)
def getEditLanguages(self):
'''get the languages that are selected for editing'''
languages = self.request.cookies.get('edit_languages', '')
return filter(None, languages.split(','))
def editMessage(self):
msg_id = self.request['msg_id']
for language in self.getEditLanguages():
msg = self.request['msg_lang_%s' %language]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def editMessages(self):
# Handle new Messages
for count in range(5):
msg_id = self.request.get('new-msg_id-%i' %count, '')
if msg_id:
for language in self.getEditLanguages():
msg = self.request.get('new-%s-%i' %(language, count),
msg_id)
self.context.addMessage(msg_id, msg, language)
# Handle edited Messages
keys = filter(lambda k: k.startswith('edit-msg_id-'),
self.request.keys())
keys = map(lambda k: k[12:], keys)
for key in keys:
msg_id = self.request['edit-msg_id-'+key]
for language in self.getEditLanguages():
msg = self.request['edit-%s-%s' %(language, key)]
if msg != self.context.translate(msg_id,
target_language=language):
self.context.updateMessage(msg_id, msg, language)
return self.request.response.redirect(self.request.URL[-1])
def deleteMessages(self, message_ids):
for id in message_ids:
msgid = self.request.form['edit-msg_id-%s' %id]
for language in self.context.getAvailableLanguages():
# Sometimes we edit a language, but no translation exists...
try:
self.context.deleteMessage(msgid, language)
except KeyError:
pass
return self.request.response.redirect(self.request.URL[-1])
def addLanguage(self, language):
self.context.addLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
def changeEditLanguages(self, languages=[]):
self.request.response.setCookie('edit_languages',
','.join(languages))
return self.request.response.redirect(self.request.URL[-1])
def changeFilter(self):
filter = self.request.get('filter', '%')
self.request.response.setCookie('filter', filter)
return self.request.response.redirect(self.request.URL[-1])
def deleteLanguages(self, languages):
for language in languages:
self.context.deleteLanguage(language)
return self.request.response.redirect(self.request.URL[-1])
| Translate | identifier_name |
chain.rs | extern crate futures;
extern crate tokio_core;
use std::net::TcpStream;
use std::thread;
use std::io::{Write, Read};
use futures::Future;
use futures::stream::Stream;
use tokio_core::io::read_to_end;
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
})
}
#[test]
fn | () {
let mut l = t!(Core::new());
let srv = t!(TcpListener::bind(&t!("127.0.0.1:0".parse()), &l.handle()));
let addr = t!(srv.local_addr());
let t = thread::spawn(move || {
let mut s1 = TcpStream::connect(&addr).unwrap();
s1.write_all(b"foo ").unwrap();
let mut s2 = TcpStream::connect(&addr).unwrap();
s2.write_all(b"bar ").unwrap();
let mut s3 = TcpStream::connect(&addr).unwrap();
s3.write_all(b"baz").unwrap();
});
let clients = srv.incoming().map(|e| e.0).take(3);
let copied = clients.collect().and_then(|clients| {
let mut clients = clients.into_iter();
let a = clients.next().unwrap();
let b = clients.next().unwrap();
let c = clients.next().unwrap();
read_to_end(a.chain(b).chain(c), Vec::new())
});
let (_, data) = t!(l.run(copied));
t.join().unwrap();
assert_eq!(data, b"foo bar baz");
}
| chain_clients | identifier_name |
chain.rs | extern crate futures;
extern crate tokio_core;
use std::net::TcpStream;
use std::thread;
use std::io::{Write, Read};
use futures::Future;
use futures::stream::Stream;
use tokio_core::io::read_to_end;
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
})
}
#[test]
fn chain_clients() {
let mut l = t!(Core::new());
let srv = t!(TcpListener::bind(&t!("127.0.0.1:0".parse()), &l.handle()));
let addr = t!(srv.local_addr());
let t = thread::spawn(move || {
let mut s1 = TcpStream::connect(&addr).unwrap();
s1.write_all(b"foo ").unwrap();
let mut s2 = TcpStream::connect(&addr).unwrap();
s2.write_all(b"bar ").unwrap();
let mut s3 = TcpStream::connect(&addr).unwrap();
s3.write_all(b"baz").unwrap();
});
let clients = srv.incoming().map(|e| e.0).take(3);
let copied = clients.collect().and_then(|clients| {
let mut clients = clients.into_iter();
let a = clients.next().unwrap();
let b = clients.next().unwrap();
let c = clients.next().unwrap();
read_to_end(a.chain(b).chain(c), Vec::new())
});
let (_, data) = t!(l.run(copied)); | t.join().unwrap();
assert_eq!(data, b"foo bar baz");
} | random_line_split |
|
chain.rs | extern crate futures;
extern crate tokio_core;
use std::net::TcpStream;
use std::thread;
use std::io::{Write, Read};
use futures::Future;
use futures::stream::Stream;
use tokio_core::io::read_to_end;
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
})
}
#[test]
fn chain_clients() |
read_to_end(a.chain(b).chain(c), Vec::new())
});
let (_, data) = t!(l.run(copied));
t.join().unwrap();
assert_eq!(data, b"foo bar baz");
}
| {
let mut l = t!(Core::new());
let srv = t!(TcpListener::bind(&t!("127.0.0.1:0".parse()), &l.handle()));
let addr = t!(srv.local_addr());
let t = thread::spawn(move || {
let mut s1 = TcpStream::connect(&addr).unwrap();
s1.write_all(b"foo ").unwrap();
let mut s2 = TcpStream::connect(&addr).unwrap();
s2.write_all(b"bar ").unwrap();
let mut s3 = TcpStream::connect(&addr).unwrap();
s3.write_all(b"baz").unwrap();
});
let clients = srv.incoming().map(|e| e.0).take(3);
let copied = clients.collect().and_then(|clients| {
let mut clients = clients.into_iter();
let a = clients.next().unwrap();
let b = clients.next().unwrap();
let c = clients.next().unwrap(); | identifier_body |
processor-info.component.ts | import { Component, AfterViewInit, OnDestroy, ViewChild, ElementRef, Input } from "@angular/core";
import Overlay from 'ol/overlay';
import Proj from 'ol/proj';
import { MapService } from '../../map/map.service';
import {TimeService} from '../../app/time.service';
import {ProcessorsService} from '../processors.service'
import { ProcessorLayer } from '../processor-layer';
@Component({
selector: 'processor-info',
templateUrl: './processor-info.component.html',
styleUrls: ['./processor-info.component.scss']
})
export class ProcessorInfoComponent implements AfterViewInit, OnDestroy {
@ViewChild("container") container: ElementRef;
private popup;
private viewer;
public position = [0,0];
public showTimeseriesButton;
public data = {
status: 'empty',
value: 0
}
constructor(
private mapService: MapService,
private processorLayer: ProcessorLayer,
private processorService: ProcessorsService,
private timeService: TimeService
) {
this.processorService.getTimeSeriesState().subscribe((value)=>{
this.position = value.coordinates;
let activeProcessor = this.processorService.getActiveProcessor();
this.showTimeseriesButton = !value.enabled && activeProcessor && activeProcessor.hasTimeDimension();
});
}
ngAfterViewInit() {
let overlay = new Overlay({
element: this.container.nativeElement,
autoPan: true,
autoPanAnimation: {
duration: 250
},
positioning: 'bottom-left'
});
this.mapService.getViewer().then((viewer) => {
this.viewer = viewer;
viewer.addOverlay(overlay);
viewer.on('singleclick', this.onMapClick, this);
this.container.nativeElement.addEventListener('mousedown', (evt) => {
let down_coord = viewer.getEventCoordinate(evt);
let overlay_coord = overlay.getPosition();
let offset = [overlay_coord[0] - down_coord[0], overlay_coord[1] - down_coord[1]];
let that = this;
let moved = false;
function move(evt) {
let coord = viewer.getEventCoordinate(evt);
if (down_coord[0] == coord[0] && down_coord[1] == coord[1]) {
return;
}
that.position = [coord[0] + offset[0], coord[1] + offset[1]]
overlay.setPosition(that.position);
that.data = {
status: 'empty',
value: 0
}
moved = true;
}
function | (evt) {
window.removeEventListener('mousemove', move);
window.removeEventListener('mouseup', end);
if (moved)
that.retrieveDataValue(that.position);
}
window.addEventListener('mousemove', move);
window.addEventListener('mouseup', end);
});
});
this.popup = overlay;
this.timeService.getSelectedDate().subscribe((dt)=>{
if (this.popup.getPosition() != null) {
this.retrieveDataValue(this.position);
}
});
}
getGeoCoordinates(coord) {
if (!this.viewer) {
return null;
}
if (this.viewer.getView().getProjection() != 'EPSG:4326') {
coord = Proj.transform(coord, this.viewer.getView().getProjection(), 'EPSG:4326');
}
return coord;
}
retrieveDataValue(coordinate) {
this.data = {
status: 'loading',
value: 0
}
this.processorLayer.getSourceValue(coordinate).then((value)=>{
this.data = {
status: 'ready',
value: value
}
});
this.processorService.setTimeSeriesState({
coordinates: coordinate
});
}
onTimeSeriesClick() {
this.processorService.setTimeSeriesState({
enabled: true
});
}
closePopup() {
this.popup.setPosition(null);
this.processorService.setTimeSeriesState({
enabled: false
});
}
ngOnDestroy() {
this.mapService.getViewer().then((viewer)=>{
viewer.removeOverlay(this.popup);
viewer.un('singleclick', this.onMapClick, this);
});
}
private onMapClick(evt) {
let coordinate = evt.coordinate;
this.position = coordinate;
this.popup.setPosition(this.position);
this.retrieveDataValue(coordinate);
}
} | end | identifier_name |
processor-info.component.ts | import { Component, AfterViewInit, OnDestroy, ViewChild, ElementRef, Input } from "@angular/core";
import Overlay from 'ol/overlay';
import Proj from 'ol/proj';
import { MapService } from '../../map/map.service';
import {TimeService} from '../../app/time.service';
import {ProcessorsService} from '../processors.service'
import { ProcessorLayer } from '../processor-layer';
@Component({
selector: 'processor-info',
templateUrl: './processor-info.component.html',
styleUrls: ['./processor-info.component.scss']
})
export class ProcessorInfoComponent implements AfterViewInit, OnDestroy {
@ViewChild("container") container: ElementRef;
private popup;
private viewer;
public position = [0,0];
public showTimeseriesButton;
public data = {
status: 'empty',
value: 0
}
constructor(
private mapService: MapService,
private processorLayer: ProcessorLayer,
private processorService: ProcessorsService,
private timeService: TimeService
) {
this.processorService.getTimeSeriesState().subscribe((value)=>{
this.position = value.coordinates;
let activeProcessor = this.processorService.getActiveProcessor();
this.showTimeseriesButton = !value.enabled && activeProcessor && activeProcessor.hasTimeDimension();
});
}
ngAfterViewInit() {
let overlay = new Overlay({
element: this.container.nativeElement,
autoPan: true,
autoPanAnimation: {
duration: 250
},
positioning: 'bottom-left'
});
this.mapService.getViewer().then((viewer) => {
this.viewer = viewer;
viewer.addOverlay(overlay);
viewer.on('singleclick', this.onMapClick, this);
this.container.nativeElement.addEventListener('mousedown', (evt) => {
let down_coord = viewer.getEventCoordinate(evt);
let overlay_coord = overlay.getPosition();
let offset = [overlay_coord[0] - down_coord[0], overlay_coord[1] - down_coord[1]];
let that = this;
let moved = false;
function move(evt) {
let coord = viewer.getEventCoordinate(evt);
if (down_coord[0] == coord[0] && down_coord[1] == coord[1]) {
return;
} | }
moved = true;
}
function end(evt) {
window.removeEventListener('mousemove', move);
window.removeEventListener('mouseup', end);
if (moved)
that.retrieveDataValue(that.position);
}
window.addEventListener('mousemove', move);
window.addEventListener('mouseup', end);
});
});
this.popup = overlay;
this.timeService.getSelectedDate().subscribe((dt)=>{
if (this.popup.getPosition() != null) {
this.retrieveDataValue(this.position);
}
});
}
getGeoCoordinates(coord) {
if (!this.viewer) {
return null;
}
if (this.viewer.getView().getProjection() != 'EPSG:4326') {
coord = Proj.transform(coord, this.viewer.getView().getProjection(), 'EPSG:4326');
}
return coord;
}
retrieveDataValue(coordinate) {
this.data = {
status: 'loading',
value: 0
}
this.processorLayer.getSourceValue(coordinate).then((value)=>{
this.data = {
status: 'ready',
value: value
}
});
this.processorService.setTimeSeriesState({
coordinates: coordinate
});
}
onTimeSeriesClick() {
this.processorService.setTimeSeriesState({
enabled: true
});
}
closePopup() {
this.popup.setPosition(null);
this.processorService.setTimeSeriesState({
enabled: false
});
}
ngOnDestroy() {
this.mapService.getViewer().then((viewer)=>{
viewer.removeOverlay(this.popup);
viewer.un('singleclick', this.onMapClick, this);
});
}
private onMapClick(evt) {
let coordinate = evt.coordinate;
this.position = coordinate;
this.popup.setPosition(this.position);
this.retrieveDataValue(coordinate);
}
} | that.position = [coord[0] + offset[0], coord[1] + offset[1]]
overlay.setPosition(that.position);
that.data = {
status: 'empty',
value: 0 | random_line_split |
processor-info.component.ts | import { Component, AfterViewInit, OnDestroy, ViewChild, ElementRef, Input } from "@angular/core";
import Overlay from 'ol/overlay';
import Proj from 'ol/proj';
import { MapService } from '../../map/map.service';
import {TimeService} from '../../app/time.service';
import {ProcessorsService} from '../processors.service'
import { ProcessorLayer } from '../processor-layer';
@Component({
selector: 'processor-info',
templateUrl: './processor-info.component.html',
styleUrls: ['./processor-info.component.scss']
})
export class ProcessorInfoComponent implements AfterViewInit, OnDestroy {
@ViewChild("container") container: ElementRef;
private popup;
private viewer;
public position = [0,0];
public showTimeseriesButton;
public data = {
status: 'empty',
value: 0
}
constructor(
private mapService: MapService,
private processorLayer: ProcessorLayer,
private processorService: ProcessorsService,
private timeService: TimeService
) {
this.processorService.getTimeSeriesState().subscribe((value)=>{
this.position = value.coordinates;
let activeProcessor = this.processorService.getActiveProcessor();
this.showTimeseriesButton = !value.enabled && activeProcessor && activeProcessor.hasTimeDimension();
});
}
ngAfterViewInit() {
let overlay = new Overlay({
element: this.container.nativeElement,
autoPan: true,
autoPanAnimation: {
duration: 250
},
positioning: 'bottom-left'
});
this.mapService.getViewer().then((viewer) => {
this.viewer = viewer;
viewer.addOverlay(overlay);
viewer.on('singleclick', this.onMapClick, this);
this.container.nativeElement.addEventListener('mousedown', (evt) => {
let down_coord = viewer.getEventCoordinate(evt);
let overlay_coord = overlay.getPosition();
let offset = [overlay_coord[0] - down_coord[0], overlay_coord[1] - down_coord[1]];
let that = this;
let moved = false;
function move(evt) {
let coord = viewer.getEventCoordinate(evt);
if (down_coord[0] == coord[0] && down_coord[1] == coord[1]) {
return;
}
that.position = [coord[0] + offset[0], coord[1] + offset[1]]
overlay.setPosition(that.position);
that.data = {
status: 'empty',
value: 0
}
moved = true;
}
function end(evt) {
window.removeEventListener('mousemove', move);
window.removeEventListener('mouseup', end);
if (moved)
that.retrieveDataValue(that.position);
}
window.addEventListener('mousemove', move);
window.addEventListener('mouseup', end);
});
});
this.popup = overlay;
this.timeService.getSelectedDate().subscribe((dt)=>{
if (this.popup.getPosition() != null) |
});
}
getGeoCoordinates(coord) {
if (!this.viewer) {
return null;
}
if (this.viewer.getView().getProjection() != 'EPSG:4326') {
coord = Proj.transform(coord, this.viewer.getView().getProjection(), 'EPSG:4326');
}
return coord;
}
retrieveDataValue(coordinate) {
this.data = {
status: 'loading',
value: 0
}
this.processorLayer.getSourceValue(coordinate).then((value)=>{
this.data = {
status: 'ready',
value: value
}
});
this.processorService.setTimeSeriesState({
coordinates: coordinate
});
}
onTimeSeriesClick() {
this.processorService.setTimeSeriesState({
enabled: true
});
}
closePopup() {
this.popup.setPosition(null);
this.processorService.setTimeSeriesState({
enabled: false
});
}
ngOnDestroy() {
this.mapService.getViewer().then((viewer)=>{
viewer.removeOverlay(this.popup);
viewer.un('singleclick', this.onMapClick, this);
});
}
private onMapClick(evt) {
let coordinate = evt.coordinate;
this.position = coordinate;
this.popup.setPosition(this.position);
this.retrieveDataValue(coordinate);
}
} | {
this.retrieveDataValue(this.position);
} | conditional_block |
processor-info.component.ts | import { Component, AfterViewInit, OnDestroy, ViewChild, ElementRef, Input } from "@angular/core";
import Overlay from 'ol/overlay';
import Proj from 'ol/proj';
import { MapService } from '../../map/map.service';
import {TimeService} from '../../app/time.service';
import {ProcessorsService} from '../processors.service'
import { ProcessorLayer } from '../processor-layer';
@Component({
selector: 'processor-info',
templateUrl: './processor-info.component.html',
styleUrls: ['./processor-info.component.scss']
})
export class ProcessorInfoComponent implements AfterViewInit, OnDestroy {
@ViewChild("container") container: ElementRef;
private popup;
private viewer;
public position = [0,0];
public showTimeseriesButton;
public data = {
status: 'empty',
value: 0
}
constructor(
private mapService: MapService,
private processorLayer: ProcessorLayer,
private processorService: ProcessorsService,
private timeService: TimeService
) {
this.processorService.getTimeSeriesState().subscribe((value)=>{
this.position = value.coordinates;
let activeProcessor = this.processorService.getActiveProcessor();
this.showTimeseriesButton = !value.enabled && activeProcessor && activeProcessor.hasTimeDimension();
});
}
ngAfterViewInit() {
let overlay = new Overlay({
element: this.container.nativeElement,
autoPan: true,
autoPanAnimation: {
duration: 250
},
positioning: 'bottom-left'
});
this.mapService.getViewer().then((viewer) => {
this.viewer = viewer;
viewer.addOverlay(overlay);
viewer.on('singleclick', this.onMapClick, this);
this.container.nativeElement.addEventListener('mousedown', (evt) => {
let down_coord = viewer.getEventCoordinate(evt);
let overlay_coord = overlay.getPosition();
let offset = [overlay_coord[0] - down_coord[0], overlay_coord[1] - down_coord[1]];
let that = this;
let moved = false;
function move(evt) {
let coord = viewer.getEventCoordinate(evt);
if (down_coord[0] == coord[0] && down_coord[1] == coord[1]) {
return;
}
that.position = [coord[0] + offset[0], coord[1] + offset[1]]
overlay.setPosition(that.position);
that.data = {
status: 'empty',
value: 0
}
moved = true;
}
function end(evt) {
window.removeEventListener('mousemove', move);
window.removeEventListener('mouseup', end);
if (moved)
that.retrieveDataValue(that.position);
}
window.addEventListener('mousemove', move);
window.addEventListener('mouseup', end);
});
});
this.popup = overlay;
this.timeService.getSelectedDate().subscribe((dt)=>{
if (this.popup.getPosition() != null) {
this.retrieveDataValue(this.position);
}
});
}
getGeoCoordinates(coord) |
retrieveDataValue(coordinate) {
this.data = {
status: 'loading',
value: 0
}
this.processorLayer.getSourceValue(coordinate).then((value)=>{
this.data = {
status: 'ready',
value: value
}
});
this.processorService.setTimeSeriesState({
coordinates: coordinate
});
}
onTimeSeriesClick() {
this.processorService.setTimeSeriesState({
enabled: true
});
}
closePopup() {
this.popup.setPosition(null);
this.processorService.setTimeSeriesState({
enabled: false
});
}
ngOnDestroy() {
this.mapService.getViewer().then((viewer)=>{
viewer.removeOverlay(this.popup);
viewer.un('singleclick', this.onMapClick, this);
});
}
private onMapClick(evt) {
let coordinate = evt.coordinate;
this.position = coordinate;
this.popup.setPosition(this.position);
this.retrieveDataValue(coordinate);
}
} | {
if (!this.viewer) {
return null;
}
if (this.viewer.getView().getProjection() != 'EPSG:4326') {
coord = Proj.transform(coord, this.viewer.getView().getProjection(), 'EPSG:4326');
}
return coord;
} | identifier_body |
menu.ts | console.log('##########################################');
var System = (<any>window).System;
import ng = require('angular2/angular2');
import $ = require('jquery');
const css = require('./menu.css');
var hoverOpenMenuList = [];
console.log(ng,ng.Component);
@ng.Component({
selector:'menu'
})
@ng.View({
template:`
<ul>
<ng-content></ng-content>
</ul>
`,styles:[css],encapsulation:ng.ViewEncapsulation.NONE
})
export class Menu{
$element:JQuery;
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
var showAction = this.$element.attr('show-action');
if(showAction == "parent:hover") |
}
open(){
this.$element.show();
}
close(){
this.$element.hide();
}
static closeAlHoverMenu(){
for(var i in hoverOpenMenuList){
var menu = hoverOpenMenuList[i];
menu.close();
}
}
}
@ng.Component({
selector:'item'
})
@ng.View({
template:`
<li>
<ng-content></ng-content>
</li>
`,
directives:[]
})
export class Item{
$element:JQuery;
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
}
}
| {
var parent = this.$element.parent();
parent.hover((e)=>{
Menu.closeAlHoverMenu();
var offset = parent.offset();
this.$element.css({
position:'fixed',top:offset.top,left:offset.left + parent[0].scrollWidth
});
this.open();
hoverOpenMenuList.push(this);
},(e)=>{
//this.$element.hide()
})
} | conditional_block |
menu.ts | console.log('##########################################');
var System = (<any>window).System;
import ng = require('angular2/angular2');
import $ = require('jquery');
const css = require('./menu.css');
var hoverOpenMenuList = [];
console.log(ng,ng.Component);
@ng.Component({
selector:'menu'
})
@ng.View({
template:`
<ul>
<ng-content></ng-content>
</ul>
`,styles:[css],encapsulation:ng.ViewEncapsulation.NONE
})
export class Menu{
$element:JQuery;
constructor(private el:ng.ElementRef) |
open(){
this.$element.show();
}
close(){
this.$element.hide();
}
static closeAlHoverMenu(){
for(var i in hoverOpenMenuList){
var menu = hoverOpenMenuList[i];
menu.close();
}
}
}
@ng.Component({
selector:'item'
})
@ng.View({
template:`
<li>
<ng-content></ng-content>
</li>
`,
directives:[]
})
export class Item{
$element:JQuery;
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
}
}
| {
this.$element = $(el.nativeElement);
var showAction = this.$element.attr('show-action');
if(showAction == "parent:hover"){
var parent = this.$element.parent();
parent.hover((e)=>{
Menu.closeAlHoverMenu();
var offset = parent.offset();
this.$element.css({
position:'fixed',top:offset.top,left:offset.left + parent[0].scrollWidth
});
this.open();
hoverOpenMenuList.push(this);
},(e)=>{
//this.$element.hide()
})
}
} | identifier_body |
menu.ts | console.log('##########################################');
var System = (<any>window).System;
import ng = require('angular2/angular2');
import $ = require('jquery');
const css = require('./menu.css');
var hoverOpenMenuList = [];
console.log(ng,ng.Component);
@ng.Component({
selector:'menu'
})
@ng.View({
template:`
<ul>
<ng-content></ng-content>
</ul>
`,styles:[css],encapsulation:ng.ViewEncapsulation.NONE
})
export class Menu{
$element:JQuery;
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
var showAction = this.$element.attr('show-action');
if(showAction == "parent:hover"){
var parent = this.$element.parent();
parent.hover((e)=>{
Menu.closeAlHoverMenu();
var offset = parent.offset();
this.$element.css({
position:'fixed',top:offset.top,left:offset.left + parent[0].scrollWidth
});
this.open();
hoverOpenMenuList.push(this);
},(e)=>{
//this.$element.hide()
})
}
}
open(){
this.$element.show();
}
close(){
this.$element.hide();
}
static closeAlHoverMenu(){
for(var i in hoverOpenMenuList){
var menu = hoverOpenMenuList[i];
menu.close();
}
}
}
@ng.Component({
selector:'item'
})
@ng.View({
template:`
<li>
<ng-content></ng-content>
</li>
`,
directives:[]
})
export class | {
$element:JQuery;
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
}
}
| Item | identifier_name |
menu.ts | console.log('##########################################');
var System = (<any>window).System;
import ng = require('angular2/angular2');
import $ = require('jquery');
const css = require('./menu.css');
var hoverOpenMenuList = [];
console.log(ng,ng.Component);
@ng.Component({
selector:'menu'
})
@ng.View({
template:`
<ul>
<ng-content></ng-content>
</ul>
`,styles:[css],encapsulation:ng.ViewEncapsulation.NONE
})
export class Menu{
$element:JQuery; | if(showAction == "parent:hover"){
var parent = this.$element.parent();
parent.hover((e)=>{
Menu.closeAlHoverMenu();
var offset = parent.offset();
this.$element.css({
position:'fixed',top:offset.top,left:offset.left + parent[0].scrollWidth
});
this.open();
hoverOpenMenuList.push(this);
},(e)=>{
//this.$element.hide()
})
}
}
open(){
this.$element.show();
}
close(){
this.$element.hide();
}
static closeAlHoverMenu(){
for(var i in hoverOpenMenuList){
var menu = hoverOpenMenuList[i];
menu.close();
}
}
}
@ng.Component({
selector:'item'
})
@ng.View({
template:`
<li>
<ng-content></ng-content>
</li>
`,
directives:[]
})
export class Item{
$element:JQuery;
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
}
} |
constructor(private el:ng.ElementRef){
this.$element = $(el.nativeElement);
var showAction = this.$element.attr('show-action'); | random_line_split |
cms.base.js | /*##################################################|*/
/* #CMS.BASE# */
(function namespacing(CMS) {
CMS.$(document).ready(function ($) {
// assign correct jquery to $ namespace
$ = CMS.$ || $;
// the following is added because IE is stupid
// $.ajax requests in IE8 fail without this hack
// ref: http://stackoverflow.com/questions/4557532/jquery-ajax-requests-failing-in-ie8-with-message-error-this-method-cannot-be-c
$.ajaxSetup({
xhr: function() {
try{
if(window.ActiveXObject)
return new window.ActiveXObject("Microsoft.XMLHTTP");
} catch(e) { }
return new window.XMLHttpRequest();
}
});
/*!
* Adds security methods to api namespace
* @public_methods:
* - CMS.API.Security.csrf();
* @compatibility: IE >= 7, FF >= 3, Safari >= 4, Chrome > =4, Opera >= 10
*/
CMS.API.Security = {
csrf: function () {
$.ajaxSetup({
beforeSend: function (xhr, settings) {
if (typeof(settings.csrfTokenSet) != undefined && settings.csrfTokenSet) {
// CSRF token has already been set elsewhere so we won't touch it.
return true;
}
// get cookies without jquery.cookie.js
function getCookie(name) {
var cookieValue = null;
if(document.cookie && (document.cookie != '')) {
var cookies = document.cookie.split(';');
for (var i = 0; i < cookies.length; i++) {
var cookie = $.trim(cookies[i]);
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) == (name + '=')) {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
// do some url checks
var base_doc_url = document.URL.match(/^http[s]{0,1}:\/\/[^\/]+\//)[0];
var base_settings_url = settings.url.match(/^http[s]{0,1}:\/\/[^\/]+\//);
if(base_settings_url != null) {
base_settings_url = base_settings_url[0];
}
if(!(/^http:.*/.test(settings.url) || /^https:.*/.test(settings.url)) || base_doc_url == base_settings_url) {
// Only send the token to relative URLs i.e. locally.
xhr.setRequestHeader("X-CSRFToken", getCookie(csrf_cookie_name));
settings.csrfTokenSet = true;
}
}
});
return 'ready';
}
};
/*!
* Adds helper methods to api namespace
* @public_methods:
* - CMS.API.Helpers.reloadBrowser();
* - CMS.API.Helpers.getUrl(urlString);
* - CMS.API.Helpers.setUrl(urlString, options);
*/
CMS.API.Helpers = {
reloadBrowser: function () {
window.location.reload();
},
getUrl: function(str) {
var o = {
'strictMode': false,
'key': ["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"],
'q': { 'name': 'queryKey', 'parser': /(?:^|&)([^&=]*)=?([^&]*)/g },
'parser': {
'strict': /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/,
'loose': /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/
}
};
var m = o.parser[o.strictMode ? 'strict' : 'loose'].exec(str), uri = {}, i = 14;
while(i--) uri[o.key[i]] = m[i] || ''; |
return uri;
},
setUrl: function (str, options) {
var uri = str;
// now we neet to get the partials of the element
var getUrlObj = this.getUrl(uri);
var query = getUrlObj.queryKey;
var serialized = '';
var index = 0;
// we could loop the query and replace the param at the right place
// but instead of replacing it just append it to the end of the query so its more visible
if(options && options.removeParam) delete query[options.removeParam];
if(options && options.addParam) query[options.addParam.split('=')[0]] = options.addParam.split('=')[1];
$.each(query, function (key, value) {
// add &
if(index != 0) serialized += '&';
// if a value is given attach it
serialized += (value) ? (key + '=' + value) : (key);
index++;
});
// check if we should add the questionmark
var addition = (serialized === '') ? '' : '?';
var anchor = (getUrlObj.anchor) ? '#' + getUrlObj.anchor : '';
uri = getUrlObj.protocol + '://' + getUrlObj.authority + getUrlObj.directory + getUrlObj.file + addition + serialized + anchor;
return uri;
}
};
});
})(window.CMS); |
uri[o.q.name] = {};
uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) {
if($1) { uri[o.q.name][$1] = $2; }
}); | random_line_split |
cms.base.js | /*##################################################|*/
/* #CMS.BASE# */
(function namespacing(CMS) {
CMS.$(document).ready(function ($) {
// assign correct jquery to $ namespace
$ = CMS.$ || $;
// the following is added because IE is stupid
// $.ajax requests in IE8 fail without this hack
// ref: http://stackoverflow.com/questions/4557532/jquery-ajax-requests-failing-in-ie8-with-message-error-this-method-cannot-be-c
$.ajaxSetup({
xhr: function() {
try{
if(window.ActiveXObject)
return new window.ActiveXObject("Microsoft.XMLHTTP");
} catch(e) { }
return new window.XMLHttpRequest();
}
});
/*!
* Adds security methods to api namespace
* @public_methods:
* - CMS.API.Security.csrf();
* @compatibility: IE >= 7, FF >= 3, Safari >= 4, Chrome > =4, Opera >= 10
*/
CMS.API.Security = {
csrf: function () {
$.ajaxSetup({
beforeSend: function (xhr, settings) {
if (typeof(settings.csrfTokenSet) != undefined && settings.csrfTokenSet) {
// CSRF token has already been set elsewhere so we won't touch it.
return true;
}
// get cookies without jquery.cookie.js
function getCookie(name) |
// do some url checks
var base_doc_url = document.URL.match(/^http[s]{0,1}:\/\/[^\/]+\//)[0];
var base_settings_url = settings.url.match(/^http[s]{0,1}:\/\/[^\/]+\//);
if(base_settings_url != null) {
base_settings_url = base_settings_url[0];
}
if(!(/^http:.*/.test(settings.url) || /^https:.*/.test(settings.url)) || base_doc_url == base_settings_url) {
// Only send the token to relative URLs i.e. locally.
xhr.setRequestHeader("X-CSRFToken", getCookie(csrf_cookie_name));
settings.csrfTokenSet = true;
}
}
});
return 'ready';
}
};
/*!
* Adds helper methods to api namespace
* @public_methods:
* - CMS.API.Helpers.reloadBrowser();
* - CMS.API.Helpers.getUrl(urlString);
* - CMS.API.Helpers.setUrl(urlString, options);
*/
CMS.API.Helpers = {
reloadBrowser: function () {
window.location.reload();
},
getUrl: function(str) {
var o = {
'strictMode': false,
'key': ["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"],
'q': { 'name': 'queryKey', 'parser': /(?:^|&)([^&=]*)=?([^&]*)/g },
'parser': {
'strict': /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/,
'loose': /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/
}
};
var m = o.parser[o.strictMode ? 'strict' : 'loose'].exec(str), uri = {}, i = 14;
while(i--) uri[o.key[i]] = m[i] || '';
uri[o.q.name] = {};
uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) {
if($1) { uri[o.q.name][$1] = $2; }
});
return uri;
},
setUrl: function (str, options) {
var uri = str;
// now we neet to get the partials of the element
var getUrlObj = this.getUrl(uri);
var query = getUrlObj.queryKey;
var serialized = '';
var index = 0;
// we could loop the query and replace the param at the right place
// but instead of replacing it just append it to the end of the query so its more visible
if(options && options.removeParam) delete query[options.removeParam];
if(options && options.addParam) query[options.addParam.split('=')[0]] = options.addParam.split('=')[1];
$.each(query, function (key, value) {
// add &
if(index != 0) serialized += '&';
// if a value is given attach it
serialized += (value) ? (key + '=' + value) : (key);
index++;
});
// check if we should add the questionmark
var addition = (serialized === '') ? '' : '?';
var anchor = (getUrlObj.anchor) ? '#' + getUrlObj.anchor : '';
uri = getUrlObj.protocol + '://' + getUrlObj.authority + getUrlObj.directory + getUrlObj.file + addition + serialized + anchor;
return uri;
}
};
});
})(window.CMS);
| {
var cookieValue = null;
if(document.cookie && (document.cookie != '')) {
var cookies = document.cookie.split(';');
for (var i = 0; i < cookies.length; i++) {
var cookie = $.trim(cookies[i]);
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) == (name + '=')) {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
} | identifier_body |
cms.base.js | /*##################################################|*/
/* #CMS.BASE# */
(function namespacing(CMS) {
CMS.$(document).ready(function ($) {
// assign correct jquery to $ namespace
$ = CMS.$ || $;
// the following is added because IE is stupid
// $.ajax requests in IE8 fail without this hack
// ref: http://stackoverflow.com/questions/4557532/jquery-ajax-requests-failing-in-ie8-with-message-error-this-method-cannot-be-c
$.ajaxSetup({
xhr: function() {
try{
if(window.ActiveXObject)
return new window.ActiveXObject("Microsoft.XMLHTTP");
} catch(e) { }
return new window.XMLHttpRequest();
}
});
/*!
* Adds security methods to api namespace
* @public_methods:
* - CMS.API.Security.csrf();
* @compatibility: IE >= 7, FF >= 3, Safari >= 4, Chrome > =4, Opera >= 10
*/
CMS.API.Security = {
csrf: function () {
$.ajaxSetup({
beforeSend: function (xhr, settings) {
if (typeof(settings.csrfTokenSet) != undefined && settings.csrfTokenSet) {
// CSRF token has already been set elsewhere so we won't touch it.
return true;
}
// get cookies without jquery.cookie.js
function | (name) {
var cookieValue = null;
if(document.cookie && (document.cookie != '')) {
var cookies = document.cookie.split(';');
for (var i = 0; i < cookies.length; i++) {
var cookie = $.trim(cookies[i]);
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) == (name + '=')) {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
// do some url checks
var base_doc_url = document.URL.match(/^http[s]{0,1}:\/\/[^\/]+\//)[0];
var base_settings_url = settings.url.match(/^http[s]{0,1}:\/\/[^\/]+\//);
if(base_settings_url != null) {
base_settings_url = base_settings_url[0];
}
if(!(/^http:.*/.test(settings.url) || /^https:.*/.test(settings.url)) || base_doc_url == base_settings_url) {
// Only send the token to relative URLs i.e. locally.
xhr.setRequestHeader("X-CSRFToken", getCookie(csrf_cookie_name));
settings.csrfTokenSet = true;
}
}
});
return 'ready';
}
};
/*!
* Adds helper methods to api namespace
* @public_methods:
* - CMS.API.Helpers.reloadBrowser();
* - CMS.API.Helpers.getUrl(urlString);
* - CMS.API.Helpers.setUrl(urlString, options);
*/
CMS.API.Helpers = {
reloadBrowser: function () {
window.location.reload();
},
getUrl: function(str) {
var o = {
'strictMode': false,
'key': ["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"],
'q': { 'name': 'queryKey', 'parser': /(?:^|&)([^&=]*)=?([^&]*)/g },
'parser': {
'strict': /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/,
'loose': /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/
}
};
var m = o.parser[o.strictMode ? 'strict' : 'loose'].exec(str), uri = {}, i = 14;
while(i--) uri[o.key[i]] = m[i] || '';
uri[o.q.name] = {};
uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) {
if($1) { uri[o.q.name][$1] = $2; }
});
return uri;
},
setUrl: function (str, options) {
var uri = str;
// now we neet to get the partials of the element
var getUrlObj = this.getUrl(uri);
var query = getUrlObj.queryKey;
var serialized = '';
var index = 0;
// we could loop the query and replace the param at the right place
// but instead of replacing it just append it to the end of the query so its more visible
if(options && options.removeParam) delete query[options.removeParam];
if(options && options.addParam) query[options.addParam.split('=')[0]] = options.addParam.split('=')[1];
$.each(query, function (key, value) {
// add &
if(index != 0) serialized += '&';
// if a value is given attach it
serialized += (value) ? (key + '=' + value) : (key);
index++;
});
// check if we should add the questionmark
var addition = (serialized === '') ? '' : '?';
var anchor = (getUrlObj.anchor) ? '#' + getUrlObj.anchor : '';
uri = getUrlObj.protocol + '://' + getUrlObj.authority + getUrlObj.directory + getUrlObj.file + addition + serialized + anchor;
return uri;
}
};
});
})(window.CMS);
| getCookie | identifier_name |
custom.js | // slideshow settings
$(document).ready(function() {
$('.slideshow').cycle({
fx: 'fade' // transition type : fade, scrollUp, shuffle, etc...
});
});
$(document).ready( function() {
Cufon.replace('.footer-one-third h2', { fontFamily: 'ColaborateLight', fontSize: '20px', color:'#cdb380' } );
Cufon.replace('.footer-one-third h3', { fontFamily: 'ColaborateLight', fontSize: '20px', color:'#cdb380' } );
});
$(document).ready( function() {
Cufon.replace('#content h1', { fontFamily: 'ColaborateLight', fontSize: '40px', color:'#000000' } );
Cufon.replace('#content h2', { fontFamily: 'ColaborateLight', fontSize: '22px', color:'#000000' } );
Cufon.replace('#content h3', { fontFamily: 'ColaborateLight', fontSize: '18px', color:'#000000' } );
Cufon.replace('h3.post-title', { fontFamily: 'ColaborateLight', fontSize: '30px', color:'#000000' } );
Cufon.replace('h2.date-header', { fontFamily: 'ColaborateLight', fontSize: '10px', color:'#000000' } );
$('.rounded').corner(); |
}); |
$('#sidebar .widget').corner(); | random_line_split |
eventlist.component.ts | import {Component, EventEmitter, OnInit, TemplateRef, ViewEncapsulation} from '@angular/core';
import {routerTransition} from '../../router.animations';
import { CalendarService } from '../calendar.service'; | @Component({
selector: 'app-eventlist',
templateUrl: './eventlist.component.html',
styleUrls: ['./eventlist.component.scss'],
animations: [routerTransition()],
host: {'[@routerTransition]': ''}
})
export class EventlistComponent implements OnInit {
viewDate: Date = new Date();
days : Array<Eventdate>;
image;
constructor(private http: Http, public calser: CalendarService) { }
ngOnInit() {
let year = '' + this.viewDate.getFullYear();
let month = '' + (this.viewDate.getMonth() + 1);
this.calser.getMonthData(year, month).then((json: Array<Eventdate>) =>{
this.days = json;
});
this.http.get(environment.API_ENDPOINT+'pageimage')
.map((res: Response) => res.json()).subscribe((json: Object) =>{
this.image = json[0]['main_image']['image'];
});
}
reload(){
let year = '' + this.viewDate.getFullYear();
let month = '' + (this.viewDate.getMonth() + 1);
this.calser.getMonthData(year, month).then((json: Array<Eventdate>) =>{
this.days = json;
});
}
previous() {
this.viewDate.setMonth(this.viewDate.getMonth() - 1);
this.reload();
}
next() {
this.viewDate.setMonth(this.viewDate.getMonth() + 1);
this.reload();
}
} | import { Eventdate, Windy } from '../eventdate';
import 'rxjs/Rx';
import {environment} from "../../../environments/environment";
import { Http, Response } from '@angular/http';
| random_line_split |
eventlist.component.ts | import {Component, EventEmitter, OnInit, TemplateRef, ViewEncapsulation} from '@angular/core';
import {routerTransition} from '../../router.animations';
import { CalendarService } from '../calendar.service';
import { Eventdate, Windy } from '../eventdate';
import 'rxjs/Rx';
import {environment} from "../../../environments/environment";
import { Http, Response } from '@angular/http';
@Component({
selector: 'app-eventlist',
templateUrl: './eventlist.component.html',
styleUrls: ['./eventlist.component.scss'],
animations: [routerTransition()],
host: {'[@routerTransition]': ''}
})
export class EventlistComponent implements OnInit {
viewDate: Date = new Date();
days : Array<Eventdate>;
image;
constructor(private http: Http, public calser: CalendarService) { }
ngOnInit() {
let year = '' + this.viewDate.getFullYear();
let month = '' + (this.viewDate.getMonth() + 1);
this.calser.getMonthData(year, month).then((json: Array<Eventdate>) =>{
this.days = json;
});
this.http.get(environment.API_ENDPOINT+'pageimage')
.map((res: Response) => res.json()).subscribe((json: Object) =>{
this.image = json[0]['main_image']['image'];
});
}
reload(){
let year = '' + this.viewDate.getFullYear();
let month = '' + (this.viewDate.getMonth() + 1);
this.calser.getMonthData(year, month).then((json: Array<Eventdate>) =>{
this.days = json;
});
}
previous() {
this.viewDate.setMonth(this.viewDate.getMonth() - 1);
this.reload();
}
next() |
}
| {
this.viewDate.setMonth(this.viewDate.getMonth() + 1);
this.reload();
} | identifier_body |
eventlist.component.ts | import {Component, EventEmitter, OnInit, TemplateRef, ViewEncapsulation} from '@angular/core';
import {routerTransition} from '../../router.animations';
import { CalendarService } from '../calendar.service';
import { Eventdate, Windy } from '../eventdate';
import 'rxjs/Rx';
import {environment} from "../../../environments/environment";
import { Http, Response } from '@angular/http';
@Component({
selector: 'app-eventlist',
templateUrl: './eventlist.component.html',
styleUrls: ['./eventlist.component.scss'],
animations: [routerTransition()],
host: {'[@routerTransition]': ''}
})
export class EventlistComponent implements OnInit {
viewDate: Date = new Date();
days : Array<Eventdate>;
image;
constructor(private http: Http, public calser: CalendarService) { }
ngOnInit() {
let year = '' + this.viewDate.getFullYear();
let month = '' + (this.viewDate.getMonth() + 1);
this.calser.getMonthData(year, month).then((json: Array<Eventdate>) =>{
this.days = json;
});
this.http.get(environment.API_ENDPOINT+'pageimage')
.map((res: Response) => res.json()).subscribe((json: Object) =>{
this.image = json[0]['main_image']['image'];
});
}
reload(){
let year = '' + this.viewDate.getFullYear();
let month = '' + (this.viewDate.getMonth() + 1);
this.calser.getMonthData(year, month).then((json: Array<Eventdate>) =>{
this.days = json;
});
}
previous() {
this.viewDate.setMonth(this.viewDate.getMonth() - 1);
this.reload();
}
| () {
this.viewDate.setMonth(this.viewDate.getMonth() + 1);
this.reload();
}
}
| next | identifier_name |
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn size(&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this parition.
/// If specified, the serializer will attempt to scramble this parition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) {
self.size = size;
}
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit alligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) |
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
}
| {
word_annotation.push(annotations[idx].clone());
} | conditional_block |
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn size(&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this parition.
/// If specified, the serializer will attempt to scramble this parition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) |
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit alligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) {
word_annotation.push(annotations[idx].clone());
}
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
}
| {
self.size = size;
} | identifier_body |
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details. | // SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn size(&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this parition.
/// If specified, the serializer will attempt to scramble this parition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) {
self.size = size;
}
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit alligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) {
word_annotation.push(annotations[idx].clone());
}
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
} | random_line_split |
|
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn | (&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this parition.
/// If specified, the serializer will attempt to scramble this parition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) {
self.size = size;
}
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit alligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) {
word_annotation.push(annotations[idx].clone());
}
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
}
| size | identifier_name |
visualizer.js | "!"], // this is the escaped form of !
[/NOT/i, "!"],
[/\(/, "("],
[/\)/, ")"],
[/(true)(?![a-zA-Z0-9])/i, "TRUE"],
[/(false)(?![a-zA-Z0-9])/i, "FALSE"],
[/[a-zA-Z]+/, "IDENT"],
[/.+/, "DIRTYTEXT"],
];
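// Added note (not in the original source): the token-table shape is
// inferred from how it is used in this file — each entry appears to be
// [regex, tokenName, ignoreFlag?], where a truthy third element (see the
// IGNORE constant in the XML branch below) drops the match from the token
// stream. Hypothetical extra rows would look like:
//   [/\s+/, "", true],   // skip whitespace entirely
//   [/XOR/i, "^"],       // map a keyword onto an operator token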
makeEvaluatorAndInitialize(
new Boolius(tokenDefinitions, grammarObject),
"((d && c)) || (!b && a) && (!d || !a) && (!c || !b)",
"Click operators to expand or collapse. Click leaf nodes to toggle true/false."
);
}
} else if (newMode.indexOf("xml") > -1) {
// the user wants to look at XML documents.
// is XMLius already loaded?
if (!evaluator || !(evaluator instanceof XMLius)) {
let grammarObject = [
[["OPENCOMMENT", "WILDCARD", "CLOSECOMMENT"], "COMMENT"],
// comments will be engulfed by the text of a node
// and ignored when the node is asked for its text as a string
[["COMMENT"], "#TEXT_NODE"],
[["<", "/", "IDENT", ">"], "CLOSETAG"],
[["<", "IDENT", ">"], "OPENTAG"],
[["<", "IDENT", "/", ">"], "XMLNODE"],
[["<", "IDENT", "IDENT", "=", '"', "WILDCARD", '"'], "OPENTAGSTART"],
/* Some recursive self-nesting here */
[
["OPENTAGSTART", "IDENT", "=", '"', "WILDCARD", '"'],
"OPENTAGSTART",
],
[["OPENTAGSTART", ">"], "OPENTAG"],
// can't have two identifiers in a row, unless we're between an opening and closing tag
// a/k/a node.text
[["IDENT", "IDENT"], "#TEXT_NODE"],
[["IDENT", "#TEXT_NODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "#TEXT_NODE"], "#TEXT_NODE"],
// let's also have nested nodes engulfed in the NODETEXT
[["XMLNODE", "#TEXT_NODE"], "#TEXT_NODE"],
[["XMLNODES", "#TEXT_NODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "XMLNODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "XMLNODES"], "#TEXT_NODE"],
[["OPENTAG", "CLOSETAG"], "XMLNODE"],
[["OPENTAG", "#TEXT_NODE", "CLOSETAG"], "XMLNODE"],
[["OPENTAG", "XMLNODE", "CLOSETAG"], "XMLNODE"],
[["XMLNODE", "XMLNODE"], "XMLNODES"],
[["OPENTAG", "XMLNODES", "CLOSETAG"], "XMLNODE"],
];
let IGNORE = true;
let tokenDefinitions = [
[/\s+/, "", IGNORE],
[/<!--/, "OPENCOMMENT"],
[/-->/, "CLOSECOMMENT"],
[/\//, "/"],
[/>/, ">"],
[/</, "<"],
[/=/, "="],
[/"/, '"'],
[/'/, '"'],
[/[-+]?[0-9]*\.?[0-9]+/, "NUM_LIT"],
[/[a-zA-Z]+[a-zA-Z0-9-]*/, "IDENT"],
// having trapped all these things, what's left is nodetext
[/[^<]+/, "#TEXT_NODE"],
];
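// Added note (not in the original source): rule order appears to matter in
// this tokenizer — the catch-all [^<]+ rule is last so that structural
// tokens (<, >, /, =, quotes) and identifiers are claimed first; moving it
// earlier would presumably swallow tag names and attribute text.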
makeEvaluatorAndInitialize(
new XMLius(tokenDefinitions, grammarObject),
`<div class="hintwrapper"><div class="hint">Click operators to expand or collapse. Click leaf nodes to toggle true/false.</div><div class="styled-select green semi-square" style="bold"></div></div>`,
"Mouseover nodes to see attributes. Click nodetext to see content."
);
}
}
}
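// Added sketch (not in the original source): a hypothetical third mode
// could be wired the same way, assuming a matching evaluator class
// (JSONius, its tokens and grammar are all invented for illustration):
//   } else if (newMode.indexOf("json") > -1) {
//     if (!evaluator || !(evaluator instanceof JSONius)) {
//       makeEvaluatorAndInitialize(
//         new JSONius(tokenDefinitions, grammarObject),
//         '{"a": true}',
//         "Click nodes to expand or collapse."
//       );
//     }
//   }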
function makeEvaluatorAndInitialize(newEvaluator, statement, hintText) {
assignEvaluator(newEvaluator);
d3.select("#statement").node().value = statement;
d3.select("div.hint").text(hintText);
evaluateStatement();
}
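// Added example (not in the original source): a minimal usage sketch —
// swapping evaluators re-runs the whole parse/visualize pipeline on the
// given statement (the argument values here are illustrative):
//   makeEvaluatorAndInitialize(
//     new Boolius(tokenDefinitions, grammarObject),
//     "a && !b",
//     "Click leaf nodes to toggle true/false."
//   );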
function assignEvaluator(newEvaluator) {
// don't change if the user wants what they already have
if (evaluator && newEvaluator.constructor === evaluator.constructor) return;
evaluator = newEvaluator;
}
var evaluator;
let header = document.querySelector("header");
var winHeight = Math.max(500, window.innerHeight - 240);
var winWidth = Math.max(800, window.innerWidth);
var m = [0, 120, 140, 120], // margins: [top, right, bottom, left]
w = winWidth - m[1] - m[3],
h = winHeight - m[0] - m[2],
i = 0,
root;
var tree = d3.layout.tree().size([h, w]);
var diagonal = d3.svg.diagonal().projection(function (d) {
return [d.y, d.x];
});
var vis = d3
.select("#body")
.append("svg:svg")
.attr("width", w + m[1] + m[3])
.attr("height", h + m[0] + m[2])
.append("svg:g")
.attr("transform", "translate(" + m[3] + "," + m[0] + ")");
vis
.append("text")
.attr("opacity", 1)
.attr("y", 246)
.attr("dy", "1.71em")
.style("font-size", "34px")
.style("text-anchor", "end")
.attr("id", "result")
.text("");
d3.select("#testbutton").on("click", function (e) {
evaluateStatement();
});
d3.select("#statement").on("keyup", function () {
if (d3.event.keyCode == 13) {
d3.select("#testbutton").on("click")();
}
});
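// Added note (not in the original source): d3.event.keyCode is the D3 v3
// event pattern, and keyCode is deprecated in modern browsers; with plain
// DOM events the equivalent would be something like:
//   statementInput.addEventListener("keyup", (e) => {
//     if (e.key === "Enter") evaluateStatement();
//   });
// where statementInput is a hypothetical reference to the #statement node.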
var parseTree;
function evaluateStatement() {
var statement = d3.select("#statement").node().value;
parseTree = evaluator.parse(statement);
displayJSON(parseTree);
}
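// Added note (not in the original source): the node shape below is inferred
// from how the tree is consumed in this file, not from any Boolius/XMLius
// documentation. A parse-tree node appears to carry roughly:
//   {
//     name: "a",               // identifier / operator / tag label
//     value: true,             // evaluated result, or node content in XML mode
//     expressionString: "...", // observed on the root only
//     attributes: {...},       // XML mode only
//     children: [...],         // visible subtrees (null when collapsed)
//     _children: [...],        // subtrees hidden by toggle()
//   }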
function displayJSON(json) {
if (json == null) return;
root = json;
root.x0 = h / 2;
root.y0 = 0;
//d3.select("#statement").val( root.title );
d3.select("#statement").property("value", root.expressionString);
d3.select("#result").text(root.value);
function toggleAll(d) {
if (d.children) {
toggle(d);
}
if (d._children) {
toggle(d);
}
}
// Initialize the display to show all nodes.
root.children.forEach(toggleAll);
update(root);
}
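// Added sketch (not in the original source): the children/_children swap in
// toggle() below is the standard D3 v3 collapsible-tree idiom — hidden
// subtrees are parked on _children so the layout skips them, then restored
// by swapping back. In isolation:
function collapseSketch(d) {
  if (d.children) {
    d._children = d.children; // hide: the layout ignores _children
    d.children = null;
  } else {
    d.children = d._children; // show: restore the parked subtrees
    d._children = null;
  }
}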
// Toggle children.
function toggle(d, showOverlay) {
if (d == undefined) return;
//boolean
if (d.value === true || d.value === false) {
if (d.children) {
// hide the children by moving them into _children
d._children = d.children;
d.children = null;
} else {
// bring back the hidden children
d.children = d._children;
d._children = null;
}
var hasNoChildren = !d.children && !d._children;
if (!hasNoChildren) {
// has an array in d.children or d._children
// but it might be empty!
if (d.children && d.children.length == 0) hasNoChildren = true;
if (d._children && d._children.length == 0) hasNoChildren = true;
}
if (hasNoChildren) {
// it's a leaf
// toggle true/false
| (d.value === true || d.value === false) {
d.value = !d.value;
//var myInt = parseInt( d.name );
//conditionTruthValues[ myInt ] = d.value;
var myVar = d.name;
evaluator.state[myVar] = d.value;
updateWithoutDeleting(root);
}
}
} // you clicked something that isn't in a boolean flow
else {
if (showOverlay) {
var attributeText = d.attributes
? JSON.stringify(d.attributes)
: "None";
if (!d.children && !d._children) {
// it's a leaf
//showValueOverlay( d.value );
showValueOverlay(
"Attributes: " + attributeText + "</br>Content: " + d.value
);
} //oops, we wanted to collapse this thing
else {
//showValueOverlay( "Attributes: " + attributeText + "</br>Content: " + d.value );
if (d.children) {
// hide the children by moving them into _children
d._children = d.children;
d.children = null;
} else {
// bring back the hidden children
d.children = d._children;
d._children = null;
}
}
}
}
}
function showValueOverlay(val | if | identifier_name |
visualizer.js | "!"], // this is the escaped form of !
[/NOT/i, "!"],
[/\(/, "("],
[/\)/, ")"],
[/(true)(?![a-zA-Z0-9])/i, "TRUE"],
[/(false)(?![a-zA-Z0-9])/i, "FALSE"],
[/[a-zA-Z]+/, "IDENT"],
[/.+/, "DIRTYTEXT"],
];
makeEvaluatorAndInitialize(
new Boolius(tokenDefinitions, grammarObject),
"((d && c)) || (!b && a) && (!d || !a) && (!c || !b)",
"Click operators to expand or collapse. Click leaf nodes to toggle true/false."
);
}
} else if (newMode.indexOf("xml") > -1) {
// the user wants to look at XML documents.
// is XMLius already loaded?
if (!evaluator || !(evaluator instanceof XMLius)) { | // comments will be engulfed by the text of a node
// and ignored when the node is asked for its text as a string
[["COMMENT"], "#TEXT_NODE"],
[["<", "/", "IDENT", ">"], "CLOSETAG"],
[["<", "IDENT", ">"], "OPENTAG"],
[["<", "IDENT", "/", ">"], "XMLNODE"],
[["<", "IDENT", "IDENT", "=", '"', "WILDCARD", '"'], "OPENTAGSTART"],
/* Some recursive self-nesting here */
[
["OPENTAGSTART", "IDENT", "=", '"', "WILDCARD", '"'],
"OPENTAGSTART",
],
[["OPENTAGSTART", ">"], "OPENTAG"],
// can't have two identifiers in a row, unless we're between an opening and closing tag
// a/k/a node.text
[["IDENT", "IDENT"], "#TEXT_NODE"],
[["IDENT", "#TEXT_NODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "#TEXT_NODE"], "#TEXT_NODE"],
// let's also have nested nodes engulfed in the NODETEXT
[["XMLNODE", "#TEXT_NODE"], "#TEXT_NODE"],
[["XMLNODES", "#TEXT_NODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "XMLNODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "XMLNODES"], "#TEXT_NODE"],
[["OPENTAG", "CLOSETAG"], "XMLNODE"],
[["OPENTAG", "#TEXT_NODE", "CLOSETAG"], "XMLNODE"],
[["OPENTAG", "XMLNODE", "CLOSETAG"], "XMLNODE"],
[["XMLNODE", "XMLNODE"], "XMLNODES"],
[["OPENTAG", "XMLNODES", "CLOSETAG"], "XMLNODE"],
];
let IGNORE = true;
let tokenDefinitions = [
[/\s+/, "", IGNORE],
[/<!--/, "OPENCOMMENT"],
[/-->/, "CLOSECOMMENT"],
[/\//, "/"],
[/>/, ">"],
[/</, "<"],
[/=/, "="],
[/"/, '"'],
[/'/, '"'],
[/[-+]?[0-9]*\.?[0-9]+/, "NUM_LIT"],
[/[a-zA-Z]+[a-zA-Z0-9-]*/, "IDENT"],
// having trapped all these things, what's left is nodetext
[/[^<]+/, "#TEXT_NODE"],
];
makeEvaluatorAndInitialize(
new XMLius(tokenDefinitions, grammarObject),
`<div class="hintwrapper"><div class="hint">Click operators to expand or collapse. Click leaf nodes to toggle true/false.</div><div class="styled-select green semi-square" style="bold"></div></div>`,
"Mouseover nodes to see attributes. Click nodetext to see content."
);
}
}
}
function makeEvaluatorAndInitialize(newEvaluator, statement, hintText) {
assignEvaluator(newEvaluator);
d3.select("#statement").node().value = statement;
d3.select("div.hint").text(hintText);
evaluateStatement();
}
function assignEvaluator(newEvaluator) {
// don't change if the user wants what they already have
if (evaluator && newEvaluator.constructor === evaluator.constructor) return;
evaluator = newEvaluator;
}
var evaluator;
var winWidth = Math.max(1000, window.innerWidth);
let header = document.querySelector("header");
var winHeight = Math.max(500, window.innerHeight - 240);
var winWidth = Math.max(800, window.innerWidth);
var m = [0, 120, 140, 120],
w = winWidth - m[1] - m[3],
h = winHeight - m[0] - m[2],
i = 0,
root;
var tree = d3.layout.tree().size([h, w]);
var diagonal = d3.svg.diagonal().projection(function (d) {
return [d.y, d.x];
});
var vis = d3
.select("#body")
.append("svg:svg")
.attr("width", w + m[1] + m[3])
.attr("height", h + m[0] + m[2])
.append("svg:g")
.attr("transform", "translate(" + m[3] + "," + m[0] + ")");
vis
.append("text")
.attr("opacity", 1)
.attr("y", 246)
.attr("dy", "1.71em")
.style("font-size", "34px")
.style("text-anchor", "end")
.attr("id", "result")
.text("");
d3.select("#testbutton").on("click", function (e) {
evaluateStatement();
});
d3.select("#statement").on("keyup", function () {
if (d3.event.keyCode == 13) {
d3.select("#testbutton").on("click")();
}
});
var parseTree;
function evaluateStatement() {
var statement = d3.select("#statement").node().value;
parseTree = evaluator.parse(statement);
displayJSON(parseTree);
}
function displayJSON(json) {
if (json == null) return;
root = json;
root.x0 = h / 2;
root.y0 = 0;
//d3.select("#statement").val( root.title );
d3.select("#statement").property("value", root.expressionString);
d3.select("#result").text(root.value);
function toggleAll(d, delay) {
if (!delay) delay = 1;
if (d.children) {
toggle(d);
}
if (d._children) {
toggle(d);
}
}
// Initialize the display to show all nodes.
root.children.forEach(toggleAll, 444);
update(root);
}
// Toggle children.
function toggle(d, showOverlay) {
if (d == undefined) return;
//boolean
if (d.value === true || d.value === false) {
if (d.children) {
// hide the children by moving them into _children
d._children = d.children;
d.children = null;
} else {
// bring back the hidden children
d.children = d._children;
d._children = null;
}
var hasNoChildren = !d.children && !d._children;
if (!hasNoChildren) {
// has an array in d.children or d._children
// but it might be empty!
if (d.children && d.children.length == 0) hasNoChildren = true;
if (d._children && d._children.length == 0) hasNoChildren = true;
}
if (hasNoChildren) {
// it's a leaf
// toggle true/false
if (d.value === true || d.value === false) {
d.value = !d.value;
//var myInt = parseInt( d.name );
//conditionTruthValues[ myInt ] = d.value;
var myVar = d.name;
evaluator.state[myVar] = d.value;
updateWithoutDeleting(root);
}
}
} // you clicked something that isn't in a boolean flow
else {
if (showOverlay) {
var attributeText = d.attributes
? JSON.stringify(d.attributes)
: "None";
if (!d.children && !d._children) {
// it's a leaf
//showValueOverlay( d.value );
showValueOverlay(
"Attributes: " + attributeText + "</br>Content: " + d.value
);
} //oops, we wanted to collapse this thing
else {
//showValueOverlay( "Attributes: " + attributeText + "</br>Content: " + d.value );
if (d.children) {
// hide the children by moving them into _children
d._children = d.children;
d.children = null;
} else {
// bring back the hidden children
d.children = d._children;
d._children = null;
}
}
}
}
}
function showValueOverlay(val | let grammarObject = [
[["OPENCOMMENT", "WILDCARD", "CLOSECOMMENT"], "COMMENT"], | random_line_split |
visualizer.js | "!"], // this is the escaped form of !
[/NOT/i, "!"],
[/\(/, "("],
[/\)/, ")"],
[/(true)(?![a-zA-Z0-9])/i, "TRUE"],
[/(false)(?![a-zA-Z0-9])/i, "FALSE"],
[/[a-zA-Z]+/, "IDENT"],
[/.+/, "DIRTYTEXT"],
];
makeEvaluatorAndInitialize(
new Boolius(tokenDefinitions, grammarObject),
"((d && c)) || (!b && a) && (!d || !a) && (!c || !b)",
"Click operators to expand or collapse. Click leaf nodes to toggle true/false."
);
}
} else if (newMode.indexOf("xml") > -1) {
// the user wants to look at xml documents.
// is XMLius already loaded?
if (!evaluator || !(evaluator instanceof XMLius)) {
let grammarObject = [
[["OPENCOMMENT", "WILDCARD", "CLOSECOMMENT"], "COMMENT"],
// comments will be engulfed by the text of a node
// and ignored when the node is asked for its text as a string
[["COMMENT"], "#TEXT_NODE"],
[["<", "/", "IDENT", ">"], "CLOSETAG"],
[["<", "IDENT", ">"], "OPENTAG"],
[["<", "IDENT", "/", ">"], "XMLNODE"],
[["<", "IDENT", "IDENT", "=", '"', "WILDCARD", '"'], "OPENTAGSTART"],
/* Some recursive self-nesting here */
[
["OPENTAGSTART", "IDENT", "=", '"', "WILDCARD", '"'],
"OPENTAGSTART",
],
[["OPENTAGSTART", ">"], "OPENTAG"],
// can't have two identifiers in a row, unless we're between an opening and closing tag
// a/k/a node.text
[["IDENT", "IDENT"], "#TEXT_NODE"],
[["IDENT", "#TEXT_NODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "#TEXT_NODE"], "#TEXT_NODE"],
// let's also have nested nodes engulfed in the NODETEXT
[["XMLNODE", "#TEXT_NODE"], "#TEXT_NODE"],
[["XMLNODES", "#TEXT_NODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "XMLNODE"], "#TEXT_NODE"],
[["#TEXT_NODE", "XMLNODES"], "#TEXT_NODE"],
[["OPENTAG", "CLOSETAG"], "XMLNODE"],
[["OPENTAG", "#TEXT_NODE", "CLOSETAG"], "XMLNODE"],
[["OPENTAG", "XMLNODE", "CLOSETAG"], "XMLNODE"],
[["XMLNODE", "XMLNODE"], "XMLNODES"],
[["OPENTAG", "XMLNODES", "CLOSETAG"], "XMLNODE"],
];
let IGNORE = true;
let tokenDefinitions = [
[/\s+/, "", IGNORE],
[/<!--/, "OPENCOMMENT"],
[/-->/, "CLOSECOMMENT"],
[/\//, "/"],
[/>/, ">"],
[/</, "<"],
[/=/, "="],
[/"/, '"'],
[/'/, '"'],
[/[-+]?[0-9]*\.?[0-9]+/, "NUM_LIT"],
[/[a-zA-Z]+[a-zA-Z0-9-]*/, "IDENT"],
// having trapped all these things, what's left is nodetext
[/[^<]+/, "#TEXT_NODE"],
];
makeEvaluatorAndInitialize(
new XMLius(tokenDefinitions, grammarObject),
`<div class="hintwrapper"><div class="hint">Click operators to expand or collapse. Click leaf nodes to toggle true/false.</div><div class="styled-select green semi-square" style="bold"></div></div>`,
"Mouseover nodes to see attributes. Click nodetext to see content."
);
}
}
}
function makeEvaluatorAndInitialize(newEvaluator, statement, hintText) {
assignEvaluator(newEvaluator);
d3.select("#statement").node().value = statement;
d3.select("div.hint").text(hintText);
evaluateStatement();
}
function assignEvaluator(newEvaluator) {
// don't change if the user wants what they already have
if (evaluator && newEvaluator.constructor === evaluator.constructor) return;
evaluator = newEvaluator;
}
var evaluator;
var winWidth = Math.max(1000, window.innerWidth);
let header = document.querySelector("header");
var winHeight = Math.max(500, window.innerHeight - 240);
var winWidth = Math.max(800, window.innerWidth);
var m = [0, 120, 140, 120],
w = winWidth - m[1] - m[3],
h = winHeight - m[0] - m[2],
i = 0,
root;
var tree = d3.layout.tree().size([h, w]);
var diagonal = d3.svg.diagonal().projection(function (d) {
return [d.y, d.x];
});
var vis = d3
.select("#body")
.append("svg:svg")
.attr("width", w + m[1] + m[3])
.attr("height", h + m[0] + m[2])
.append("svg:g")
.attr("transform", "translate(" + m[3] + "," + m[0] + ")");
vis
.append("text")
.attr("opacity", 1)
.attr("y", 246)
.attr("dy", "1.71em")
.style("font-size", "34px")
.style("text-anchor", "end")
.attr("id", "result")
.text("");
d3.select("#testbutton").on("click", function (e) {
evaluateStatement();
});
d3.select("#statement").on("keyup", function () {
if (d3.event.keyCode == 13) {
d3.select("#testbutton").on("click")();
}
});
var parseTree;
function evaluateStatement() {
var statement = d3.select("#statement").node().value;
parseTree = evaluator.parse(statement);
displayJSON(parseTree);
}
function displayJSON(json) {
if (json == null) return;
root = json;
root.x0 = h / 2;
root.y0 = 0;
//d3.select("#statement").val( root.title );
d3.select("#statement").property("value", root.expressionString);
d3.select("#result").text(root.value);
function toggleAll(d, delay) {
if (!delay) delay = 1;
if (d.children) {
toggle(d);
}
if (d._children) {
toggle(d);
}
}
// Initialize the display to show all nodes.
root.children.forEach(toggleAll, 444);
update(root);
}
// Toggle children.
function toggle(d, showOverlay) {
if (d == undefined) return;
//boolean
if (d.value === true || d.value === false) {
if (d.children) {
// hide the children by moving them into _children
d._children = d.children;
d.children = null;
} else {
// bring back the hidden children
d.children = d._children;
d._children = null;
}
var hasNoChildren = !d.children && !d._children;
if (!hasNoChildren) {
// has an array in d.children or d._children
// but it might be empty!
if (d.children && d.children.length == 0) hasNoChildren = true;
if (d._children && d._children.length == 0) hasNoChildren = true;
}
if (hasNoChildren) {
// it's a leaf
// toggle true/false
if (d.value === true || d.value === false) |
}
} // you clicked something that isn't in a boolean flow
else {
if (showOverlay) {
var attributeText = d.attributes
? JSON.stringify(d.attributes)
: "None";
if (!d.children && !d._children) {
// it's a leaf
//showValueOverlay( d.value );
showValueOverlay(
"Attributes: " + attributeText + "</br>Content: " + d.value
);
} //oops, we wanted to collapse this thing
else {
//showValueOverlay( "Attributes: " + attributeText + "</br>Content: " + d.value );
if (d.children) {
// hide the children by moving them into _children
d._children = d.children;
d.children = null;
} else {
// bring back the hidden children
d.children = d._children;
d._children = null;
}
}
}
}
}
function showValue | {
d.value = !d.value;
//var myInt = parseInt( d.name );
//conditionTruthValues[ myInt ] = d.value;
var myVar = d.name;
evaluator.state[myVar] = d.value;
updateWithoutDeleting(root);
} | identifier_body |
pawn.py | """ A chess pawn on a textured ground plane, rendered with vapory """
from vapory import *
objects = [
# SUN
LightSource([1500,2500,-2500], 'color',1),
# SKY
Sphere( [0,0,0],1, 'hollow',
Texture(
Pigment( 'gradient', [0,1,0],
'color_map{[0 color White] [1 color Blue ]}'
'quick_color', 'White'
),
Finish( 'ambient', 1, 'diffuse', 0)
),
'scale', 10000
),
# GROUND
Plane( [0,1,0], 0 ,
Texture( Pigment( 'color', [1.1*e for e in [0.80,0.55,0.35]])),
Normal( 'bumps', 0.75, 'scale', 0.035),
Finish( 'phong', 0.1 )
), | Union( Sphere([0,1,0],0.35),
Cone([0,0,0],0.5,[0,1,0],0.0),
Texture( Pigment( 'color', [1,0.65,0])),
Finish( 'phong', 0.5)
)
]
scene = Scene( Camera( 'ultra_wide_angle',
'angle',45,
'location',[0.0 , 0.6 ,-3.0],
'look_at', [0.0 , 0.6 , 0.0]
),
objects= objects,
included=['colors.inc']
)
scene.render('pawn.png', remove_temp=False) |
# PAWN | random_line_split |
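For reference, vapory's Scene.render also accepts sizing arguments, so the scene above can be re-rendered at an explicit resolution. This snippet assumes the `scene` object defined above; keyword support may vary by vapory version.

# Render the pawn scene above at an explicit size (sketch; assumes `scene`).
scene.render('pawn_large.png', width=800, height=600, remove_temp=False)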
api_ping_check.py | ###############################################
# RabbitMQ in Action
# Chapter 10 - RabbitMQ ping (HTTP API) check.
###############################################
#
#
# Author: Jason J. W. Williams
# (C)2011
###############################################
import sys, json, httplib, urllib, base64, socket
#/(apic.0) Nagios status codes
EXIT_OK = 0
EXIT_WARNING = 1
EXIT_CRITICAL = 2
EXIT_UNKNOWN = 3
#/(apic.1) Parse arguments
server, port = sys.argv[1].split(":")
vhost = sys.argv[2]
username = sys.argv[3]
password = sys.argv[4]
#/(apic.2) Connect to server
conn = httplib.HTTPConnection(server, port)
#/(apic.3) Build API path
path = "/api/aliveness-test/%s" % urllib.quote(vhost, safe="")
method = "GET"
#/(apic.4) Issue API request
credentials = base64.b64encode("%s:%s" % (username, password))
try:
conn.request(method, path, "",
{"Content-Type" : "application/json",
"Authorization" : "Basic " + credentials})
#/(apic.5) Could not connect to API server, return critical status
except socket.error:
print "CRITICAL: Could not connect to %s:%s" % (server, port)
exit(EXIT_CRITICAL)
response = conn.getresponse()
#/(apic.6) RabbitMQ not responding/alive, return critical status
if response.status > 299:
|
#/(apic.7) RabbitMQ alive, return OK status
print "OK: Broker alive: %s" % response.read()
exit(EXIT_OK)
| print "CRITICAL: Broker not alive: %s" % response.read()
exit(EXIT_CRITICAL) | conditional_block |
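The check above is Python 2 (print statements, httplib). A rough Python 3 port of the same aliveness probe, keeping the Nagios exit codes; the /api/aliveness-test/<vhost> endpoint and the response handling mirror the original, everything else is an untested sketch.

#!/usr/bin/env python3
# Rough Python 3 port of the RabbitMQ aliveness check above (sketch).
import base64
import http.client
import socket
import sys
import urllib.parse

EXIT_OK, EXIT_WARNING, EXIT_CRITICAL, EXIT_UNKNOWN = 0, 1, 2, 3

server, port = sys.argv[1].split(":")
vhost, username, password = sys.argv[2], sys.argv[3], sys.argv[4]

conn = http.client.HTTPConnection(server, int(port))
path = "/api/aliveness-test/%s" % urllib.parse.quote(vhost, safe="")
credentials = base64.b64encode(
    ("%s:%s" % (username, password)).encode()).decode()

try:
    conn.request("GET", path, "",
                 {"Content-Type": "application/json",
                  "Authorization": "Basic " + credentials})
except socket.error:
    print("CRITICAL: Could not connect to %s:%s" % (server, port))
    sys.exit(EXIT_CRITICAL)

response = conn.getresponse()
body = response.read().decode()
if response.status > 299:
    print("CRITICAL: Broker not alive: %s" % body)
    sys.exit(EXIT_CRITICAL)
print("OK: Broker alive: %s" % body)
sys.exit(EXIT_OK)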
api_ping_check.py | ###############################################
# RabbitMQ in Action
# Chapter 10 - RabbitMQ ping (HTTP API) check.
###############################################
#
#
# Author: Jason J. W. Williams
# (C)2011
###############################################
import sys, json, httplib, urllib, base64, socket
#/(apic.0) Nagios status codes
EXIT_OK = 0
EXIT_WARNING = 1
EXIT_CRITICAL = 2
EXIT_UNKNOWN = 3
#/(apic.1) Parse arguments
server, port = sys.argv[1].split(":")
vhost = sys.argv[2]
username = sys.argv[3]
password = sys.argv[4]
#/(apic.2) Connect to server
conn = httplib.HTTPConnection(server, port)
#/(apic.3) Build API path
path = "/api/aliveness-test/%s" % urllib.quote(vhost, safe="")
method = "GET"
#/(apic.4) Issue API request
credentials = base64.b64encode("%s:%s" % (username, password))
try:
conn.request(method, path, "",
{"Content-Type" : "application/json",
"Authorization" : "Basic " + credentials})
#/(apic.5) Could not connect to API server, return critical status
except socket.error:
print "CRITICAL: Could not connect to %s:%s" % (server, port) |
response = conn.getresponse()
#/(apic.6) RabbitMQ not responding/alive, return critical status
if response.status > 299:
print "CRITICAL: Broker not alive: %s" % response.read()
exit(EXIT_CRITICAL)
#/(apic.7) RabbitMQ alive, return OK status
print "OK: Broker alive: %s" % response.read()
exit(EXIT_OK) | exit(EXIT_CRITICAL) | random_line_split |
analytics.js | () {
if (async) {
async.cancel();
}
}
function unbind() {
$(mainAsync).unbind('cancelled', onCancel);
}
$(mainAsync).bind('cancelled', onCancel);
var async;
return async = getCPS({url: '/_uistats', data: data, missingValue: mark404}, cancelMark, function (value, status, xhr) {
unbind();
if (value !== cancelMark && value !== mark404) {
var date = xhr.getResponseHeader('date');
value.serverDate = parseHTTPDate(date).valueOf();
value.clientDate = (new Date()).valueOf();
}
body(value, status, xhr);
});
}
var dataCallback;
var mainAsync = future(function (_dataCallback) {
dataCallback = _dataCallback;
loop();
});
mainAsync.cancel = function () {
$(this).trigger("cancelled");
};
var extraOpts = {};
var prevValue;
return mainAsync;
function loop(deliverValue) {
doGet(_.extend({}, opts, extraOpts), onLoopData);
}
function onLoopData(value) {
if (value === mark404) {
return;
}
if (prevValue) {
value = maybeApplyDelta(prevValue, value);
}
if (!dataCallback.continuing(value)) {
return;
}
prevValue = value;
if (value.lastTStamp) {
extraOpts = {haveTStamp: JSON.stringify(value.lastTStamp)};
}
if (!('nextReqAfter' in value)) {
BUG();
}
setTimeout(loop, value.nextReqAfter);
}
function restoreOpsBlock(prevSamples, samples, keepCount) {
var prevTS = prevSamples.timestamp;
if (samples.timestamp && samples.timestamp.length == 0) {
// server was unable to return any data for this "kind" of
// stats
if (prevSamples && prevSamples.timestamp && prevSamples.timestamp.length > 0) {
return prevSamples;
}
return samples;
}
if (prevTS == undefined ||
prevTS.length == 0 ||
prevTS[prevTS.length-1] != samples.timestamp[0]) {
return samples;
}
var newSamples = {};
for (var keyName in samples) {
var ps = prevSamples[keyName];
if (!ps) {
ps = [];
ps.length = keepCount;
}
newSamples[keyName] = ps.concat(samples[keyName].slice(1)).slice(-keepCount);
}
return newSamples;
}
function maybeApplyDelta(prevValue, value) {
var stats = value.stats;
var prevStats = prevValue.stats || {};
for (var kind in stats) {
var newSamples = restoreOpsBlock(prevStats[kind],
stats[kind],
value.samplesCount + bufferDepth);
stats[kind] = newSamples;
}
return value;
}
}
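restoreOpsBlock and maybeApplyDelta above implement incremental stats: each response overlaps the previous block by one sample, so when the previous block's last timestamp equals the new block's first, the arrays are stitched together and trimmed to a fixed window. The same merge in Python; names are illustrative, not the Couchbase API.

def merge_samples(prev, new, keep):
    """Stitch an incremental stats block onto the previous one.

    `prev` and `new` map series names (including "timestamp") to lists;
    the new block's first sample duplicates the previous block's last.
    Mirrors restoreOpsBlock above.
    """
    prev_ts, new_ts = prev.get("timestamp", []), new.get("timestamp", [])
    if not prev_ts or not new_ts or prev_ts[-1] != new_ts[0]:
        return new                       # nothing to stitch onto
    merged = {}
    for name, samples in new.items():
        base = prev.get(name, [None] * keep)
        merged[name] = (base + samples[1:])[-keep:]   # drop the overlap
    return merged

a = {"timestamp": [1, 2, 3], "ops": [10, 12, 11]}
b = {"timestamp": [3, 4, 5], "ops": [11, 14, 9]}
print(merge_samples(a, b, keep=4))
# {'timestamp': [2, 3, 4, 5], 'ops': [12, 11, 14, 9]}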
var statsBucketURL = self.statsBucketURL = new StringHashFragmentCell("statsBucket");
var statsHostname = self.statsHostname = new StringHashFragmentCell("statsHostname");
var statsStatName = self.statsStatName = new StringHashFragmentCell("statsStatName");
self.selectedGraphNameCell = new StringHashFragmentCell("graph");
self.configurationExtra = new Cell();
self.smallGraphSelectionCellCell = Cell.compute(function (v) {
return v.need(self.displayingSpecificStatsCell) ? self.statsHostname : self.selectedGraphNameCell;
});
// contains bucket details of statsBucketURL bucket (or default if there are no such bucket)
var statsBucketDetails = self.statsBucketDetails = Cell.compute(function (v) {
var uri = v(statsBucketURL);
var buckets = v.need(DAL.cells.bucketsListCell);
var rv;
if (uri !== undefined) {
rv = _.detect(buckets, function (info) {return info.uri === uri});
} else {
rv = _.detect(buckets, function (info) {return info.name === "default"}) || buckets[0];
}
return rv;
}).name("statsBucketDetails");
var statsOptionsCell = self.statsOptionsCell = (new Cell()).name("statsOptionsCell");
statsOptionsCell.setValue({});
_.extend(statsOptionsCell, {
update: function (options) {
this.modifyValue(_.bind($.extend, $, {}), options);
},
equality: _.isEqual
});
var samplesBufferDepthRAW = new StringHashFragmentCell("statsBufferDepth");
self.samplesBufferDepth = Cell.computeEager(function (v) {
return v(samplesBufferDepthRAW) || 1;
});
var zoomLevel;
(function () {
var slider = $("#js_date_slider").slider({
orientation: "vertical",
range: "min",
min: 1,
max: 6,
value: 6,
step: 1,
slide: function(event, ui) {
var dateSwitchers = $("#js_date_slider_container .js_click");
dateSwitchers.eq(dateSwitchers.length - ui.value).trigger('click');
}
});
zoomLevel = (new LinkSwitchCell('zoom', {
firstItemIsDefault: true
})).name("zoomLevel");
_.each('minute hour day week month year'.split(' '), function (name) {
zoomLevel.addItem('js_zoom_' + name, name);
});
zoomLevel.finalizeBuilding();
zoomLevel.subscribeValue(function (zoomLevel) {
var z = $('#js_zoom_' + zoomLevel);
slider.slider('value', 6 - z.index());
self.statsOptionsCell.update({
zoom: zoomLevel
});
});
})();
self.rawStatsCell = Cell.compute(function (v) {
if (v.need(DAL.cells.mode) != "analytics") {
return;
}
var options = v.need(statsOptionsCell);
var node;
var bucket = v.need(statsBucketDetails).name;
var data = _.extend({bucket: bucket}, options);
var statName = v(statsStatName);
if (statName) {
data.statName = statName;
} else if ((node = v(statsHostname))) {
// we don't send node if we're dealing with "specific stats" and
// we're careful to depend on statsHostname cell _only_ when we're
// willing to send node.
data.node = node;
}
var bufferDepth = v.need(self.samplesBufferDepth);
return createSamplesFuture(data, bufferDepth);
});
self.displayingSpecificStatsCell = Cell.compute(function (v) {
return !!(v.need(self.rawStatsCell).specificStatName);
});
self.directoryURLCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.url;
});
self.directoryURLCell.equality = function (a, b) {return a === b;};
self.directoryValueCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.value;
});
self.directoryValueCell.equality = _.isEqual;
self.directoryCell = Cell.compute(function (v) {
var url = v.need(self.directoryURLCell);
if (url) {
return future.get({url: url});
}
return v.need(self.directoryValueCell);
});
self.specificStatTitleCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.origTitle;
});
self.infosCell = Cell.needing(self.directoryCell).compute(function (v, statDesc) {
statDesc = JSON.parse(JSON.stringify(statDesc)); // this makes deep copy of statDesc
var infos = [];
infos.byName = {};
var statItems = [];
var blockIDs = [];
var hadServerResources = false;
if (statDesc.blocks[0].serverResources) {
// We want it last so that default stat name (which is first
// statItems entry) is not from ServerResourcesBlock
hadServerResources = true;
statDesc.blocks = statDesc.blocks.slice(1).concat([statDesc.blocks[0]]);
}
_.each(statDesc.blocks, function (aBlock) {
var blockName = aBlock.blockName;
aBlock.id = _.uniqueId("GB");
blockIDs.push(aBlock.id);
var stats = aBlock.stats;
statItems = statItems.concat(stats);
_.each(stats, function (statInfo) {
statInfo.id = _.uniqueId("G");
statInfo.blockId = aBlock.id;
});
});
// and now make ServerResourcesBlock first for rendering
if (hadServerResources) {
statDesc.blocks.unshift(statDesc.blocks.pop());
}
_.each(statItems, function (item) {
infos.push(item);
infos.byName[item.name] = item;
});
infos.blockIDs = blockIDs;
var infosByTitle = _.groupBy(infos, "title");
_.each(infos, function (statInfo) {
if (statInfo.missing || statInfo.bigTitle) {
return;
}
var title = statInfo.title;
var homonyms = infosByTitle[title];
if (homonyms.length == 1) {
return;
}
var sameBlock = _.any(homonyms, function (otherInfo) {
return | onCancel | identifier_name |
|
analytics.js | detect(buckets, function (info) {return info.name === "default"}) || buckets[0];
}
return rv;
}).name("statsBucketDetails");
var statsOptionsCell = self.statsOptionsCell = (new Cell()).name("statsOptionsCell");
statsOptionsCell.setValue({});
_.extend(statsOptionsCell, {
update: function (options) {
this.modifyValue(_.bind($.extend, $, {}), options);
},
equality: _.isEqual
});
var samplesBufferDepthRAW = new StringHashFragmentCell("statsBufferDepth");
self.samplesBufferDepth = Cell.computeEager(function (v) {
return v(samplesBufferDepthRAW) || 1;
});
var zoomLevel;
(function () {
var slider = $("#js_date_slider").slider({
orientation: "vertical",
range: "min",
min: 1,
max: 6,
value: 6,
step: 1,
slide: function(event, ui) {
var dateSwitchers = $("#js_date_slider_container .js_click");
dateSwitchers.eq(dateSwitchers.length - ui.value).trigger('click');
}
});
zoomLevel = (new LinkSwitchCell('zoom', {
firstItemIsDefault: true
})).name("zoomLevel");
_.each('minute hour day week month year'.split(' '), function (name) {
zoomLevel.addItem('js_zoom_' + name, name);
});
zoomLevel.finalizeBuilding();
zoomLevel.subscribeValue(function (zoomLevel) {
var z = $('#js_zoom_' + zoomLevel);
slider.slider('value', 6 - z.index());
self.statsOptionsCell.update({
zoom: zoomLevel
});
});
})();
self.rawStatsCell = Cell.compute(function (v) {
if (v.need(DAL.cells.mode) != "analytics") {
return;
}
var options = v.need(statsOptionsCell);
var node;
var bucket = v.need(statsBucketDetails).name;
var data = _.extend({bucket: bucket}, options);
var statName = v(statsStatName);
if (statName) {
data.statName = statName;
} else if ((node = v(statsHostname))) {
// we don't send node if we're dealing with "specific stats" and
// we're careful to depend on statsHostname cell _only_ when we're
// willing to send node.
data.node = node;
}
var bufferDepth = v.need(self.samplesBufferDepth);
return createSamplesFuture(data, bufferDepth);
});
self.displayingSpecificStatsCell = Cell.compute(function (v) {
return !!(v.need(self.rawStatsCell).specificStatName);
});
self.directoryURLCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.url;
});
self.directoryURLCell.equality = function (a, b) {return a === b;};
self.directoryValueCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.value;
});
self.directoryValueCell.equality = _.isEqual;
self.directoryCell = Cell.compute(function (v) {
var url = v.need(self.directoryURLCell);
if (url) {
return future.get({url: url});
}
return v.need(self.directoryValueCell);
});
self.specificStatTitleCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.origTitle;
});
self.infosCell = Cell.needing(self.directoryCell).compute(function (v, statDesc) {
statDesc = JSON.parse(JSON.stringify(statDesc)); // this makes deep copy of statDesc
var infos = [];
infos.byName = {};
var statItems = [];
var blockIDs = [];
var hadServerResources = false;
if (statDesc.blocks[0].serverResources) {
// We want it last so that default stat name (which is first
// statItems entry) is not from ServerResourcesBlock
hadServerResources = true;
statDesc.blocks = statDesc.blocks.slice(1).concat([statDesc.blocks[0]]);
}
_.each(statDesc.blocks, function (aBlock) {
var blockName = aBlock.blockName;
aBlock.id = _.uniqueId("GB");
blockIDs.push(aBlock.id);
var stats = aBlock.stats;
statItems = statItems.concat(stats);
_.each(stats, function (statInfo) {
statInfo.id = _.uniqueId("G");
statInfo.blockId = aBlock.id;
});
});
// and now make ServerResourcesBlock first for rendering
if (hadServerResources) {
statDesc.blocks.unshift(statDesc.blocks.pop());
}
_.each(statItems, function (item) {
infos.push(item);
infos.byName[item.name] = item;
});
infos.blockIDs = blockIDs;
var infosByTitle = _.groupBy(infos, "title");
_.each(infos, function (statInfo) {
if (statInfo.missing || statInfo.bigTitle) {
return;
}
var title = statInfo.title;
var homonyms = infosByTitle[title];
if (homonyms.length == 1) {
return;
}
var sameBlock = _.any(homonyms, function (otherInfo) {
return otherInfo !== statInfo && otherInfo.blockId === statInfo.blockId;
});
var blockInfo = _.detect(statDesc.blocks, function (blockCand) {
return blockCand.id === statInfo.blockId;
});
if (!blockInfo) {
BUG();
}
if (sameBlock && blockInfo.columns) {
var idx = _.indexOf(blockInfo.stats, statInfo);
if (idx < 0) {
BUG();
}
statInfo.bigTitle = blockInfo.columns[idx % 4] + ' ' + statInfo.title;
} else {
statInfo.bigTitle = (blockInfo.bigTitlePrefix || blockInfo.blockName) + ' ' + statInfo.title;
}
});
return {statDesc: statDesc, infos: infos};
});
self.statsDescInfoCell = Cell.needing(self.infosCell).compute(function (v, infos) {
return infos.statDesc;
}).name("statsDescInfoCell");
self.graphsConfigurationCell = Cell.compute(function (v) {
var selectedGraphName = v(v.need(self.smallGraphSelectionCellCell));
var stats = v.need(self.rawStatsCell);
var selected;
var infos = v.need(self.infosCell).infos;
if (selectedGraphName && (selectedGraphName in infos.byName)) {
selected = infos.byName[selectedGraphName];
} else {
selected = infos[0];
}
var auxTS = {};
var samples = _.clone(stats.stats[stats.mainStatsBlock]);
_.each(stats.stats, function (subSamples, subName) {
if (subName === stats.mainStatsBlock) {
return;
}
var timestamps = subSamples.timestamp;
for (var k in subSamples) {
if (k == "timestamp") {
continue;
} | }
});
if (!samples[selected.name]) {
selected = _.detect(infos, function (info) {return samples[info.name];}) || selected;
}
return {
interval: stats.interval,
zoomLevel: v.need(zoomLevel),
selected: selected,
samples: samples,
timestamp: samples.timestamp,
auxTimestamps: auxTS,
serverDate: stats.serverDate,
clientDate: stats.clientDate,
infos: infos,
extra: v(self.configurationExtra)
};
});
self.statsNodesCell = Cell.compute(function (v) {
return _.filter(v.need(DAL.cells.serversCell).active, function (node) {
return node.clusterMembership !== 'inactiveFailed' && node.status !== 'unhealthy';
});
});
self.hotKeysCell = Cell.computeEager(function (v) {
if (!v(self.statsBucketDetails)) {
// this deals with "no buckets at all" case for us
return [];
}
return v.need(self.rawStatsCell).hot_keys || [];
});
self.hotKeysCell.equality = _.isEqual;
Cell.autonameCells(self);
})(StatsModel);
var maybeReloadAppDueToLeak = (function () {
var counter = 300;
return function () {
if (!window.G_vmlCanvasManager)
return;
if (!--counter)
reloadPage();
};
})();
;(function (global) {
var queuedUpdates = [];
function flushQueuedUpdate() {
var i = queuedUpdates.length;
while (--i >= 0) {
queuedUpdates[i]();
}
queuedUpdates.length = 0;
}
var shadowSize = 3;
if (window.G_vmlCanvasManager) {
shadowSize = 0;
}
function renderSmallGraph(jq, options) {
function reqOpt(name) {
var rv = options[name];
if (rv === undefined)
throw new Error("missing option: " + name);
return rv;
}
var data = reqOpt('data');
var now = reqOpt('now');
var plotSeries = buildPlot | samples[k] = subSamples[k];
auxTS[k] = timestamps; | random_line_split |
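The loop/onLoopData pair above drives the long-poll: each response carries nextReqAfter (when to ask again) and lastTStamp (sent back as haveTStamp so the server can answer with a delta). A skeletal Python version of that loop; the field names match the code above, the fetch and consumer callables are illustrative stand-ins.

import time

def poll_stats(fetch, on_value):
    """Drive the incremental stats poll, mirroring loop/onLoopData above."""
    extra = {}
    while True:
        value = fetch(extra)
        on_value(value)
        if "lastTStamp" in value:                   # cursor for delta replies
            extra = {"haveTStamp": value["lastTStamp"]}
        time.sleep(value["nextReqAfter"] / 1000.0)  # server-paced repoll

responses = iter([
    {"stats": {"ops": [1, 2]}, "lastTStamp": 2, "nextReqAfter": 0},
    {"stats": {"ops": [2, 3]}, "lastTStamp": 3, "nextReqAfter": 0},
])
try:
    poll_stats(lambda extra: next(responses), print)
except StopIteration:
    pass                                            # canned feed exhausted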
analytics.js | GraphJQ, smallGraphsContainerJQ, descCell, configurationCell, isBucketAvailableCell) {
this.largeGraphJQ = largeGraphJQ;
this.smallGraphsContainerJQ = smallGraphsContainerJQ;
this.drawnDesc = this.drawnConfiguration = {};
Cell.subscribeMultipleValues($m(this, 'renderAll'), descCell, configurationCell, isBucketAvailableCell);
},
// renderAll (and subscribeMultipleValues) exist to strictly order renderStatsBlock w.r.t. updateGraphs
renderAll: function (desc, configuration, isBucketAvailable) {
if (this.drawnDesc !== desc) {
this.renderStatsBlock(desc);
this.drawnDesc = desc;
}
if (this.drawnConfiguration !== configuration) {
this.updateGraphs(configuration);
this.drawnConfiguration = configuration;
}
if (!isBucketAvailable) {
this.unrenderNothing();
this.largeGraphJQ.html('');
this.smallGraphsContainerJQ.html('');
$('#js_analytics .js_current-graph-name').text('');
$('#js_analytics .js_current-graph-desc').text('');
}
},
unrenderNothing: function () {
if (this.spinners) {
_.each(this.spinners, function (s) {s.remove()});
this.spinners = null;
}
},
renderNothing: function () {
if (this.spinners) {
return;
}
this.spinners = [
overlayWithSpinner(this.largeGraphJQ, undefined, undefined, 1)
];
this.smallGraphsContainerJQ.find('#js_small_graph_value').text('?');
},
renderStatsBlock: function (descValue) {
if (!descValue) {
this.smallGraphsContainerJQ.html('');
this.renderNothing();
return;
}
this.unrenderNothing();
renderTemplate('js_new_stats_block', descValue, this.smallGraphsContainerJQ[0]);
$(this).trigger('menelaus.graphs-widget.rendered-stats-block');
},
zoomToSeconds: {
minute: 60,
hour: 3600,
day: 86400,
week: 691200,
month: 2678400,
year: 31622400
},
forceNextRendering: function () {
this.lastCompletedTimestamp = undefined;
},
updateGraphs: function (configuration) {
var self = this;
if (!configuration) {
self.lastCompletedTimestamp = undefined;
return self.renderNothing();
}
self.unrenderNothing();
var nowTStamp = (new Date()).valueOf();
if (self.lastCompletedTimestamp && nowTStamp - self.lastCompletedTimestamp < 200) {
// skip this sample as we're too slow
return;
}
var stats = configuration.samples;
var timeOffset = configuration.clientDate - configuration.serverDate;
var zoomMillis = (self.zoomToSeconds[configuration.zoomLevel] || 60) * 1000;
var selected = configuration.selected;
var now = (new Date()).valueOf();
if (configuration.interval < 2000) {
now -= StatsModel.samplesBufferDepth.value * 1000;
}
maybeReloadAppDueToLeak();
var auxTS = configuration.auxTimestamps || {};
plotStatGraph(self.largeGraphJQ, stats[selected.name], auxTS[selected.name] || configuration.timestamp, {
color: '#1d88ad',
verticalMargin: 1.02,
fixedTimeWidth: zoomMillis,
timeOffset: timeOffset,
lastSampleTime: now,
breakInterval: configuration.interval * 2.5,
maxY: configuration.infos.byName[selected.name].maxY,
isBytes: configuration.selected.isBytes
});
try {
var visibleBlockIDs = {};
_.each($(_.map(configuration.infos.blockIDs, $i)).filter(":has(.js_stats:visible)"), function (e) {
visibleBlockIDs[e.id] = e;
});
} catch (e) {
debugger
throw e;
}
_.each(configuration.infos, function (statInfo) {
if (!visibleBlockIDs[statInfo.blockId]) {
return;
}
var statName = statInfo.name;
var graphContainer = $($i(statInfo.id));
if (graphContainer.length == 0) {
return;
}
renderSmallGraph(graphContainer, {
data: stats[statName] || [],
breakInterval: configuration.interval * 2.5,
timeOffset: timeOffset,
now: now,
zoomMillis: zoomMillis,
isSelected: selected.name == statName,
timestamp: auxTS[statName] || configuration.timestamp,
maxY: configuration.infos.byName[statName].maxY,
isBytes: statInfo.isBytes
});
});
self.lastCompletedTimestamp = (new Date()).valueOf();
$(self).trigger('menelaus.graphs-widget.rendered-graphs');
}
});
var AnalyticsSection = {
onKeyStats: function (hotKeys) {
renderTemplate('js_top_keys', _.map(hotKeys, function (e) {
return $.extend({}, e, {total: 0 + e.gets + e.misses});
}));
},
init: function () {
var self = this;
StatsModel.hotKeysCell.subscribeValue($m(self, 'onKeyStats'));
prepareTemplateForCell('js_top_keys', StatsModel.hotKeysCell);
StatsModel.displayingSpecificStatsCell.subscribeValue(function (displayingSpecificStats) {
$('#js_top_keys_block').toggle(!displayingSpecificStats);
});
$('#js_analytics .js_block-expander').live('click', function () {
// this forces configuration refresh and graphs redraw
self.widget.forceNextRendering();
StatsModel.configurationExtra.setValue({});
});
IOCenter.staleness.subscribeValue(function (stale) {
$('#js_stats-period-container')[stale ? 'hide' : 'show']();
$('#js_analytics .js_staleness-notice')[stale ? 'show' : 'hide']();
});
(function () {
var cell = Cell.compute(function (v) {
var mode = v.need(DAL.cells.mode);
if (mode != 'analytics') {
return;
}
var allBuckets = v.need(DAL.cells.bucketsListCell);
var selectedBucket = v(StatsModel.statsBucketDetails);
return {list: _.map(allBuckets, function (info) {return [info.uri, info.name]}),
selected: selectedBucket && selectedBucket.uri};
});
$('#js_analytics_buckets_select').bindListCell(cell, {
onChange: function (e, newValue) {
StatsModel.statsBucketURL.setValue(newValue);
}
});
})();
(function () {
var cell = Cell.compute(function (v) {
var mode = v.need(DAL.cells.mode);
if (mode != 'analytics') {
return;
}
var allNodes = v(StatsModel.statsNodesCell);
var statsHostname = v(StatsModel.statsHostname);
var list;
if (allNodes) {
list = _.map(allNodes, function (srv) {
var name = ViewHelpers.maybeStripPort(srv.hostname, allNodes);
return [srv.hostname, name];
});
// natural sort by full hostname (which includes port number)
list.sort(mkComparatorByProp(0, naturalSort));
list.unshift(['', 'All Server Nodes (' + allNodes.length + ')' ]);
}
return {list: list, selected: statsHostname};
});
$('#js_analytics_servers_select').bindListCell(cell, {
onChange: function (e, newValue) {
StatsModel.statsHostname.setValue(newValue || undefined);
}
});
})();
self.widget = new GraphsWidget(
$('#js_analytics_main_graph'),
$('#js_stats_container'),
StatsModel.statsDescInfoCell,
StatsModel.graphsConfigurationCell,
DAL.cells.isBucketsAvailableCell);
Cell.needing(StatsModel.graphsConfigurationCell).compute(function (v, configuration) {
return configuration.timestamp.length == 0;
}).subscribeValue(function (missingSamples) {
$('#js_stats-period-container').toggleClass('dynamic_missing-samples', !!missingSamples);
});
Cell.needing(StatsModel.graphsConfigurationCell).compute(function (v, configuration) {
var zoomMillis = GraphsWidget.prototype.zoomToSeconds[configuration.zoomLevel] * 1000;
return Math.ceil(Math.min(zoomMillis, configuration.serverDate - configuration.timestamp[0]) / 1000);
}).subscribeValue(function (visibleSeconds) {
$('#js_stats_visible_period').text(isNaN(visibleSeconds) ? '?' : formatUptime(visibleSeconds));
});
(function () {
var selectionCell;
StatsModel.smallGraphSelectionCellCell.subscribeValue(function (cell) {
selectionCell = cell;
});
$(".js_analytics-small-graph").live("click", function (e) {
// don't intercept right arrow clicks
if ($(e.target).is(".js_right-arrow, .js_right-arrow *")) {
return;
}
e.preventDefault();
var graphParam = this.getAttribute('data-graph');
if (!graphParam) | {
debugger
throw new Error("shouldn't happen");
} | conditional_block |
|
analytics.js | detect(buckets, function (info) {return info.name === "default"}) || buckets[0];
}
return rv;
}).name("statsBucketDetails");
var statsOptionsCell = self.statsOptionsCell = (new Cell()).name("statsOptionsCell");
statsOptionsCell.setValue({});
_.extend(statsOptionsCell, {
update: function (options) {
this.modifyValue(_.bind($.extend, $, {}), options);
},
equality: _.isEqual
});
var samplesBufferDepthRAW = new StringHashFragmentCell("statsBufferDepth");
self.samplesBufferDepth = Cell.computeEager(function (v) {
return v(samplesBufferDepthRAW) || 1;
});
var zoomLevel;
(function () {
var slider = $("#js_date_slider").slider({
orientation: "vertical",
range: "min",
min: 1,
max: 6,
value: 6,
step: 1,
slide: function(event, ui) {
var dateSwitchers = $("#js_date_slider_container .js_click");
dateSwitchers.eq(dateSwitchers.length - ui.value).trigger('click');
}
});
zoomLevel = (new LinkSwitchCell('zoom', {
firstItemIsDefault: true
})).name("zoomLevel");
_.each('minute hour day week month year'.split(' '), function (name) {
zoomLevel.addItem('js_zoom_' + name, name);
});
zoomLevel.finalizeBuilding();
zoomLevel.subscribeValue(function (zoomLevel) {
var z = $('#js_zoom_' + zoomLevel);
slider.slider('value', 6 - z.index());
self.statsOptionsCell.update({
zoom: zoomLevel
});
});
})();
self.rawStatsCell = Cell.compute(function (v) {
if (v.need(DAL.cells.mode) != "analytics") {
return;
}
var options = v.need(statsOptionsCell);
var node;
var bucket = v.need(statsBucketDetails).name;
var data = _.extend({bucket: bucket}, options);
var statName = v(statsStatName);
if (statName) {
data.statName = statName;
} else if ((node = v(statsHostname))) {
// we don't send node if we're dealing with "specific stats" and
// we're careful to depend on statsHostname cell _only_ when we're
// willing to send node.
data.node = node;
}
var bufferDepth = v.need(self.samplesBufferDepth);
return createSamplesFuture(data, bufferDepth);
});
self.displayingSpecificStatsCell = Cell.compute(function (v) {
return !!(v.need(self.rawStatsCell).specificStatName);
});
self.directoryURLCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.url;
});
self.directoryURLCell.equality = function (a, b) {return a === b;};
self.directoryValueCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.value;
});
self.directoryValueCell.equality = _.isEqual;
self.directoryCell = Cell.compute(function (v) {
var url = v.need(self.directoryURLCell);
if (url) {
return future.get({url: url});
}
return v.need(self.directoryValueCell);
});
self.specificStatTitleCell = Cell.compute(function (v) {
return v.need(self.rawStatsCell).directory.origTitle;
});
self.infosCell = Cell.needing(self.directoryCell).compute(function (v, statDesc) {
statDesc = JSON.parse(JSON.stringify(statDesc)); // this makes deep copy of statDesc
var infos = [];
infos.byName = {};
var statItems = [];
var blockIDs = [];
var hadServerResources = false;
if (statDesc.blocks[0].serverResources) {
// We want it last so that default stat name (which is first
// statItems entry) is not from ServerResourcesBlock
hadServerResources = true;
statDesc.blocks = statDesc.blocks.slice(1).concat([statDesc.blocks[0]]);
}
_.each(statDesc.blocks, function (aBlock) {
var blockName = aBlock.blockName;
aBlock.id = _.uniqueId("GB");
blockIDs.push(aBlock.id);
var stats = aBlock.stats;
statItems = statItems.concat(stats);
_.each(stats, function (statInfo) {
statInfo.id = _.uniqueId("G");
statInfo.blockId = aBlock.id;
});
});
// and now make ServerResourcesBlock first for rendering
if (hadServerResources) {
statDesc.blocks.unshift(statDesc.blocks.pop());
}
_.each(statItems, function (item) {
infos.push(item);
infos.byName[item.name] = item;
});
infos.blockIDs = blockIDs;
var infosByTitle = _.groupBy(infos, "title");
_.each(infos, function (statInfo) {
if (statInfo.missing || statInfo.bigTitle) {
return;
}
var title = statInfo.title;
var homonyms = infosByTitle[title];
if (homonyms.length == 1) {
return;
}
var sameBlock = _.any(homonyms, function (otherInfo) {
return otherInfo !== statInfo && otherInfo.blockId === statInfo.blockId;
});
var blockInfo = _.detect(statDesc.blocks, function (blockCand) {
return blockCand.id === statInfo.blockId;
});
if (!blockInfo) {
BUG();
}
if (sameBlock && blockInfo.columns) {
var idx = _.indexOf(blockInfo.stats, statInfo);
if (idx < 0) {
BUG();
}
statInfo.bigTitle = blockInfo.columns[idx % 4] + ' ' + statInfo.title;
} else {
statInfo.bigTitle = (blockInfo.bigTitlePrefix || blockInfo.blockName) + ' ' + statInfo.title;
}
});
return {statDesc: statDesc, infos: infos};
});
self.statsDescInfoCell = Cell.needing(self.infosCell).compute(function (v, infos) {
return infos.statDesc;
}).name("statsDescInfoCell");
self.graphsConfigurationCell = Cell.compute(function (v) {
var selectedGraphName = v(v.need(self.smallGraphSelectionCellCell));
var stats = v.need(self.rawStatsCell);
var selected;
var infos = v.need(self.infosCell).infos;
if (selectedGraphName && (selectedGraphName in infos.byName)) {
selected = infos.byName[selectedGraphName];
} else {
selected = infos[0];
}
var auxTS = {};
var samples = _.clone(stats.stats[stats.mainStatsBlock]);
_.each(stats.stats, function (subSamples, subName) {
if (subName === stats.mainStatsBlock) {
return;
}
var timestamps = subSamples.timestamp;
for (var k in subSamples) {
if (k == "timestamp") {
continue;
}
samples[k] = subSamples[k];
auxTS[k] = timestamps;
}
});
if (!samples[selected.name]) {
selected = _.detect(infos, function (info) {return samples[info.name];}) || selected;
}
return {
interval: stats.interval,
zoomLevel: v.need(zoomLevel),
selected: selected,
samples: samples,
timestamp: samples.timestamp,
auxTimestamps: auxTS,
serverDate: stats.serverDate,
clientDate: stats.clientDate,
infos: infos,
extra: v(self.configurationExtra)
};
});
self.statsNodesCell = Cell.compute(function (v) {
return _.filter(v.need(DAL.cells.serversCell).active, function (node) {
return node.clusterMembership !== 'inactiveFailed' && node.status !== 'unhealthy';
});
});
self.hotKeysCell = Cell.computeEager(function (v) {
if (!v(self.statsBucketDetails)) {
// this deals with "no buckets at all" case for us
return [];
}
return v.need(self.rawStatsCell).hot_keys || [];
});
self.hotKeysCell.equality = _.isEqual;
Cell.autonameCells(self);
})(StatsModel);
var maybeReloadAppDueToLeak = (function () {
var counter = 300;
return function () {
if (!window.G_vmlCanvasManager)
return;
if (!--counter)
reloadPage();
};
})();
;(function (global) {
var queuedUpdates = [];
function flushQueuedUpdate() |
var shadowSize = 3;
if (window.G_vmlCanvasManager) {
shadowSize = 0;
}
function renderSmallGraph(jq, options) {
function reqOpt(name) {
var rv = options[name];
if (rv === undefined)
throw new Error("missing option: " + name);
return rv;
}
var data = reqOpt('data');
var now = reqOpt('now');
var plotSeries = | {
var i = queuedUpdates.length;
while (--i >= 0) {
queuedUpdates[i]();
}
queuedUpdates.length = 0;
} | identifier_body |
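The extracted identifier_body above (the body of flushQueuedUpdate) drains queuedUpdates from the end and then truncates the array, so the most recently queued repaint callbacks run first. The same batching in Python:

queued_updates = []

def flush_queued_updates():
    """Run pending callbacks newest-first, then clear the queue
    (mirrors the flushQueuedUpdate body above)."""
    for callback in reversed(queued_updates):
        callback()
    queued_updates.clear()

queued_updates.append(lambda: print("first queued"))
queued_updates.append(lambda: print("second queued"))
flush_queued_updates()   # prints "second queued", then "first queued"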
app.js | 'use strict';
angular.module('citizenForumsShowApp', [
'citizenForumsShowApp.services',
'ngCookies',
'ngResource',
'ngSanitize',
'ngRoute',
'uiGmapgoogle-maps',
'xeditable',
'restangular',
'nl2br'
]).config(['$interpolateProvider', function($interpolateProvider) {
$interpolateProvider.startSymbol('[[');
$interpolateProvider.endSymbol(']]');
}]).config(function(uiGmapGoogleMapApiProvider) { | uiGmapGoogleMapApiProvider.configure({
// key: '', // TODO set Google Maps API key
v: '3.17',
language: 'es',
sensor: false,
libraries: 'drawing,geometry,visualization'
});
})
.run(function(editableOptions) {
editableOptions.theme = 'bs3'; // X-editable form theme
})
.constant('CFG', {
DELAY: 600,
RANGE_STEPS: 20,
GMAPS_ZOOM: 14,
GPS_CENTER_POS: { lat: 41.4926867, lng: 2.3613954}, // Premià de Mar (Barcelona) center
PROCESS_PARTICIPATION_STATE: { DRAFT: 1, PRESENTATION: 2, DEBATE: 3, CLOSED: 4 }
})
; | random_line_split |
|
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn | () -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; // time left until the entry is due
let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0;
} else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) {
if let Some(action) = self.take() {
action();
}
}
}
| new | identifier_name |
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn new() -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; // time left until the entry is due
let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0;
} else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) {
if let Some(action) = self.take() |
}
}
| {
action();
} | conditional_block |
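The Rust rows above build a one-thread timer loop: schedule pushes an (action, due) pair into a queue under a mutex and signals a condvar, and the worker sleeps for due minus now, re-queuing entries that wake too early. A compact Python analogue using heapq and threading.Condition; this is an illustrative sketch, not a port of ScheduleQueue.

import heapq
import threading
import time

class EventLoop:
    """Single worker thread that runs callbacks at their due time."""
    def __init__(self):
        self._heap = []                      # (due, seq, action)
        self._cond = threading.Condition()
        self._seq = 0                        # tie-breaker for equal due times
        threading.Thread(target=self._run, daemon=True).start()

    def schedule(self, action, delay):
        with self._cond:
            heapq.heappush(self._heap,
                           (time.monotonic() + delay, self._seq, action))
            self._seq += 1
            self._cond.notify()              # wake the worker to re-check

    def _run(self):
        while True:
            with self._cond:
                while not self._heap:
                    self._cond.wait()
                due, _, action = self._heap[0]
                timeout = due - time.monotonic()
                if timeout > 0:              # not due yet: sleep, then retry
                    self._cond.wait(timeout)
                    continue
                heapq.heappop(self._heap)
            action()                         # run outside the lock

loop = EventLoop()
loop.schedule(lambda: print("second"), 0.2)
loop.schedule(lambda: print("first"), 0.1)
time.sleep(0.3)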
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn new() -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; | } else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) {
if let Some(action) = self.take() {
action();
}
}
} | let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0; | random_line_split |
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn new() -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; // time left until the entry is due
let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0;
} else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) |
}
| {
if let Some(action) = self.take() {
action();
}
} | identifier_body |
builder.rs | be used directly. Use the
/// methods in the `InstBuilder` trait instead.
///
/// Any data type that implements `InstBuilderBase` also gets all the methods of the `InstBuilder`
/// trait.
pub trait InstBuilderBase<'f>: Sized {
/// Get an immutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert an instruction and return a reference to it, consuming the builder.
///
/// The result types may depend on a controlling type variable. For non-polymorphic
/// instructions with multiple results, pass `VOID` for the `ctrl_typevar` argument.
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph);
}
// Include trait code generated by `lib/cretonne/meta/gen_instr.py`.
//
// This file defines the `InstBuilder` trait as an extension of `InstBuilderBase` with methods per
// instruction format and per opcode.
include!(concat!(env!("OUT_DIR"), "/inst_builder.rs"));
/// Any type implementing `InstBuilderBase` gets all the `InstBuilder` methods for free.
impl<'f, T: InstBuilderBase<'f>> InstBuilder<'f> for T {}
/// Base trait for instruction inserters.
///
/// This is an alternative base trait for an instruction builder to implement.
///
/// An instruction inserter can be adapted into an instruction builder by wrapping it in an
/// `InsertBuilder`. This provides some common functionality for instruction builders that insert
/// new instructions, as opposed to the `ReplaceBuilder` which overwrites existing instructions.
pub trait InstInserterBase<'f>: Sized {
/// Get an immutable reference to the data flow graph.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert a new instruction which belongs to the DFG.
fn insert_built_inst(self, inst: Inst, ctrl_typevar: Type) -> &'f mut DataFlowGraph;
}
use std::marker::PhantomData;
/// Builder that inserts an instruction at the current position.
///
/// An `InsertBuilder` is a wrapper for an `InstInserterBase` that turns it into an instruction
/// builder with some additional facilities for creating instructions that reuse existing values as
/// their results.
pub struct InsertBuilder<'f, IIB: InstInserterBase<'f>> {
inserter: IIB,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
/// Create a new builder which inserts instructions at `pos`.
/// The `dfg` and `pos.layout` references should be from the same `Function`.
pub fn new(inserter: IIB) -> InsertBuilder<'f, IIB> {
InsertBuilder {
inserter,
unused: PhantomData,
}
}
/// Reuse result values in `reuse`.
///
/// Convert this builder into one that will reuse the provided result values instead of
/// allocating new ones. The provided values for reuse must not be attached to anything. Any
/// missing result values will be allocated as normal.
///
/// The `reuse` argument is expected to be an array of `Option<Value>`.
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
where
Array: AsRef<[Option<Value>]>,
{
InsertReuseBuilder {
inserter: self.inserter,
reuse,
unused: PhantomData,
}
}
/// Reuse a single result value.
///
/// Convert this into a builder that will reuse `v` as the single result value. The reused
/// result value `v` must not be attached to anything.
///
/// This method should only be used when building an instruction with exactly one result. Use
/// `with_results()` for the more general case.
pub fn with_result(self, v: Value) -> InsertReuseBuilder<'f, IIB, [Option<Value>; 1]> {
// TODO: Specialize this to return a different builder that just attaches `v` instead of
// calling `make_inst_results_reusing()`.
self.with_results([Some(v)])
}
}
impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, IIB> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
dfg.make_inst_results(inst, ctrl_typevar);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
pub struct InsertReuseBuilder<'f, IIB, Array>
where
IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>,
{
inserter: IIB,
reuse: Array,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB, Array> InstBuilderBase<'f> for InsertReuseBuilder<'f, IIB, Array>
where IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>
{
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
// Make an `Iterator<Item = Option<Value>>`.
let ru = self.reuse.as_ref().iter().cloned();
dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Instruction builder that replaces an existing instruction.
///
/// The inserted instruction will have the same `Inst` number as the old one.
///
/// If the old instruction still has result values attached, it is assumed that the new instruction
/// produces the same number and types of results. The old result values are preserved. If the
/// replacement instruction format does not support multiple results, the builder panics. It is a
/// bug to leave result values dangling.
pub struct ReplaceBuilder<'f> {
dfg: &'f mut DataFlowGraph,
inst: Inst,
}
impl<'f> ReplaceBuilder<'f> {
/// Create a `ReplaceBuilder` that will overwrite `inst`.
pub fn new(dfg: &'f mut DataFlowGraph, inst: Inst) -> ReplaceBuilder {
ReplaceBuilder { dfg, inst }
}
}
impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.dfg
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.dfg
}
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
// Splat the new instruction on top of the old one.
self.dfg[self.inst] = data;
if !self.dfg.has_results(self.inst) |
(self.inst, self.dfg)
}
}
#[cfg(test)]
mod tests {
use cursor::{Cursor, FuncCursor};
use ir::{Function, InstBuilder, ValueDef};
use ir::types::*;
use ir::condcodes::*;
#[test]
fn types() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
// Explicit types.
let v0 = pos.ins().iconst(I32, 3);
assert_eq!(pos.func.dfg.value_type(v0), I32);
// Inferred from inputs.
let v1 = pos.ins().iadd(arg0, v0);
assert_eq!(pos.func.dfg.value_type(v1), I32);
// Formula.
let cmp = pos.ins().icmp(IntCC::Equal, arg0, v0);
assert_eq!(pos.func.dfg.value_type | {
// The old result values were either detached or non-existent.
// Construct new ones.
self.dfg.make_inst_results(self.inst, ctrl_typevar);
} | conditional_block |
builder.rs | normally be used directly. Use the
/// methods in the `InstBuilder` trait instead.
///
/// Any data type that implements `InstBuilderBase` also gets all the methods of the `InstBuilder`
/// trait.
pub trait InstBuilderBase<'f>: Sized {
/// Get an immutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert an instruction and return a reference to it, consuming the builder.
///
/// The result types may depend on a controlling type variable. For non-polymorphic
/// instructions with multiple results, pass `VOID` for the `ctrl_typevar` argument.
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph);
}
// Include trait code generated by `lib/cretonne/meta/gen_instr.py`.
//
// This file defines the `InstBuilder` trait as an extension of `InstBuilderBase` with methods per
// instruction format and per opcode.
include!(concat!(env!("OUT_DIR"), "/inst_builder.rs"));
/// Any type implementing `InstBuilderBase` gets all the `InstBuilder` methods for free.
impl<'f, T: InstBuilderBase<'f>> InstBuilder<'f> for T {}
/// Base trait for instruction inserters.
///
/// This is an alternative base trait for an instruction builder to implement.
///
/// An instruction inserter can be adapted into an instruction builder by wrapping it in an
/// `InsertBuilder`. This provides some common functionality for instruction builders that insert
/// new instructions, as opposed to the `ReplaceBuilder` which overwrites existing instructions.
pub trait InstInserterBase<'f>: Sized {
/// Get an immutable reference to the data flow graph.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert a new instruction which belongs to the DFG.
fn insert_built_inst(self, inst: Inst, ctrl_typevar: Type) -> &'f mut DataFlowGraph;
}
use std::marker::PhantomData;
/// Builder that inserts an instruction at the current position.
///
/// An `InsertBuilder` is a wrapper for an `InstInserterBase` that turns it into an instruction
/// builder with some additional facilities for creating instructions that reuse existing values as
/// their results.
pub struct InsertBuilder<'f, IIB: InstInserterBase<'f>> {
inserter: IIB,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
/// Create a new builder which inserts instructions at `pos`.
/// The `dfg` and `pos.layout` references should be from the same `Function`.
pub fn new(inserter: IIB) -> InsertBuilder<'f, IIB> {
InsertBuilder {
inserter,
unused: PhantomData,
}
}
/// Reuse result values in `reuse`.
///
/// Convert this builder into one that will reuse the provided result values instead of
/// allocating new ones. The provided values for reuse must not be attached to anything. Any
/// missing result values will be allocated as normal.
///
/// The `reuse` argument is expected to be an array of `Option<Value>`.
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
where
Array: AsRef<[Option<Value>]>,
{
InsertReuseBuilder {
inserter: self.inserter,
reuse,
unused: PhantomData,
}
}
/// Reuse a single result value.
///
/// Convert this into a builder that will reuse `v` as the single result value. The reused
/// result value `v` must not be attached to anything.
///
/// This method should only be used when building an instruction with exactly one result. Use
/// `with_results()` for the more general case.
pub fn with_result(self, v: Value) -> InsertReuseBuilder<'f, IIB, [Option<Value>; 1]> {
// TODO: Specialize this to return a different builder that just attaches `v` instead of
// calling `make_inst_results_reusing()`.
self.with_results([Some(v)])
}
}
impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, IIB> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
dfg.make_inst_results(inst, ctrl_typevar);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
pub struct InsertReuseBuilder<'f, IIB, Array>
where
IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>,
{
inserter: IIB,
reuse: Array,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB, Array> InstBuilderBase<'f> for InsertReuseBuilder<'f, IIB, Array>
where IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>
{
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst; | inst = dfg.make_inst(data);
// Make an `Interator<Item = Option<Value>>`.
let ru = self.reuse.as_ref().iter().cloned();
dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Instruction builder that replaces an existing instruction.
///
/// The inserted instruction will have the same `Inst` number as the old one.
///
/// If the old instruction still has result values attached, it is assumed that the new instruction
/// produces the same number and types of results. The old result values are preserved. If the
/// replacement instruction format does not support multiple results, the builder panics. It is a
/// bug to leave result values dangling.
pub struct ReplaceBuilder<'f> {
dfg: &'f mut DataFlowGraph,
inst: Inst,
}
impl<'f> ReplaceBuilder<'f> {
/// Create a `ReplaceBuilder` that will overwrite `inst`.
pub fn new(dfg: &'f mut DataFlowGraph, inst: Inst) -> ReplaceBuilder {
ReplaceBuilder { dfg, inst }
}
}
impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.dfg
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.dfg
}
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
// Splat the new instruction on top of the old one.
self.dfg[self.inst] = data;
if !self.dfg.has_results(self.inst) {
// The old result values were either detached or non-existent.
// Construct new ones.
self.dfg.make_inst_results(self.inst, ctrl_typevar);
}
(self.inst, self.dfg)
}
}
#[cfg(test)]
mod tests {
use cursor::{Cursor, FuncCursor};
use ir::{Function, InstBuilder, ValueDef};
use ir::types::*;
use ir::condcodes::*;
#[test]
fn types() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
// Explicit types.
let v0 = pos.ins().iconst(I32, 3);
assert_eq!(pos.func.dfg.value_type(v0), I32);
// Inferred from inputs.
let v1 = pos.ins().iadd(arg0, v0);
assert_eq!(pos.func.dfg.value_type(v1), I32);
// Formula.
let cmp = pos.ins().icmp(IntCC::Equal, arg0, v0);
assert_eq!(pos.func.dfg.value_type(cmp | {
let dfg = self.inserter.data_flow_graph_mut(); | random_line_split |
builder.rs | normally be used directly. Use the
/// methods in the `InstBuilder` trait instead.
///
/// Any data type that implements `InstBuilderBase` also gets all the methods of the `InstBuilder`
/// trait.
pub trait InstBuilderBase<'f>: Sized {
/// Get an immutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert an instruction and return a reference to it, consuming the builder.
///
/// The result types may depend on a controlling type variable. For non-polymorphic
/// instructions with multiple results, pass `VOID` for the `ctrl_typevar` argument.
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph);
}
// Include trait code generated by `lib/cretonne/meta/gen_instr.py`.
//
// This file defines the `InstBuilder` trait as an extension of `InstBuilderBase` with methods per
// instruction format and per opcode.
include!(concat!(env!("OUT_DIR"), "/inst_builder.rs"));
/// Any type implementing `InstBuilderBase` gets all the `InstBuilder` methods for free.
impl<'f, T: InstBuilderBase<'f>> InstBuilder<'f> for T {}
/// Base trait for instruction inserters.
///
/// This is an alternative base trait for an instruction builder to implement.
///
/// An instruction inserter can be adapted into an instruction builder by wrapping it in an
/// `InsertBuilder`. This provides some common functionality for instruction builders that insert
/// new instructions, as opposed to the `ReplaceBuilder` which overwrites existing instructions.
pub trait InstInserterBase<'f>: Sized {
/// Get an immutable reference to the data flow graph.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert a new instruction which belongs to the DFG.
fn insert_built_inst(self, inst: Inst, ctrl_typevar: Type) -> &'f mut DataFlowGraph;
}
use std::marker::PhantomData;
/// Builder that inserts an instruction at the current position.
///
/// An `InsertBuilder` is a wrapper for an `InstInserterBase` that turns it into an instruction
/// builder with some additional facilities for creating instructions that reuse existing values as
/// their results.
pub struct InsertBuilder<'f, IIB: InstInserterBase<'f>> {
inserter: IIB,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
/// Create a new builder which inserts instructions at `pos`.
/// The `dfg` and `pos.layout` references should be from the same `Function`.
pub fn new(inserter: IIB) -> InsertBuilder<'f, IIB> {
InsertBuilder {
inserter,
unused: PhantomData,
}
}
/// Reuse result values in `reuse`.
///
/// Convert this builder into one that will reuse the provided result values instead of
/// allocating new ones. The provided values for reuse must not be attached to anything. Any
/// missing result values will be allocated as normal.
///
/// The `reuse` argument is expected to be an array of `Option<Value>`.
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
where
Array: AsRef<[Option<Value>]>,
{
InsertReuseBuilder {
inserter: self.inserter,
reuse,
unused: PhantomData,
}
}
/// Reuse a single result value.
///
/// Convert this into a builder that will reuse `v` as the single result value. The reused
/// result value `v` must not be attached to anything.
///
/// This method should only be used when building an instruction with exactly one result. Use
/// `with_results()` for the more general case.
pub fn with_result(self, v: Value) -> InsertReuseBuilder<'f, IIB, [Option<Value>; 1]> {
// TODO: Specialize this to return a different builder that just attaches `v` instead of
// calling `make_inst_results_reusing()`.
self.with_results([Some(v)])
}
}
impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, IIB> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
dfg.make_inst_results(inst, ctrl_typevar);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
pub struct InsertReuseBuilder<'f, IIB, Array>
where
IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>,
{
inserter: IIB,
reuse: Array,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB, Array> InstBuilderBase<'f> for InsertReuseBuilder<'f, IIB, Array>
where IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>
{
fn | (&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
// Make an `Interator<Item = Option<Value>>`.
let ru = self.reuse.as_ref().iter().cloned();
dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Instruction builder that replaces an existing instruction.
///
/// The inserted instruction will have the same `Inst` number as the old one.
///
/// If the old instruction still has result values attached, it is assumed that the new instruction
/// produces the same number and types of results. The old result values are preserved. If the
/// replacement instruction format does not support multiple results, the builder panics. It is a
/// bug to leave result values dangling.
pub struct ReplaceBuilder<'f> {
dfg: &'f mut DataFlowGraph,
inst: Inst,
}
impl<'f> ReplaceBuilder<'f> {
/// Create a `ReplaceBuilder` that will overwrite `inst`.
pub fn new(dfg: &'f mut DataFlowGraph, inst: Inst) -> ReplaceBuilder {
ReplaceBuilder { dfg, inst }
}
}
impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.dfg
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.dfg
}
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
// Splat the new instruction on top of the old one.
self.dfg[self.inst] = data;
if !self.dfg.has_results(self.inst) {
// The old result values were either detached or non-existent.
// Construct new ones.
self.dfg.make_inst_results(self.inst, ctrl_typevar);
}
(self.inst, self.dfg)
}
}
#[cfg(test)]
mod tests {
use cursor::{Cursor, FuncCursor};
use ir::{Function, InstBuilder, ValueDef};
use ir::types::*;
use ir::condcodes::*;
#[test]
fn types() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
// Explicit types.
let v0 = pos.ins().iconst(I32, 3);
assert_eq!(pos.func.dfg.value_type(v0), I32);
// Inferred from inputs.
let v1 = pos.ins().iadd(arg0, v0);
assert_eq!(pos.func.dfg.value_type(v1), I32);
// Formula.
let cmp = pos.ins().icmp(IntCC::Equal, arg0, v0);
assert_eq!(pos.func.dfg.value_type | data_flow_graph | identifier_name |
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path;
use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape), ..} =>
break 'mainloop,
Event::MouseButtonDown {x, y, ..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => {}
}
}
}
Ok(())
}
fn | () -> Result<(), String> {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
}
| main | identifier_name |
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path; | use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape), ..} =>
break 'mainloop,
Event::MouseButtonDown {x, y, ..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => {}
}
}
}
Ok(())
}
fn main() -> Result<(), String> {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
} | use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor; | random_line_split |
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path;
use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape), ..} =>
break 'mainloop,
Event::MouseButtonDown {x, y, ..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => {}
}
}
}
Ok(())
}
fn main() -> Result<(), String> | {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
} | identifier_body |
|
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path;
use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape), ..} =>
break 'mainloop,
Event::MouseButtonDown {x, y, ..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => |
}
}
}
Ok(())
}
fn main() -> Result<(), String> {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
}
| {} | conditional_block |
toHTML.js | 'use strict'
const he = require('he')
const node = require('./node')
const debug = require('debug')('arraydom.toHTML')
const pad = Array(80).join(' ') // don't bother to indent more than 80
const selfClosing = {
// from http://xahlee.info/js/html5_non-closing_tag.html
// These are the tags that don't need an end-tag
area: true,
base: true,
br: true,
col: true,
command: true,
embed: true,
hr: true,
img: true,
input: true,
keygen: true,
link: true,
meta: true,
param: true,
source: true,
track: true,
wbr: true
}
function encode (s) {
//console.log('ENCODE', JSON.stringify(s))
if (typeof s === 'number') {
s = '' + s
}
return he.encode(s, {useNamedReferences: true})
}
/*
return a copy of this node where the second element is defintely the
attribute object. By calling this in all the right places, we allow
people to omit that element. Not sure yet whether that's a good
practice.
We might also want an in-place normalize-node or normalize-tree.
function normalizedNode (node) {
if (node.length === 0) {
throw Error('cant handle empty nodes')
}
if (node.length === 1) {
return [ node[0], {} ]
}
if (typeof node[1] === 'object' && !Array.isArray(node[1])) {
return node
}
const result = node.slice(1)
result.splice(0,0, node[0], {})
return result
}
*/
function toHTML (tree, options) {
options = options || {}
const s = new Serializer(options)
let indent = 0
if (options.compact) {
indent = undefined
}
return s.serialize(tree, indent)
}
function Serializer (options) {
}
// would be more efficient to use some kind of output stream instead
// of a string, probably...
Serializer.prototype.serialize = function (tree, indent) {
const tags = tree[0]
const attrs = node.attrs(tree)
const children = node.children(tree)
debug('starting with', tags)
let s = ''
const parts = tags.split(' ')
const tag = parts[0]
debug('parts', parts)
// 'document' is our pseudo-element for handling the fact that HTML
// documents are not trees when you allow comments, doctype, etc.
if (tag === 'document') {
return children.map(toHTML).join('')
}
if (indent >= 0) {
s += pad.slice(0, (indent) * 2)
}
/*
We don't want to indent flows of text. There's no real way to
know if we're in a flow of text because JavaScript could change
the CSS on a div. So we use this heuristic: if any of our
children are text, we don't indent, and neither do any of our
decendants
*/
let oldIndent = indent
if (indent >= 0) {
for (let child of children) {
if (typeof child === 'string' || typeof child === 'number') {
indent = undefined
}
}
}
if (tag === 'processinginstruction') {
s += '<' + children[0] + '>\n'
return s
}
if (tag === 'comment') {
s += '<!-- ' + children[0] + '-->\n'
return s
}
s += '<' + tag
if (parts.length > 1) {
s += ' class="' + parts.slice(1).join(' ') + '"'
}
const attrnames = Object.getOwnPropertyNames(attrs).sort()
let style = ''
for (let key of attrnames) {
let val = attrs[key]
debug('key:', key)
if (key.localeCompare('stylf') === 1) {
debug('after style.')
// we're past any style.foo entries
if (style !== '') {
s += ' style="' + style.trim() + '"'
style = ''
}
}
if (val === undefined) {
throw Error('attribute value undefined for attr '+key)
}
if (key.startsWith('style.')) {
style += key.slice(6) + ': ' + encode(val) + '; '
} else {
s += ' ' + key + '="' + encode(val) + '"'
}
}
if (style !== '') { // in case we finished the loop without emitting style
s += ' style="' + style.trim() + '"'
}
if (selfClosing[tag]) {
if (s.endsWith('"')) {
s += ' '
}
s += '/>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
s += '>'
if (indent >= 0 && children.length > 0) {
s += '\n'
}
for (let child of children) {
if (typeof child === 'function') {
throw Error('someone else should have dealt with this first')
}
if (typeof child === 'string') {
s += encode(child)
}
if (typeof child === 'number') {
s += child
}
if (Array.isArray(child)) {
s += this.serialize(child, indent + 1)
}
}
if (indent >= 0 && children.length > 0) |
s += '</' + tag + '>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
module.exports = toHTML
| {
s += pad.slice(0, (indent) * 2)
} | conditional_block |
toHTML.js | 'use strict'
const he = require('he')
const node = require('./node')
const debug = require('debug')('arraydom.toHTML')
const pad = Array(80).join(' ') // don't bother to indent more than 80
const selfClosing = {
// from http://xahlee.info/js/html5_non-closing_tag.html
// These are the tags that don't need an end-tag
area: true,
base: true,
br: true,
col: true,
command: true,
embed: true,
hr: true,
img: true,
input: true,
keygen: true,
link: true,
meta: true,
param: true,
source: true,
track: true,
wbr: true
}
function encode (s) {
//console.log('ENCODE', JSON.stringify(s))
if (typeof s === 'number') {
s = '' + s
}
return he.encode(s, {useNamedReferences: true})
}
/*
return a copy of this node where the second element is defintely the
attribute object. By calling this in all the right places, we allow
people to omit that element. Not sure yet whether that's a good
practice.
We might also want an in-place normalize-node or normalize-tree.
function normalizedNode (node) {
if (node.length === 0) {
throw Error('cant handle empty nodes')
}
if (node.length === 1) {
return [ node[0], {} ]
}
if (typeof node[1] === 'object' && !Array.isArray(node[1])) {
return node
}
const result = node.slice(1)
result.splice(0,0, node[0], {})
return result
}
*/
function toHTML (tree, options) {
options = options || {}
const s = new Serializer(options)
let indent = 0
if (options.compact) {
indent = undefined
}
return s.serialize(tree, indent)
}
function Serializer (options) {
}
// would be more efficient to use some kind of output stream instead
// of a string, probably...
Serializer.prototype.serialize = function (tree, indent) {
const tags = tree[0]
const attrs = node.attrs(tree)
const children = node.children(tree)
debug('starting with', tags)
let s = ''
const parts = tags.split(' ') | const tag = parts[0]
debug('parts', parts)
// 'document' is our pseudo-element for handling the fact that HTML
// documents are not trees when you allow comments, doctype, etc.
if (tag === 'document') {
return children.map(toHTML).join('')
}
if (indent >= 0) {
s += pad.slice(0, (indent) * 2)
}
/*
We don't want to indent flows of text. There's no real way to
know if we're in a flow of text because JavaScript could change
the CSS on a div. So we use this heuristic: if any of our
children are text, we don't indent, and neither do any of our
decendants
*/
let oldIndent = indent
if (indent >= 0) {
for (let child of children) {
if (typeof child === 'string' || typeof child === 'number') {
indent = undefined
}
}
}
if (tag === 'processinginstruction') {
s += '<' + children[0] + '>\n'
return s
}
if (tag === 'comment') {
s += '<!-- ' + children[0] + '-->\n'
return s
}
s += '<' + tag
if (parts.length > 1) {
s += ' class="' + parts.slice(1).join(' ') + '"'
}
const attrnames = Object.getOwnPropertyNames(attrs).sort()
let style = ''
for (let key of attrnames) {
let val = attrs[key]
debug('key:', key)
if (key.localeCompare('stylf') === 1) {
debug('after style.')
// we're past any style.foo entries
if (style !== '') {
s += ' style="' + style.trim() + '"'
style = ''
}
}
if (val === undefined) {
throw Error('attribute value undefined for attr '+key)
}
if (key.startsWith('style.')) {
style += key.slice(6) + ': ' + encode(val) + '; '
} else {
s += ' ' + key + '="' + encode(val) + '"'
}
}
if (style !== '') { // in case we finished the loop without emitting style
s += ' style="' + style.trim() + '"'
}
if (selfClosing[tag]) {
if (s.endsWith('"')) {
s += ' '
}
s += '/>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
s += '>'
if (indent >= 0 && children.length > 0) {
s += '\n'
}
for (let child of children) {
if (typeof child === 'function') {
throw Error('someone else should have dealt with this first')
}
if (typeof child === 'string') {
s += encode(child)
}
if (typeof child === 'number') {
s += child
}
if (Array.isArray(child)) {
s += this.serialize(child, indent + 1)
}
}
if (indent >= 0 && children.length > 0) {
s += pad.slice(0, (indent) * 2)
}
s += '</' + tag + '>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
module.exports = toHTML | random_line_split |
|
toHTML.js | 'use strict'
const he = require('he')
const node = require('./node')
const debug = require('debug')('arraydom.toHTML')
const pad = Array(80).join(' ') // don't bother to indent more than 80
const selfClosing = {
// from http://xahlee.info/js/html5_non-closing_tag.html
// These are the tags that don't need an end-tag
area: true,
base: true,
br: true,
col: true,
command: true,
embed: true,
hr: true,
img: true,
input: true,
keygen: true,
link: true,
meta: true,
param: true,
source: true,
track: true,
wbr: true
}
function encode (s) {
//console.log('ENCODE', JSON.stringify(s))
if (typeof s === 'number') {
s = '' + s
}
return he.encode(s, {useNamedReferences: true})
}
/*
return a copy of this node where the second element is defintely the
attribute object. By calling this in all the right places, we allow
people to omit that element. Not sure yet whether that's a good
practice.
We might also want an in-place normalize-node or normalize-tree.
function normalizedNode (node) {
if (node.length === 0) {
throw Error('cant handle empty nodes')
}
if (node.length === 1) {
return [ node[0], {} ]
}
if (typeof node[1] === 'object' && !Array.isArray(node[1])) {
return node
}
const result = node.slice(1)
result.splice(0,0, node[0], {})
return result
}
*/
function toHTML (tree, options) |
function Serializer (options) {
}
// would be more efficient to use some kind of output stream instead
// of a string, probably...
Serializer.prototype.serialize = function (tree, indent) {
const tags = tree[0]
const attrs = node.attrs(tree)
const children = node.children(tree)
debug('starting with', tags)
let s = ''
const parts = tags.split(' ')
const tag = parts[0]
debug('parts', parts)
// 'document' is our pseudo-element for handling the fact that HTML
// documents are not trees when you allow comments, doctype, etc.
if (tag === 'document') {
return children.map(toHTML).join('')
}
if (indent >= 0) {
s += pad.slice(0, (indent) * 2)
}
/*
We don't want to indent flows of text. There's no real way to
know if we're in a flow of text because JavaScript could change
the CSS on a div. So we use this heuristic: if any of our
children are text, we don't indent, and neither do any of our
decendants
*/
let oldIndent = indent
if (indent >= 0) {
for (let child of children) {
if (typeof child === 'string' || typeof child === 'number') {
indent = undefined
}
}
}
if (tag === 'processinginstruction') {
s += '<' + children[0] + '>\n'
return s
}
if (tag === 'comment') {
s += '<!-- ' + children[0] + '-->\n'
return s
}
s += '<' + tag
if (parts.length > 1) {
s += ' class="' + parts.slice(1).join(' ') + '"'
}
const attrnames = Object.getOwnPropertyNames(attrs).sort()
let style = ''
for (let key of attrnames) {
let val = attrs[key]
debug('key:', key)
if (key.localeCompare('stylf') === 1) {
debug('after style.')
// we're past any style.foo entries
if (style !== '') {
s += ' style="' + style.trim() + '"'
style = ''
}
}
if (val === undefined) {
throw Error('attribute value undefined for attr '+key)
}
if (key.startsWith('style.')) {
style += key.slice(6) + ': ' + encode(val) + '; '
} else {
s += ' ' + key + '="' + encode(val) + '"'
}
}
if (style !== '') { // in case we finished the loop without emitting style
s += ' style="' + style.trim() + '"'
}
if (selfClosing[tag]) {
if (s.endsWith('"')) {
s += ' '
}
s += '/>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
s += '>'
if (indent >= 0 && children.length > 0) {
s += '\n'
}
for (let child of children) {
if (typeof child === 'function') {
throw Error('someone else should have dealt with this first')
}
if (typeof child === 'string') {
s += encode(child)
}
if (typeof child === 'number') {
s += child
}
if (Array.isArray(child)) {
s += this.serialize(child, indent + 1)
}
}
if (indent >= 0 && children.length > 0) {
s += pad.slice(0, (indent) * 2)
}
s += '</' + tag + '>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
module.exports = toHTML
| {
options = options || {}
const s = new Serializer(options)
let indent = 0
if (options.compact) {
indent = undefined
}
return s.serialize(tree, indent)
} | identifier_body |
toHTML.js | 'use strict'
const he = require('he')
const node = require('./node')
const debug = require('debug')('arraydom.toHTML')
const pad = Array(80).join(' ') // don't bother to indent more than 80
const selfClosing = {
// from http://xahlee.info/js/html5_non-closing_tag.html
// These are the tags that don't need an end-tag
area: true,
base: true,
br: true,
col: true,
command: true,
embed: true,
hr: true,
img: true,
input: true,
keygen: true,
link: true,
meta: true,
param: true,
source: true,
track: true,
wbr: true
}
function | (s) {
//console.log('ENCODE', JSON.stringify(s))
if (typeof s === 'number') {
s = '' + s
}
return he.encode(s, {useNamedReferences: true})
}
/*
return a copy of this node where the second element is defintely the
attribute object. By calling this in all the right places, we allow
people to omit that element. Not sure yet whether that's a good
practice.
We might also want an in-place normalize-node or normalize-tree.
function normalizedNode (node) {
if (node.length === 0) {
throw Error('cant handle empty nodes')
}
if (node.length === 1) {
return [ node[0], {} ]
}
if (typeof node[1] === 'object' && !Array.isArray(node[1])) {
return node
}
const result = node.slice(1)
result.splice(0,0, node[0], {})
return result
}
*/
function toHTML (tree, options) {
options = options || {}
const s = new Serializer(options)
let indent = 0
if (options.compact) {
indent = undefined
}
return s.serialize(tree, indent)
}
function Serializer (options) {
}
// would be more efficient to use some kind of output stream instead
// of a string, probably...
Serializer.prototype.serialize = function (tree, indent) {
const tags = tree[0]
const attrs = node.attrs(tree)
const children = node.children(tree)
debug('starting with', tags)
let s = ''
const parts = tags.split(' ')
const tag = parts[0]
debug('parts', parts)
// 'document' is our pseudo-element for handling the fact that HTML
// documents are not trees when you allow comments, doctype, etc.
if (tag === 'document') {
return children.map(toHTML).join('')
}
if (indent >= 0) {
s += pad.slice(0, (indent) * 2)
}
/*
We don't want to indent flows of text. There's no real way to
know if we're in a flow of text because JavaScript could change
the CSS on a div. So we use this heuristic: if any of our
children are text, we don't indent, and neither do any of our
decendants
*/
let oldIndent = indent
if (indent >= 0) {
for (let child of children) {
if (typeof child === 'string' || typeof child === 'number') {
indent = undefined
}
}
}
if (tag === 'processinginstruction') {
s += '<' + children[0] + '>\n'
return s
}
if (tag === 'comment') {
s += '<!-- ' + children[0] + '-->\n'
return s
}
s += '<' + tag
if (parts.length > 1) {
s += ' class="' + parts.slice(1).join(' ') + '"'
}
const attrnames = Object.getOwnPropertyNames(attrs).sort()
let style = ''
for (let key of attrnames) {
let val = attrs[key]
debug('key:', key)
if (key.localeCompare('stylf') === 1) {
debug('after style.')
// we're past any style.foo entries
if (style !== '') {
s += ' style="' + style.trim() + '"'
style = ''
}
}
if (val === undefined) {
throw Error('attribute value undefined for attr '+key)
}
if (key.startsWith('style.')) {
style += key.slice(6) + ': ' + encode(val) + '; '
} else {
s += ' ' + key + '="' + encode(val) + '"'
}
}
if (style !== '') { // in case we finished the loop without emitting style
s += ' style="' + style.trim() + '"'
}
if (selfClosing[tag]) {
if (s.endsWith('"')) {
s += ' '
}
s += '/>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
s += '>'
if (indent >= 0 && children.length > 0) {
s += '\n'
}
for (let child of children) {
if (typeof child === 'function') {
throw Error('someone else should have dealt with this first')
}
if (typeof child === 'string') {
s += encode(child)
}
if (typeof child === 'number') {
s += child
}
if (Array.isArray(child)) {
s += this.serialize(child, indent + 1)
}
}
if (indent >= 0 && children.length > 0) {
s += pad.slice(0, (indent) * 2)
}
s += '</' + tag + '>'
if (oldIndent >= 0) {
s += '\n'
}
return s
}
module.exports = toHTML
| encode | identifier_name |
greedy.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
TODO...
"""
__all__ = ['GreedyPlayer']
import random
from jdhp.tictactoe.player.abstract import Player
class Gr | layer):
"""
TODO...
"""
def play(self, game, state):
"""
TODO...
"""
action_list = game.getSetOfValidActions(state)
choosen_action = None
# Choose actions that lead to immediate victory...
for action in action_list:
next_state = game.nextState(state, action, self)
if game.hasWon(self, next_state):
choosen_action = action
break
# ... otherwise choose randomly
if choosen_action is None:
#print("randomly choose action") # debug
choosen_action = random.choice(action_list)
return choosen_action
| eedyPlayer(P | identifier_name |
greedy.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
TODO...
"""
__all__ = ['GreedyPlayer']
import random
from jdhp.tictactoe.player.abstract import Player
class GreedyPlayer(Player):
"""
TODO...
"""
def play(self, game, state):
"""
TODO...
"""
action_list = game.getSetOfValidActions(state)
choosen_action = None
# Choose actions that lead to immediate victory...
for action in action_list:
next_state = game.nextState(state, action, self)
if game.hasWon(self, next_state):
choosen_action = action
break
# ... otherwise choose randomly
if choosen_action is None:
#print("randomly choose action") # debug
ch | return choosen_action
| oosen_action = random.choice(action_list)
| conditional_block |
greedy.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
TODO...
"""
__all__ = ['GreedyPlayer']
import random
from jdhp.tictactoe.player.abstract import Player
class GreedyPlayer(Player):
"""
TODO...
"""
def play(self, game, state):
"""
TODO...
"""
action_list = game.getSetOfValidActions(state)
choosen_action = None
# Choose actions that lead to immediate victory...
for action in action_list:
next_state = game.nextState(state, action, self)
if game.hasWon(self, next_state):
choosen_action = action
break
# ... otherwise choose randomly
if choosen_action is None:
#print("randomly choose action") # debug
choosen_action = random.choice(action_list)
| return choosen_action | random_line_split |
|
greedy.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
TODO...
"""
__all__ = ['GreedyPlayer']
import random
from jdhp.tictactoe.player.abstract import Player
class GreedyPlayer(Player):
"""
TODO...
"""
def play(self, game, state):
"" | "
TODO...
"""
action_list = game.getSetOfValidActions(state)
choosen_action = None
# Choose actions that lead to immediate victory...
for action in action_list:
next_state = game.nextState(state, action, self)
if game.hasWon(self, next_state):
choosen_action = action
break
# ... otherwise choose randomly
if choosen_action is None:
#print("randomly choose action") # debug
choosen_action = random.choice(action_list)
return choosen_action | identifier_body |
|
test16a.js | if(typeof exports === 'object') {
var assert = require("assert");
var alasql = require('../alasql.js');
};
if(false) |
alasql.tables.schools = new alasql.Table({data:[
{schoolid:1, schoolname: 'Northern School', regionid:'north'},
{schoolid:2, schoolname: 'Southern School', regionid:'south'},
{schoolid:3, schoolname: 'Eastern School', regionid:'east'},
{schoolid:4, schoolname: 'Western School', regionid:'west'},
]});
var res = alasql.exec('SELECT * '+
' FROM students '+
' LEFT JOIN courses ON students.courseid = courses.courseid AND students.schoolid = courses.schoolid'+
' LEFT JOIN schools ON students.schoolid = schools.schoolid '+
' GROUP BY schoolid, courseid, studentname '+
' ORDER BY studentname DESC' );
console.log(res);
assert.equal(5, res.length);
assert.equal(1, res[0].courseid);
assert.equal(2, res[1].courseid);
assert.equal(2, res[2].courseid);
assert.equal(7, res[3].courseid);
assert.equal(4, res[4].courseid);
alasql('drop database test16');
done();
});
});
} | {
describe('Test 16b', function() {
it('Grouping', function(done){
alasql('create database test16;use test16');
alasql.tables.students = new alasql.Table({data: [
{studentid:58,studentname:'Sarah Patrik',courseid:1, startdate: new Date(2014,0,10), amt:10, schoolid:1},
{studentid:102,studentname:'John Stewart', courseid:2, startdate: new Date(2014,0,20), amt:20, schoolid:1},
{studentid:103,studentname:'Joan Blackmore', courseid:2, startdate: new Date(2014,0,20), amt:20, schoolid:1},
{studentid:104,studentname:'Anna Wooden', courseid:4, startdate: new Date(2014,0,15), amt:30, schoolid:2},
{studentid:150,studentname:'Astrid Carlson', courseid:7, startdate: new Date(2014,0,15), amt:30, schoolid:1},
]});
alasql.tables.courses = new alasql.Table({data:[
{courseid:1, coursename: 'first', schoolid:1},
{courseid:2, coursename: 'second', schoolid:1},
{courseid:3, coursename: 'third', schoolid:2},
{courseid:4, coursename: 'fourth', schoolid:2},
{courseid:5, coursename: 'fifth', schoolid:2}
]}); | conditional_block |
test16a.js | if(typeof exports === 'object') {
var assert = require("assert");
var alasql = require('../alasql.js');
};
if(false) {
describe('Test 16b', function() {
it('Grouping', function(done){
alasql('create database test16;use test16');
alasql.tables.students = new alasql.Table({data: [
{studentid:58,studentname:'Sarah Patrik',courseid:1, startdate: new Date(2014,0,10), amt:10, schoolid:1},
{studentid:102,studentname:'John Stewart', courseid:2, startdate: new Date(2014,0,20), amt:20, schoolid:1},
{studentid:103,studentname:'Joan Blackmore', courseid:2, startdate: new Date(2014,0,20), amt:20, schoolid:1},
{studentid:104,studentname:'Anna Wooden', courseid:4, startdate: new Date(2014,0,15), amt:30, schoolid:2},
{studentid:150,studentname:'Astrid Carlson', courseid:7, startdate: new Date(2014,0,15), amt:30, schoolid:1},
]});
alasql.tables.courses = new alasql.Table({data:[
{courseid:1, coursename: 'first', schoolid:1},
{courseid:2, coursename: 'second', schoolid:1},
{courseid:3, coursename: 'third', schoolid:2}, | {schoolid:1, schoolname: 'Northern School', regionid:'north'},
{schoolid:2, schoolname: 'Southern School', regionid:'south'},
{schoolid:3, schoolname: 'Eastern School', regionid:'east'},
{schoolid:4, schoolname: 'Western School', regionid:'west'},
]});
var res = alasql.exec('SELECT * '+
' FROM students '+
' LEFT JOIN courses ON students.courseid = courses.courseid AND students.schoolid = courses.schoolid'+
' LEFT JOIN schools ON students.schoolid = schools.schoolid '+
' GROUP BY schoolid, courseid, studentname '+
' ORDER BY studentname DESC' );
console.log(res);
assert.equal(5, res.length);
assert.equal(1, res[0].courseid);
assert.equal(2, res[1].courseid);
assert.equal(2, res[2].courseid);
assert.equal(7, res[3].courseid);
assert.equal(4, res[4].courseid);
alasql('drop database test16');
done();
});
});
} | {courseid:4, coursename: 'fourth', schoolid:2},
{courseid:5, coursename: 'fifth', schoolid:2}
]});
alasql.tables.schools = new alasql.Table({data:[ | random_line_split |
switch-between-vi-emacs.py | #!/usr/bin/env python
"""
Example that displays how to switch between Emacs and Vi input mode.
"""
from prompt_toolkit import prompt
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.keys import Keys
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
def run():
# Create a set of key bindings that have Vi mode enabled if the
# ``vi_mode_enabled`` is True..
manager = KeyBindingManager.for_prompt()
# Add an additional key binding for toggling this flag.
@manager.registry.add_binding(Keys.F4)
def _(event):
" Toggle between Emacs and Vi mode. "
if event.cli.editing_mode == EditingMode.VI:
|
else:
event.cli.editing_mode = EditingMode.VI
# Add a bottom toolbar to display the status.
style = style_from_dict({
Token.Toolbar: 'reverse',
})
def get_bottom_toolbar_tokens(cli):
" Display the current input mode. "
text = 'Vi' if cli.editing_mode == EditingMode.VI else 'Emacs'
return [
(Token.Toolbar, ' [F4] %s ' % text)
]
prompt('> ', key_bindings_registry=manager.registry,
get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
style=style)
if __name__ == '__main__':
run()
| event.cli.editing_mode = EditingMode.EMACS | conditional_block |
switch-between-vi-emacs.py | #!/usr/bin/env python
"""
Example that displays how to switch between Emacs and Vi input mode.
"""
from prompt_toolkit import prompt
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.keys import Keys
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
def run():
# Create a set of key bindings that have Vi mode enabled if the
# ``vi_mode_enabled`` is True..
manager = KeyBindingManager.for_prompt()
# Add an additional key binding for toggling this flag.
@manager.registry.add_binding(Keys.F4)
def _(event):
" Toggle between Emacs and Vi mode. "
if event.cli.editing_mode == EditingMode.VI:
event.cli.editing_mode = EditingMode.EMACS
else:
event.cli.editing_mode = EditingMode.VI
# Add a bottom toolbar to display the status.
style = style_from_dict({
Token.Toolbar: 'reverse',
})
def get_bottom_toolbar_tokens(cli):
|
prompt('> ', key_bindings_registry=manager.registry,
get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
style=style)
if __name__ == '__main__':
run()
| " Display the current input mode. "
text = 'Vi' if cli.editing_mode == EditingMode.VI else 'Emacs'
return [
(Token.Toolbar, ' [F4] %s ' % text)
] | identifier_body |
switch-between-vi-emacs.py | #!/usr/bin/env python
"""
Example that displays how to switch between Emacs and Vi input mode.
"""
from prompt_toolkit import prompt
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.keys import Keys
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
def run():
# Create a set of key bindings that have Vi mode enabled if the
# ``vi_mode_enabled`` is True..
manager = KeyBindingManager.for_prompt()
# Add an additional key binding for toggling this flag.
@manager.registry.add_binding(Keys.F4)
def _(event):
" Toggle between Emacs and Vi mode. "
if event.cli.editing_mode == EditingMode.VI:
event.cli.editing_mode = EditingMode.EMACS
else:
event.cli.editing_mode = EditingMode.VI
# Add a bottom toolbar to display the status.
style = style_from_dict({
Token.Toolbar: 'reverse', | })
def get_bottom_toolbar_tokens(cli):
" Display the current input mode. "
text = 'Vi' if cli.editing_mode == EditingMode.VI else 'Emacs'
return [
(Token.Toolbar, ' [F4] %s ' % text)
]
prompt('> ', key_bindings_registry=manager.registry,
get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
style=style)
if __name__ == '__main__':
run() | random_line_split |
|
switch-between-vi-emacs.py | #!/usr/bin/env python
"""
Example that displays how to switch between Emacs and Vi input mode.
"""
from prompt_toolkit import prompt
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.keys import Keys
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
def run():
# Create a set of key bindings that have Vi mode enabled if the
# ``vi_mode_enabled`` is True..
manager = KeyBindingManager.for_prompt()
# Add an additional key binding for toggling this flag.
@manager.registry.add_binding(Keys.F4)
def _(event):
" Toggle between Emacs and Vi mode. "
if event.cli.editing_mode == EditingMode.VI:
event.cli.editing_mode = EditingMode.EMACS
else:
event.cli.editing_mode = EditingMode.VI
# Add a bottom toolbar to display the status.
style = style_from_dict({
Token.Toolbar: 'reverse',
})
def | (cli):
" Display the current input mode. "
text = 'Vi' if cli.editing_mode == EditingMode.VI else 'Emacs'
return [
(Token.Toolbar, ' [F4] %s ' % text)
]
prompt('> ', key_bindings_registry=manager.registry,
get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
style=style)
if __name__ == '__main__':
run()
| get_bottom_toolbar_tokens | identifier_name |
ip_vtk58.py | = "v5.8.0"
VTK_BASE_VERSION = "vtk-5.8"
# this patch does three things:
# 1. adds try/catch blocks to all python method calls in order
# to trap bad_alloc exceptions
# 2. implements my scheme for turning all VTK errors into Python exceptions
# by making use of a special output window class
# 3. gives up the GIL around all VTK calls. This is also necessary
# for 2 not to deadlock on multi-cores.
EXC_PATCH = "pyvtk580_tryexcept_and_pyexceptions.diff"
# fixes attributes in vtkproperty for shader use in python
VTKPRPRTY_PATCH = "vtkProperty_PyShaderVar.diff"
# recent segfault with vtk 5.6.1 and wxPython 2.8.11.0
# see here for more info:
# http://vtk.1045678.n5.nabble.com/wx-python-scripts-segfault-td1234471.html
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH = "wxvtkrwi_displayid_segfault.diff"
dependencies = ['CMake']
class VTK58(InstallPackage):
def __init__(self):
self.source_dir = os.path.join(config.archive_dir, BASENAME)
self.build_dir = os.path.join(config.build_dir, '%s-build' %
(BASENAME,))
self.inst_dir = os.path.join(config.inst_dir, BASENAME)
self.exc_patch_src = os.path.join(config.patches_dir, EXC_PATCH)
self.exc_patch_dst = os.path.join(config.archive_dir, EXC_PATCH)
self.vtkprprty_patch_filename = os.path.join(config.patches_dir,
VTKPRPRTY_PATCH)
self.wxvtkrwi_displayid_segfault_patch_filename = os.path.join(
config.patches_dir,
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH)
config.VTK_LIB = os.path.join(self.inst_dir, 'lib')
# whatever the case may be, we have to register VTK variables
if os.name == 'nt':
# on Win, inst/VTK/bin contains the so files
config.VTK_SODIR = os.path.join(self.inst_dir, 'bin')
# inst/VTK/lib/site-packages the VTK python package
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'site-packages')
else:
# on *ix, inst/VTK/lib contains DLLs
config.VTK_SODIR = os.path.join(
config.VTK_LIB, VTK_BASE_VERSION)
# on *ix, inst/lib/python2.5/site-packages contains the
# VTK python package
# sys.version is (2, 5, 0, 'final', 0)
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'python%d.%d/site-packages' % \
sys.version_info[0:2])
# this contains the VTK cmake config (same on *ix and Win)
config.VTK_DIR = os.path.join(config.VTK_LIB, VTK_BASE_VERSION)
def get(self):
|
os.chdir(self.source_dir)
# default git-generated patch, so needs -p1
ret = os.system(
"%s -p1 < %s" % (config.PATCH, self.exc_patch_dst))
if ret != 0:
utils.error(
"Could not apply EXC patch. Fix and try again.")
# # VTKPRPRTY PATCH
# utils.output("Applying VTKPRPRTY patch")
# os.chdir(os.path.join(self.source_dir, 'Rendering'))
# ret = os.system(
# "%s -p0 < %s" % (config.PATCH, self.vtkprprty_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply VTKPRPRTY patch. Fix and try again.")
# # WXVTKRWI_DISPLAYID_SEGFAULT patch
# utils.output("Applying VTKWXRWI_DISPLAYID_SEGFAULT patch")
# os.chdir(self.source_dir)
# # default git-generated patch, so needs -p1
# ret = os.system(
# "%s -p1 < %s" % (config.PATCH,
# self.wxvtkrwi_displayid_segfault_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply WXVTKRWI_DISPLAYID_SEGFAULT patch. Fix and try again.")
def unpack(self):
pass
def configure(self):
if os.path.exists(
os.path.join(self.build_dir, 'CMakeFiles/cmake.check_cache')):
utils.output("VTK build already configured.")
return
if not os.path.exists(self.build_dir):
os.mkdir(self.build_dir)
cmake_params = "-DBUILD_SHARED_LIBS=ON " \
"-DBUILD_TESTING=OFF " \
"-DCMAKE_BUILD_TYPE=RelWithDebInfo " \
"-DCMAKE_INSTALL_PREFIX=%s " \
"-DVTK_USE_TK=NO " \
"-DVTK_USE_METAIO=ON " \
"-DVTK_USE_PARALLEL=ON " \
"-DPYTHON_EXECUTABLE=%s " \
"-DPYTHON_LIBRARY=%s " \
"-DPYTHON_INCLUDE_PATH=%s " \
"-DVTK_WRAP_PYTHON=ON " % (self.inst_dir,
config.PYTHON_EXECUTABLE,
config.PYTHON_LIBRARY,
config.PYTHON_INCLUDE_PATH)
ret = utils.cmake_command(self.build_dir, self.source_dir,
cmake_params)
if ret != 0:
utils.error("Could not configure VTK. Fix and try again.")
def build(self):
posix_file = os.path.join(self.build_dir,
'bin/libvtkWidgetsPython.so')
nt_file = os.path.join(self.build_dir, 'bin', config.BUILD_TARGET,
'vtkWidgetsPythonD.dll')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already built. Skipping build step.")
else:
os.chdir(self.build_dir)
ret = utils.make_command('VTK.sln')
if ret != 0:
utils.error("Error building VTK. Fix and try again.")
def install(self):
posix_file = os.path.join(self.inst_dir, 'bin/vtkpython')
nt_file = os.path.join(self.inst_dir, 'bin', 'vtkpython.exe')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already installed. Skipping build step.")
else:
# python 2.5.2 setup.py complains that this does not exist
# with VTK PV-3-2-1. This is only on installations with
# EasyInstall / Python Eggs, where the VTK setup.py uses
# EasyInstall and not standard distutils. gah!
# just tested with VTK 5.8.0 and Python 2.7.2
# it indeed installs VTK_PYTHON/VTK-5.8.0-py2.7.egg
# but due to the site.py and easy-install.pth magic in there,
# adding VTK_PYTHON to the PYTHONPATH still works. We can keep
# pip, yay!
if not os.path.exists(config.VTK_PYTHON):
os.makedirs(config.VTK_PYTHON)
os.chdir(self.build_dir)
# we save, set and restore the PP env variable, else
# stupid setuptools complains
save_env = os.environ.get('PYTHONPATH', '')
os.environ['PYTHONPATH'] = config.VTK_PYTHON
ret = utils.make_command('VTK.sln', install=True)
os.environ['PYTHONPATH'] = save_env
if ret != 0:
utils.error("Could not install VTK. Fix and try again.")
# now do some surgery on VTKConfig.cmake and
# VTKLibraryDepends.cmake so builds of VTK-dependent libraries
# with only the DRE to link with Just Work(tm)
# on windows, we need to replace backslash with forward slash
# as that's the style used by the config files. On *ix mostly
# harmless
idp = re.sub(r'\\','/', config.inst_dir)
for fn in [os.path.join(config.VTK_DIR, 'VTKConfig.cmake'),
os.path.join(config | if os.path.exists(self.source_dir):
utils.output("VTK already checked out, skipping step.")
else:
utils.goto_archive()
ret = os.system("git clone %s %s" % (GIT_REPO, BASENAME))
if ret != 0:
utils.error("Could not clone VTK repo. Fix and try again.")
os.chdir(self.source_dir)
ret = os.system("git checkout %s" % (GIT_TAG,))
if ret != 0:
utils.error("Could not checkout VTK %s. Fix and try again." % (GIT_TAG,))
if not os.path.exists(self.exc_patch_dst):
utils.output("Applying EXC patch")
# we do this copy so we can see if the patch has been done yet or not
shutil.copyfile(self.exc_patch_src, self.exc_patch_dst) | identifier_body |
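# The get() step above encodes a useful pattern: copy the .diff into the
# archive directory as a sentinel, and only run patch -p1 when the sentinel
# is absent, so re-running the build script stays safe. A minimal generic
# sketch (the helper name and error handling are assumptions):
import os
import shutil

def apply_patch_once(src_diff, sentinel, source_dir, patch_cmd='patch'):
    """Apply src_diff inside source_dir unless sentinel already exists."""
    if os.path.exists(sentinel):
        return  # patch was applied on an earlier run
    shutil.copyfile(src_diff, sentinel)  # mark the patch as applied
    os.chdir(source_dir)
    if os.system('%s -p1 < %s' % (patch_cmd, sentinel)) != 0:
        raise RuntimeError('could not apply %s' % src_diff)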
ip_vtk58.py | = "v5.8.0"
VTK_BASE_VERSION = "vtk-5.8"
# this patch does three things:
# 1. adds try/catch blocks to all python method calls in order
# to trap bad_alloc exceptions
# 2. implements my scheme for turning all VTK errors into Python exceptions
# by making use of a special output window class
# 3. gives up the GIL around all VTK calls. This is also necessary
# for 2 not to deadlock on multi-cores.
EXC_PATCH = "pyvtk580_tryexcept_and_pyexceptions.diff"
# fixes attributes in vtkproperty for shader use in python
VTKPRPRTY_PATCH = "vtkProperty_PyShaderVar.diff"
# recent segfault with vtk 5.6.1 and wxPython 2.8.11.0
# see here for more info:
# http://vtk.1045678.n5.nabble.com/wx-python-scripts-segfault-td1234471.html
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH = "wxvtkrwi_displayid_segfault.diff"
dependencies = ['CMake']
class VTK58(InstallPackage):
def __init__(self):
self.source_dir = os.path.join(config.archive_dir, BASENAME)
self.build_dir = os.path.join(config.build_dir, '%s-build' %
(BASENAME,))
self.inst_dir = os.path.join(config.inst_dir, BASENAME)
self.exc_patch_src = os.path.join(config.patches_dir, EXC_PATCH)
self.exc_patch_dst = os.path.join(config.archive_dir, EXC_PATCH)
self.vtkprprty_patch_filename = os.path.join(config.patches_dir,
VTKPRPRTY_PATCH)
self.wxvtkrwi_displayid_segfault_patch_filename = os.path.join(
config.patches_dir,
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH)
config.VTK_LIB = os.path.join(self.inst_dir, 'lib')
# whatever the case may be, we have to register VTK variables
if os.name == 'nt':
# on Win, inst/VTK/bin contains the DLLs
config.VTK_SODIR = os.path.join(self.inst_dir, 'bin')
# inst/VTK/lib/site-packages contains the VTK python package
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'site-packages')
else:
# on *ix, inst/VTK/lib contains the shared objects (.so files)
config.VTK_SODIR = os.path.join(
config.VTK_LIB, VTK_BASE_VERSION)
# on *ix, inst/lib/python2.5/site-packages contains the
# VTK python package
# sys.version_info is (2, 5, 0, 'final', 0)
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'python%d.%d/site-packages' % \
sys.version_info[0:2])
# this contains the VTK cmake config (same on *ix and Win)
config.VTK_DIR = os.path.join(config.VTK_LIB, VTK_BASE_VERSION)
def get(self):
if os.path.exists(self.source_dir):
utils.output("VTK already checked out, skipping step.")
else:
utils.goto_archive()
ret = os.system("git clone %s %s" % (GIT_REPO, BASENAME))
if ret != 0:
utils.error("Could not clone VTK repo. Fix and try again.")
os.chdir(self.source_dir)
ret = os.system("git checkout %s" % (GIT_TAG,))
if ret != 0:
utils.error("Could not checkout VTK %s. Fix and try again." % (GIT_TAG,))
if not os.path.exists(self.exc_patch_dst):
utils.output("Applying EXC patch")
# we do this copy so we can see if the patch has been done yet or not
shutil.copyfile(self.exc_patch_src, self.exc_patch_dst)
os.chdir(self.source_dir)
# default git-generated patch, so needs -p1
ret = os.system(
"%s -p1 < %s" % (config.PATCH, self.exc_patch_dst))
if ret != 0:
utils.error(
"Could not apply EXC patch. Fix and try again.")
# # VTKPRPRTY PATCH
# utils.output("Applying VTKPRPRTY patch")
# os.chdir(os.path.join(self.source_dir, 'Rendering'))
# ret = os.system(
# "%s -p0 < %s" % (config.PATCH, self.vtkprprty_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply VTKPRPRTY patch. Fix and try again.")
# # WXVTKRWI_DISPLAYID_SEGFAULT patch
# utils.output("Applying VTKWXRWI_DISPLAYID_SEGFAULT patch")
# os.chdir(self.source_dir)
# # default git-generated patch, so needs -p1
# ret = os.system(
# "%s -p1 < %s" % (config.PATCH,
# self.wxvtkrwi_displayid_segfault_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply WXVTKRWI_DISPLAYID_SEGFAULT patch. Fix and try again.")
def unpack(self):
pass
def configure(self):
if os.path.exists(
os.path.join(self.build_dir, 'CMakeFiles/cmake.check_cache')):
utils.output("VTK build already configured.")
return
if not os.path.exists(self.build_dir):
os.mkdir(self.build_dir)
cmake_params = "-DBUILD_SHARED_LIBS=ON " \
"-DBUILD_TESTING=OFF " \
"-DCMAKE_BUILD_TYPE=RelWithDebInfo " \
"-DCMAKE_INSTALL_PREFIX=%s " \
"-DVTK_USE_TK=NO " \
"-DVTK_USE_METAIO=ON " \
"-DVTK_USE_PARALLEL=ON " \
"-DPYTHON_EXECUTABLE=%s " \
"-DPYTHON_LIBRARY=%s " \
"-DPYTHON_INCLUDE_PATH=%s " \
"-DVTK_WRAP_PYTHON=ON " % (self.inst_dir,
config.PYTHON_EXECUTABLE,
config.PYTHON_LIBRARY,
config.PYTHON_INCLUDE_PATH)
ret = utils.cmake_command(self.build_dir, self.source_dir,
cmake_params)
if ret != 0:
utils.error("Could not configure VTK. Fix and try again.")
def build(self):
posix_file = os.path.join(self.build_dir,
'bin/libvtkWidgetsPython.so')
nt_file = os.path.join(self.build_dir, 'bin', config.BUILD_TARGET,
'vtkWidgetsPythonD.dll')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already built. Skipping build step.")
else:
os.chdir(self.build_dir)
ret = utils.make_command('VTK.sln')
if ret != 0:
utils.error("Error building VTK. Fix and try again.")
def install(self):
posix_file = os.path.join(self.inst_dir, 'bin/vtkpython')
nt_file = os.path.join(self.inst_dir, 'bin', 'vtkpython.exe')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already installed. Skipping build step.")
else:
# python 2.5.2 setup.py complains that this does not exist
# with VTK PV-3-2-1. This is only on installations with
# EasyInstall / Python Eggs, where the VTK setup.py uses
# EasyInstall and not standard distutils. gah!
# just tested with VTK 5.8.0 and Python 2.7.2
# it indeed installs VTK_PYTHON/VTK-5.8.0-py2.7.egg
# but due to the site.py and easy-install.pth magic in there,
# adding VTK_PYTHON to the PYTHONPATH still works. We can keep
# pip, yay!
if not os.path.exists(config.VTK_PYTHON):
|
os.chdir(self.build_dir)
# we save, set and restore the PP env variable, else
# stupid setuptools complains
save_env = os.environ.get('PYTHONPATH', '')
os.environ['PYTHONPATH'] = config.VTK_PYTHON
ret = utils.make_command('VTK.sln', install=True)
os.environ['PYTHONPATH'] = save_env
if ret != 0:
utils.error("Could not install VTK. Fix and try again.")
# now do some surgery on VTKConfig.cmake and
# VTKLibraryDepends.cmake so builds of VTK-dependent libraries
# with only the DRE to link with Just Work(tm)
# on windows, we need to replace backslash with forward slash
# as that's the style used by the config files. On *ix mostly
# harmless
idp = re.sub(r'\\','/', config.inst_dir)
for fn in [os.path.join(config.VTK_DIR, 'VTKConfig.cmake'),
os.path.join(config | os.makedirs(config.VTK_PYTHON) | conditional_block |
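# The *ix site-packages path computed above is derived purely from the
# running interpreter; an illustrative one-liner to sanity-check the value:
import sys
print('python%d.%d/site-packages' % sys.version_info[0:2])
# prints e.g. 'python2.7/site-packages' under Python 2.7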
ip_vtk58.py | from install_package import InstallPackage
import os
import re
import shutil
import sys
import utils
BASENAME = "VTK"
GIT_REPO = "http://vtk.org/VTK.git"
GIT_TAG = "v5.8.0"
VTK_BASE_VERSION = "vtk-5.8"
# this patch does three things:
# 1. adds try/catch blocks to all python method calls in order
# to trap bad_alloc exceptions
# 2. implements my scheme for turning all VTK errors into Python exceptions
# by making use of a special output window class
# 3. gives up the GIL around all VTK calls. This is also necessary
# for 2 not to deadlock on multi-cores.
EXC_PATCH = "pyvtk580_tryexcept_and_pyexceptions.diff"
# fixes attributes in vtkproperty for shader use in python
VTKPRPRTY_PATCH = "vtkProperty_PyShaderVar.diff"
# recent segfault with vtk 5.6.1 and wxPython 2.8.11.0
# see here for more info:
# http://vtk.1045678.n5.nabble.com/wx-python-scripts-segfault-td1234471.html
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH = "wxvtkrwi_displayid_segfault.diff"
dependencies = ['CMake']
class VTK58(InstallPackage):
def __init__(self):
self.source_dir = os.path.join(config.archive_dir, BASENAME)
self.build_dir = os.path.join(config.build_dir, '%s-build' %
(BASENAME,))
self.inst_dir = os.path.join(config.inst_dir, BASENAME)
self.exc_patch_src = os.path.join(config.patches_dir, EXC_PATCH)
self.exc_patch_dst = os.path.join(config.archive_dir, EXC_PATCH)
self.vtkprprty_patch_filename = os.path.join(config.patches_dir,
VTKPRPRTY_PATCH)
self.wxvtkrwi_displayid_segfault_patch_filename = os.path.join(
config.patches_dir,
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH)
config.VTK_LIB = os.path.join(self.inst_dir, 'lib')
# whatever the case may be, we have to register VTK variables
if os.name == 'nt':
# on Win, inst/VTK/bin contains the DLLs
config.VTK_SODIR = os.path.join(self.inst_dir, 'bin')
# inst/VTK/lib/site-packages contains the VTK python package
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'site-packages')
else:
# on *ix, inst/VTK/lib contains the shared objects (.so files)
config.VTK_SODIR = os.path.join(
config.VTK_LIB, VTK_BASE_VERSION)
# on *ix, inst/lib/python2.5/site-packages contains the
# VTK python package
# sys.version_info is (2, 5, 0, 'final', 0)
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'python%d.%d/site-packages' % \
sys.version_info[0:2])
# this contains the VTK cmake config (same on *ix and Win)
config.VTK_DIR = os.path.join(config.VTK_LIB, VTK_BASE_VERSION)
def get(self):
if os.path.exists(self.source_dir):
utils.output("VTK already checked out, skipping step.")
else:
utils.goto_archive()
ret = os.system("git clone %s %s" % (GIT_REPO, BASENAME))
if ret != 0:
utils.error("Could not clone VTK repo. Fix and try again.")
os.chdir(self.source_dir)
ret = os.system("git checkout %s" % (GIT_TAG,))
if ret != 0:
utils.error("Could not checkout VTK %s. Fix and try again." % (GIT_TAG,))
if not os.path.exists(self.exc_patch_dst):
utils.output("Applying EXC patch")
# we do this copy so we can see if the patch has been done yet or not
shutil.copyfile(self.exc_patch_src, self.exc_patch_dst)
os.chdir(self.source_dir)
# default git-generated patch, so needs -p1
ret = os.system(
"%s -p1 < %s" % (config.PATCH, self.exc_patch_dst))
if ret != 0:
utils.error(
"Could not apply EXC patch. Fix and try again.")
# # VTKPRPRTY PATCH
# utils.output("Applying VTKPRPRTY patch")
# os.chdir(os.path.join(self.source_dir, 'Rendering'))
# ret = os.system(
# "%s -p0 < %s" % (config.PATCH, self.vtkprprty_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply VTKPRPRTY patch. Fix and try again.")
# # WXVTKRWI_DISPLAYID_SEGFAULT patch
# utils.output("Applying VTKWXRWI_DISPLAYID_SEGFAULT patch")
# os.chdir(self.source_dir)
# # default git-generated patch, so needs -p1
# ret = os.system(
# "%s -p1 < %s" % (config.PATCH,
# self.wxvtkrwi_displayid_segfault_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply WXVTKRWI_DISPLAYID_SEGFAULT patch. Fix and try again.")
def unpack(self):
pass
def configure(self):
if os.path.exists(
os.path.join(self.build_dir, 'CMakeFiles/cmake.check_cache')):
utils.output("VTK build already configured.")
return
if not os.path.exists(self.build_dir):
os.mkdir(self.build_dir)
cmake_params = "-DBUILD_SHARED_LIBS=ON " \
"-DBUILD_TESTING=OFF " \
"-DCMAKE_BUILD_TYPE=RelWithDebInfo " \
"-DCMAKE_INSTALL_PREFIX=%s " \
"-DVTK_USE_TK=NO " \
"-DVTK_USE_METAIO=ON " \
"-DVTK_USE_PARALLEL=ON " \
"-DPYTHON_EXECUTABLE=%s " \
"-DPYTHON_LIBRARY=%s " \
"-DPYTHON_INCLUDE_PATH=%s " \
"-DVTK_WRAP_PYTHON=ON " % (self.inst_dir,
config.PYTHON_EXECUTABLE,
config.PYTHON_LIBRARY,
config.PYTHON_INCLUDE_PATH)
ret = utils.cmake_command(self.build_dir, self.source_dir,
cmake_params)
if ret != 0:
utils.error("Could not configure VTK. Fix and try again.")
def build(self):
posix_file = os.path.join(self.build_dir,
'bin/libvtkWidgetsPython.so')
nt_file = os.path.join(self.build_dir, 'bin', config.BUILD_TARGET,
'vtkWidgetsPythonD.dll')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already built. Skipping build step.")
else:
os.chdir(self.build_dir)
ret = utils.make_command('VTK.sln')
if ret != 0:
utils.error("Error building VTK. Fix and try again.")
def install(self):
posix_file = os.path.join(self.inst_dir, 'bin/vtkpython')
nt_file = os.path.join(self.inst_dir, 'bin', 'vtkpython.exe')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already installed. Skipping build step.")
else:
# python 2.5.2 setup.py complains that this does not exist
# with VTK PV-3-2-1. This is only on installations with
# EasyInstall / Python Eggs, where the VTK setup.py uses
# EasyInstall and not standard distutils. gah!
# just tested with VTK 5.8.0 and Python 2.7.2
# it indeed installs VTK_PYTHON/VTK-5.8.0-py2.7.egg
# but due to the site.py and easy-install.pth magic in there,
# adding VTK_PYTHON to the PYTHONPATH still works. We can keep
# pip, yay!
if not os.path.exists(config.VTK_PYTHON):
os.makedirs(config.VTK_PYTHON)
os.chdir(self.build_dir)
# we save, set and restore the PP env variable, else
# stupid setuptools complains
save_env = os.environ.get('PYTHONPATH', '')
os.environ['PYTHONPATH'] = config.VTK_PYTHON
ret = utils.make_command('VTK.sln', install=True)
os.environ['PYTHONPATH'] = save_env
if ret != 0:
utils.error("Could not install VTK. Fix and try again.")
# now do some surgery on VTKConfig.cmake and
# VTKLibraryDepends.cmake so builds of VTK-dependent libraries
# with only the DRE to link with Just Work(tm)
# on windows, we need to replace backslash with forward slash
# as that's the style used by the config files | # See COPYRIGHT for details.
import config | random_line_split |
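# The save/set/restore handling of PYTHONPATH in install() above can be
# expressed as a context manager; a sketch (the helper name is an
# assumption, and like the original it restores '' rather than unsetting
# the variable):
import os
from contextlib import contextmanager

@contextmanager
def env_var(name, value):
    saved = os.environ.get(name, '')
    os.environ[name] = value
    try:
        yield
    finally:
        os.environ[name] = saved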
|
ip_vtk58.py | = "v5.8.0"
VTK_BASE_VERSION = "vtk-5.8"
# this patch does three things:
# 1. adds try/catch blocks to all python method calls in order
# to trap bad_alloc exceptions
# 2. implements my scheme for turning all VTK errors into Python exceptions
# by making use of a special output window class
# 3. gives up the GIL around all VTK calls. This is also necessary
# for 2 not to deadlock on multi-cores.
EXC_PATCH = "pyvtk580_tryexcept_and_pyexceptions.diff"
# fixes attributes in vtkproperty for shader use in python
VTKPRPRTY_PATCH = "vtkProperty_PyShaderVar.diff"
# recent segfault with vtk 5.6.1 and wxPython 2.8.11.0
# see here for more info:
# http://vtk.1045678.n5.nabble.com/wx-python-scripts-segfault-td1234471.html
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH = "wxvtkrwi_displayid_segfault.diff"
dependencies = ['CMake']
class VTK58(InstallPackage):
def __init__(self):
self.source_dir = os.path.join(config.archive_dir, BASENAME)
self.build_dir = os.path.join(config.build_dir, '%s-build' %
(BASENAME,))
self.inst_dir = os.path.join(config.inst_dir, BASENAME)
self.exc_patch_src = os.path.join(config.patches_dir, EXC_PATCH)
self.exc_patch_dst = os.path.join(config.archive_dir, EXC_PATCH)
self.vtkprprty_patch_filename = os.path.join(config.patches_dir,
VTKPRPRTY_PATCH)
self.wxvtkrwi_displayid_segfault_patch_filename = os.path.join(
config.patches_dir,
WXVTKRWI_DISPLAYID_SEGFAULT_PATCH)
config.VTK_LIB = os.path.join(self.inst_dir, 'lib')
# whatever the case may be, we have to register VTK variables
if os.name == 'nt':
# on Win, inst/VTK/bin contains the DLLs
config.VTK_SODIR = os.path.join(self.inst_dir, 'bin')
# inst/VTK/lib/site-packages contains the VTK python package
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'site-packages')
else:
# on *ix, inst/VTK/lib contains the shared objects (.so files)
config.VTK_SODIR = os.path.join(
config.VTK_LIB, VTK_BASE_VERSION)
# on *ix, inst/lib/python2.5/site-packages contains the
# VTK python package
# sys.version_info is (2, 5, 0, 'final', 0)
config.VTK_PYTHON = os.path.join(
config.VTK_LIB, 'python%d.%d/site-packages' % \
sys.version_info[0:2])
# this contains the VTK cmake config (same on *ix and Win)
config.VTK_DIR = os.path.join(config.VTK_LIB, VTK_BASE_VERSION)
def get(self):
if os.path.exists(self.source_dir):
utils.output("VTK already checked out, skipping step.")
else:
utils.goto_archive()
ret = os.system("git clone %s %s" % (GIT_REPO, BASENAME))
if ret != 0:
utils.error("Could not clone VTK repo. Fix and try again.")
os.chdir(self.source_dir)
ret = os.system("git checkout %s" % (GIT_TAG,))
if ret != 0:
utils.error("Could not checkout VTK %s. Fix and try again." % (GIT_TAG,))
if not os.path.exists(self.exc_patch_dst):
utils.output("Applying EXC patch")
# we do this copy so we can see if the patch has been done yet or not
shutil.copyfile(self.exc_patch_src, self.exc_patch_dst)
os.chdir(self.source_dir)
# default git-generated patch, so needs -p1
ret = os.system(
"%s -p1 < %s" % (config.PATCH, self.exc_patch_dst))
if ret != 0:
utils.error(
"Could not apply EXC patch. Fix and try again.")
# # VTKPRPRTY PATCH
# utils.output("Applying VTKPRPRTY patch")
# os.chdir(os.path.join(self.source_dir, 'Rendering'))
# ret = os.system(
# "%s -p0 < %s" % (config.PATCH, self.vtkprprty_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply VTKPRPRTY patch. Fix and try again.")
# # WXVTKRWI_DISPLAYID_SEGFAULT patch
# utils.output("Applying VTKWXRWI_DISPLAYID_SEGFAULT patch")
# os.chdir(self.source_dir)
# # default git-generated patch, so needs -p1
# ret = os.system(
# "%s -p1 < %s" % (config.PATCH,
# self.wxvtkrwi_displayid_segfault_patch_filename))
# if ret != 0:
# utils.error(
# "Could not apply WXVTKRWI_DISPLAYID_SEGFAULT patch. Fix and try again.")
def unpack(self):
pass
def configure(self):
if os.path.exists(
os.path.join(self.build_dir, 'CMakeFiles/cmake.check_cache')):
utils.output("VTK build already configured.")
return
if not os.path.exists(self.build_dir):
os.mkdir(self.build_dir)
cmake_params = "-DBUILD_SHARED_LIBS=ON " \
"-DBUILD_TESTING=OFF " \
"-DCMAKE_BUILD_TYPE=RelWithDebInfo " \
"-DCMAKE_INSTALL_PREFIX=%s " \
"-DVTK_USE_TK=NO " \
"-DVTK_USE_METAIO=ON " \
"-DVTK_USE_PARALLEL=ON " \
"-DPYTHON_EXECUTABLE=%s " \
"-DPYTHON_LIBRARY=%s " \
"-DPYTHON_INCLUDE_PATH=%s " \
"-DVTK_WRAP_PYTHON=ON " % (self.inst_dir,
config.PYTHON_EXECUTABLE,
config.PYTHON_LIBRARY,
config.PYTHON_INCLUDE_PATH)
ret = utils.cmake_command(self.build_dir, self.source_dir,
cmake_params)
if ret != 0:
utils.error("Could not configure VTK. Fix and try again.")
def | (self):
posix_file = os.path.join(self.build_dir,
'bin/libvtkWidgetsPython.so')
nt_file = os.path.join(self.build_dir, 'bin', config.BUILD_TARGET,
'vtkWidgetsPythonD.dll')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already built. Skipping build step.")
else:
os.chdir(self.build_dir)
ret = utils.make_command('VTK.sln')
if ret != 0:
utils.error("Error building VTK. Fix and try again.")
def install(self):
posix_file = os.path.join(self.inst_dir, 'bin/vtkpython')
nt_file = os.path.join(self.inst_dir, 'bin', 'vtkpython.exe')
if utils.file_exists(posix_file, nt_file):
utils.output("VTK already installed. Skipping build step.")
else:
# python 2.5.2 setup.py complains that this does not exist
# with VTK PV-3-2-1. This is only on installations with
# EasyInstall / Python Eggs, where the VTK setup.py uses
# EasyInstall and not standard distutils. gah!
# just tested with VTK 5.8.0 and Python 2.7.2
# it indeed installs VTK_PYTHON/VTK-5.8.0-py2.7.egg
# but due to the site.py and easy-install.pth magic in there,
# adding VTK_PYTHON to the PYTHONPATH still works. We can keep
# pip, yay!
if not os.path.exists(config.VTK_PYTHON):
os.makedirs(config.VTK_PYTHON)
os.chdir(self.build_dir)
# we save, set and restore the PP env variable, else
# stupid setuptools complains
save_env = os.environ.get('PYTHONPATH', '')
os.environ['PYTHONPATH'] = config.VTK_PYTHON
ret = utils.make_command('VTK.sln', install=True)
os.environ['PYTHONPATH'] = save_env
if ret != 0:
utils.error("Could not install VTK. Fix and try again.")
# now do some surgery on VTKConfig.cmake and
# VTKLibraryDepends.cmake so builds of VTK-dependent libraries
# with only the DRE to link with Just Work(tm)
# on windows, we need to replace backslash with forward slash
# as that's the style used by the config files. On *ix mostly
# harmless
idp = re.sub(r'\\','/', config.inst_dir)
for fn in [os.path.join(config.VTK_DIR, 'VTKConfig.cmake'),
os.path.join(config | build | identifier_name |
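# The post-install "surgery" described above amounts to rewriting the
# absolute, backslashed install prefix inside each generated .cmake file.
# Illustrative sketch only; the replacement token is an assumption:
import re

def rewrite_prefix(filename, inst_dir, token='${DRE_INSTALL_PREFIX}'):
    idp = re.sub(r'\\', '/', inst_dir)  # normalise to forward slashes
    with open(filename) as f:
        text = f.read()
    with open(filename, 'w') as f:
        f.write(text.replace(idp, token))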
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn | (&self) -> usize {
self.buffer.len()
}
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes {
Ok(())
} else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else {
Err(ParseError::Incomplete)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
}
| len | identifier_name |
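# The exact byte layout asserted by the ping test above can be reproduced
# with a few lines of Python (standard library only); the trailing assert
# uses the same expected bytes as the Rust test:
import struct

def framed_ping(seq_id=0):
    body = b'\x80\x01\x00\x01'              # protocol header: 128, 1, 0, 1
    body += struct.pack('>i', 4) + b'ping'  # method name with i32 length prefix
    body += struct.pack('>i', seq_id)       # sequence id
    body += b'\x00'                         # STOP
    return struct.pack('>i', len(body)) + body  # 4-byte big-endian frame length

assert framed_ping() == bytes([0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4,
                               112, 105, 110, 103, 0, 0, 0, 0, 0])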
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn len(&self) -> usize {
self.buffer.len()
}
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes {
Ok(())
} else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else { | }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
} | Err(ParseError::Incomplete) | random_line_split |
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn len(&self) -> usize {
self.buffer.len()
}
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes | else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else {
Err(ParseError::Incomplete)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
}
| {
Ok(())
} | conditional_block |
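# decode() above accepts a buffer only when the i32 length prefix plus the
# 4-byte prefix itself equals the buffer size. The same check sketched in
# Python (no checked_add analogue is needed, since Python ints do not
# overflow):
import struct

def frame_complete(buf):
    if len(buf) <= 4:
        return False  # corresponds to ParseError::Incomplete
    (length,) = struct.unpack('>I', buf[:4])
    return length + 4 == len(buf)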
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn len(&self) -> usize |
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes {
Ok(())
} else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else {
Err(ParseError::Incomplete)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
}
| {
self.buffer.len()
} | identifier_body |
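# write_str above is the Thrift binary encoding for STRING: an i32 byte
# length followed by the raw bytes. Equivalent sketch in Python (UTF-8
# input is an assumption):
import struct

def write_str(s):
    raw = s.encode('utf-8')
    return struct.pack('>i', len(raw)) + raw

assert write_str('ping') == b'\x00\x00\x00\x04ping'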
typings.d.ts | interface Date {
now(): number;
}
declare module 'http' {
import { IncomingMessage } from 'http';
export interface Response<TBody> extends IncomingMessage {
body?: TBody;
}
}
declare module 'redis' {
export function createClient(port: number, host?: string, options?: ClientOptions): RedisClient;
export function createClient(unix_socket: string, options?: ClientOptions): RedisClient;
export function createClient(redis_url: string, options?: ClientOptions): RedisClient;
export function createClient(options?: ClientOptions): RedisClient;
export interface ClientOptions {
host?: string;
port?: number;
path?: string;
url?: string;
parser?: string;
string_numbers?: boolean;
return_buffers?: boolean;
detect_buffers?: boolean;
socket_keepalive?: boolean;
no_ready_check?: boolean;
enable_offline_queue?: boolean;
retry_max_delay?: number;
connect_timeout?: number;
max_attempts?: number;
retry_unfulfilled_commands?: boolean;
auth_pass?: string;
password?: string;
db?: string; | rename_commands?: { [command: string]: string };
tls?: any;
prefix?: string;
retry_strategy?: Function;
}
export class RedisClient {
expire(key: string, seconds: number): void;
getAsync(key: string): Promise<string>;
setAsync(key: string, value: any): Promise<'OK'>;
delAsync(key: string): Promise<number>;
}
}
declare module 'node-rsa' {
namespace NodeRsa {
}
class NodeRsa {
constructor(options: any);
exportKey(keyType?: string): string;
}
export = NodeRsa;
} | family?: string; | random_line_split |
typings.d.ts | interface Date {
now(): number;
}
declare module 'http' {
import { IncomingMessage } from 'http';
export interface Response<TBody> extends IncomingMessage {
body?: TBody;
}
}
declare module 'redis' {
export function createClient(port: number, host?: string, options?: ClientOptions): RedisClient;
export function createClient(unix_socket: string, options?: ClientOptions): RedisClient;
export function createClient(redis_url: string, options?: ClientOptions): RedisClient;
export function createClient(options?: ClientOptions): RedisClient;
export interface ClientOptions {
host?: string;
port?: number;
path?: string;
url?: string;
parser?: string;
string_numbers?: boolean;
return_buffers?: boolean;
detect_buffers?: boolean;
socket_keepalive?: boolean;
no_ready_check?: boolean;
enable_offline_queue?: boolean;
retry_max_delay?: number;
connect_timeout?: number;
max_attempts?: number;
retry_unfulfilled_commands?: boolean;
auth_pass?: string;
password?: string;
db?: string;
family?: string;
rename_commands?: { [command: string]: string };
tls?: any;
prefix?: string;
retry_strategy?: Function;
}
export class RedisClient {
expire(key: string, seconds: number): void;
getAsync(key: string): Promise<string>;
setAsync(key: string, value: any): Promise<'OK'>;
delAsync(key: string): Promise<number>;
}
}
declare module 'node-rsa' {
namespace NodeRsa {
}
class | {
constructor(options: any);
exportKey(keyType?: string): string;
}
export = NodeRsa;
}
| NodeRsa | identifier_name |
__init__.py | """
"""
from .register import get_registered_layers
#custom layer import begins
import axpy
import flatten
import argmax
import reshape
import roipooling
import priorbox
import permute
import detection_out
import normalize
import select
import crop
import reduction
#custom layer import ends
custom_layers = get_registered_layers()
def set_args(f, params, node=None):
""" set args for function 'f' using the parameters in node.layer.parameters
Args:
f (function): a python function object
params (object): an object containing the attributes needed by f's arguments
Returns:
arg_names (list): a list of argument names
kwargs (dict): a dict containing the needed arguments
"""
from ..protobuf_to_dict import protobuf_to_dict
argc = f.__code__.co_argcount
arg_list = f.__code__.co_varnames[0:argc]
kwargs = {}
for arg_name in arg_list:
if arg_name in params:
kwargs[arg_name] = params[arg_name]
if node is not None and len(node.metadata):
kwargs.update(node.metadata)
return arg_list, kwargs
def has_layer(kind):
""" test whether this layer exists in custom layer
"""
return kind in custom_layers
def compute_output_shape(kind, node):
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
shape_func = custom_layers[kind]['shape']
parents = node.parents
inputs = [list(p.output_shape) for p in parents]
arg_names, kwargs = set_args(shape_func, node.params)
if len(inputs) == 1:
inputs = inputs[0]
return shape_func(inputs, **kwargs)
def | (template, kind, node):
""" make a PaddleNode for custom layer which means construct
a piece of code to define a layer implemented in 'custom_layers'
Args:
@template (PaddleNode): a factory to new a instance of PaddleNode
@kind (str): type of custom layer
@node (graph.Node): a layer in the net
Returns:
instance of PaddleNode
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
#construct arguments needed by custom layer function from node's parameters
arg_names, kwargs = set_args(layer_func, node.params, node)
return template('custom_layer', kind, **kwargs)
def make_custom_layer(kind, inputs, name, *args, **kwargs):
""" execute a custom layer which is implemented by users
Args:
@kind (str): type name of this layer
@inputs (vars): variable list created by fluid
@name (str): name for this layer
@args (tuple): other positional arguments
@kwargs (dict): other kv arguments
Returns:
output (var): output variable for this layer
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
return layer_func(inputs, name, *args, **kwargs)
| make_node | identifier_name |
__init__.py | """
"""
from .register import get_registered_layers
#custom layer import begins
import axpy
import flatten
import argmax
import reshape
import roipooling
import priorbox
import permute
import detection_out
import normalize
import select
import crop
import reduction
#custom layer import ends
custom_layers = get_registered_layers()
def set_args(f, params, node=None):
""" set args for function 'f' using the parameters in node.layer.parameters
Args:
f (function): a python function object
params (object): an object containing the attributes needed by f's arguments
Returns:
arg_names (list): a list of argument names
kwargs (dict): a dict containing the needed arguments
"""
from ..protobuf_to_dict import protobuf_to_dict
argc = f.__code__.co_argcount
arg_list = f.__code__.co_varnames[0:argc]
kwargs = {}
for arg_name in arg_list:
if arg_name in params:
kwargs[arg_name] = params[arg_name]
if node is not None and len(node.metadata):
kwargs.update(node.metadata)
return arg_list, kwargs | """
return kind in custom_layers
def compute_output_shape(kind, node):
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
shape_func = custom_layers[kind]['shape']
parents = node.parents
inputs = [list(p.output_shape) for p in parents]
arg_names, kwargs = set_args(shape_func, node.params)
if len(inputs) == 1:
inputs = inputs[0]
return shape_func(inputs, **kwargs)
def make_node(template, kind, node):
""" make a PaddleNode for custom layer which means construct
a piece of code to define a layer implemented in 'custom_layers'
Args:
@template (PaddleNode): a factory to new a instance of PaddleNode
@kind (str): type of custom layer
@node (graph.Node): a layer in the net
Returns:
instance of PaddleNode
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
#construct arguments needed by custom layer function from node's parameters
arg_names, kwargs = set_args(layer_func, node.params, node)
return template('custom_layer', kind, **kwargs)
def make_custom_layer(kind, inputs, name, *args, **kwargs):
""" execute a custom layer which is implemented by users
Args:
@kind (str): type name of this layer
@inputs (vars): variable list created by fluid
@name (str): name for this layer
@args (tuple): other positional arguments
@kwargs (dict): other kv arguments
Returns:
output (var): output variable for this layer
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
return layer_func(inputs, name, *args, **kwargs) |
def has_layer(kind):
""" test whether this layer exists in custom layer | random_line_split |
__init__.py | """
"""
from .register import get_registered_layers
#custom layer import begins
import axpy
import flatten
import argmax
import reshape
import roipooling
import priorbox
import permute
import detection_out
import normalize
import select
import crop
import reduction
#custom layer import ends
custom_layers = get_registered_layers()
def set_args(f, params, node=None):
| if node is not None and len(node.metadata):
kwargs.update(node.metadata)
return arg_list, kwargs
def has_layer(kind):
""" test whether this layer exists in custom layer
"""
return kind in custom_layers
def compute_output_shape(kind, node):
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
shape_func = custom_layers[kind]['shape']
parents = node.parents
inputs = [list(p.output_shape) for p in parents]
arg_names, kwargs = set_args(shape_func, node.params)
if len(inputs) == 1:
inputs = inputs[0]
return shape_func(inputs, **kwargs)
def make_node(template, kind, node):
""" make a PaddleNode for custom layer which means construct
a piece of code to define a layer implemented in 'custom_layers'
Args:
@template (PaddleNode): a factory to new a instance of PaddleNode
@kind (str): type of custom layer
@node (graph.Node): a layer in the net
Returns:
instance of PaddleNode
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
#construct arguments needed by custom layer function from node's parameters
arg_names, kwargs = set_args(layer_func, node.params, node)
return template('custom_layer', kind, **kwargs)
def make_custom_layer(kind, inputs, name, *args, **kwargs):
""" execute a custom layer which is implemented by users
Args:
@kind (str): type name of this layer
@inputs (vars): variable list created by fluid
@name (str): name for this layer
@args (tuple): other positional arguments
@kwargs (dict): other kv arguments
Returns:
output (var): output variable for this layer
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
return layer_func(inputs, name, *args, **kwargs)
| """ set args for function 'f' using the parameters in node.layer.parameters
Args:
f (function): a python function object
params (object): an object containing the attributes needed by f's arguments
Returns:
arg_names (list): a list of argument names
kwargs (dict): a dict containing the needed arguments
"""
from ..protobuf_to_dict import protobuf_to_dict
argc = f.__code__.co_argcount
arg_list = f.__code__.co_varnames[0:argc]
kwargs = {}
for arg_name in arg_list:
if arg_name in params:
kwargs[arg_name] = params[arg_name]
| identifier_body |
__init__.py | """
"""
from .register import get_registered_layers
#custom layer import begins
import axpy
import flatten
import argmax
import reshape
import roipooling
import priorbox
import permute
import detection_out
import normalize
import select
import crop
import reduction
#custom layer import ends
custom_layers = get_registered_layers()
def set_args(f, params, node=None):
""" set args for function 'f' using the parameters in node.layer.parameters
Args:
f (function): a python function object
params (object): an object containing the attributes needed by f's arguments
Returns:
arg_names (list): a list of argument names
kwargs (dict): a dict containing the needed arguments
"""
from ..protobuf_to_dict import protobuf_to_dict
argc = f.__code__.co_argcount
arg_list = f.__code__.co_varnames[0:argc]
kwargs = {}
for arg_name in arg_list:
if arg_name in params:
|
if node is not None and len(node.metadata):
kwargs.update(node.metadata)
return arg_list, kwargs
def has_layer(kind):
""" test whether this layer exists in custom layer
"""
return kind in custom_layers
def compute_output_shape(kind, node):
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
shape_func = custom_layers[kind]['shape']
parents = node.parents
inputs = [list(p.output_shape) for p in parents]
arg_names, kwargs = set_args(shape_func, node.params)
if len(inputs) == 1:
inputs = inputs[0]
return shape_func(inputs, **kwargs)
def make_node(template, kind, node):
""" make a PaddleNode for custom layer which means construct
a piece of code to define a layer implemented in 'custom_layers'
Args:
@template (PaddleNode): a factory to new a instance of PaddleNode
@kind (str): type of custom layer
@node (graph.Node): a layer in the net
Returns:
instance of PaddleNode
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
#construct arguments needed by custom layer function from node's parameters
arg_names, kwargs = set_args(layer_func, node.params, node)
return template('custom_layer', kind, **kwargs)
def make_custom_layer(kind, inputs, name, *args, **kwargs):
""" execute a custom layer which is implemented by users
Args:
@kind (str): type name of this layer
@inputs (vars): variable list created by fluid
@name (str): name for this layer
@args (tuple): other positional arguments
@kwargs (dict): other kv arguments
Returns:
output (var): output variable for this layer
"""
assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
kind)
layer_func = custom_layers[kind]['layer']
return layer_func(inputs, name, *args, **kwargs)
| kwargs[arg_name] = params[arg_name] | conditional_block |
upsert_iss_domains.py | #!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
|
def handle(self, *args, **options):
upsert_domains(options['modified_within'])
def upsert_domains(modified_since=7):
"""Upsert Domains for SF Domain__c modified in last `modified_since` days.
"""
logger.info('upserting domains modified in last {since} days'.
format(since=modified_since))
modified_domains = (iss.salesforce.Domain.get_domains_modified_since(
days_ago=modified_since))
for domain in modified_domains:
iss.models.Domain.upsert(domain)
| parser.add_argument('-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert Domains modified within n-days') | identifier_body |
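# Hypothetical invocation sketch for the management command above; the
# keyword follows argparse's derived dest for --modified-within:
from django.core.management import call_command
call_command('upsert_iss_domains', modified_within=30)
# equivalent to: python manage.py upsert_iss_domains -m 30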
upsert_iss_domains.py | #!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def | (self, parser):
parser.add_argument('-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert Domains modified within n-days')
def handle(self, *args, **options):
upsert_domains(options['modified_within'])
def upsert_domains(modified_since=7):
"""Upsert Domains for SF Domain__c modified in last `modified_since` days.
"""
logger.info('upserting domains modified in last {since} days'.
format(since=modified_since))
modified_domains = (iss.salesforce.Domain.get_domains_modified_since(
days_ago=modified_since))
for domain in modified_domains:
iss.models.Domain.upsert(domain)
| add_arguments | identifier_name |
upsert_iss_domains.py | #!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert Domains modified within n-days')
def handle(self, *args, **options):
upsert_domains(options['modified_within'])
def upsert_domains(modified_since=7):
"""Upsert Domains for SF Domain__c modified in last `modified_since` days.
"""
logger.info('upserting domains modified in last {since} days'.
format(since=modified_since))
modified_domains = (iss.salesforce.Domain.get_domains_modified_since(
days_ago=modified_since))
for domain in modified_domains:
| iss.models.Domain.upsert(domain) | conditional_block |
|
upsert_iss_domains.py | #!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
| from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert Domains modified within n-days')
def handle(self, *args, **options):
upsert_domains(options['modified_within'])
def upsert_domains(modified_since=7):
"""Upsert Domains for SF Domain__c modified in last `modified_since` days.
"""
logger.info('upserting domains modified in last {since} days'.
format(since=modified_since))
modified_domains = (iss.salesforce.Domain.get_domains_modified_since(
days_ago=modified_since))
for domain in modified_domains:
iss.models.Domain.upsert(domain) | random_line_split |
|
search.component.ts | import { Component } from '@angular/core';
import { Query } from './query';
import { SearchService } from './search.service';
import { Car } from './car';
import { CarListComponent } from './car-list.component';
@Component({
selector: 'to-search',
providers: [ SearchService],
directives: [ CarListComponent ],
styleUrls: ['./search.component.css'],
templateUrl: './search.component.html'
})
export class SearchComponent {
query: Query = this.getDefaultQuery();
error: string;
results: Array<Car>;
searching = false;
constructor(private searchService: SearchService) { }
search() {
this.error = '';
this.searching = true;
this.searchService.search(this.query)
.then(results => this.results = results)
.catch(err => this.error = err)
.then(() => this.searching = false);
}
clear() {
this.query = this.getDefaultQuery();
this.results = null;
}
private getDefaultQuery(): Query |
private getDefaultStartDate() {
let date = new Date();
date.setDate(date.getDate() + 1);
return this.parseDate(date);
}
private getDefaultEndDate() {
let date = new Date();
date.setDate(date.getDate() + 2);
return this.parseDate(date);
}
private parseDate(date: Date) {
let [month, day, year] = date.toLocaleDateString('en-US', { day: '2-digit', month: '2-digit', year: 'numeric' }).split('/');
return [year, month, day].join('-');
}
}
| {
return {
location: '',
startDate: this.getDefaultStartDate(),
endDate: this.getDefaultEndDate(),
pickupTime: '08:00',
dropoffTime: '20:00'
}
} | identifier_body |
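getDefaultStartDate/getDefaultEndDate above produce tomorrow and the day after, and parseDate reorders the en-US MM/DD/YYYY string into YYYY-MM-DD. The same defaults fall out directly in Python, with no locale round-trip (a sketch, not part of the component):

from datetime import date, timedelta

def default_start_date():
    # Tomorrow, already in ISO order (YYYY-MM-DD).
    return (date.today() + timedelta(days=1)).isoformat()

def default_end_date():
    # The day after tomorrow.
    return (date.today() + timedelta(days=2)).isoformat()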
search.component.ts | import { Component } from '@angular/core';
import { Query } from './query';
import { SearchService } from './search.service';
import { Car } from './car';
import { CarListComponent } from './car-list.component';
@Component({
selector: 'to-search',
providers: [ SearchService],
directives: [ CarListComponent ],
styleUrls: ['./search.component.css'],
templateUrl: './search.component.html'
})
export class SearchComponent {
query: Query = this.getDefaultQuery();
error: string;
results: Array<Car>;
searching = false;
constructor(private searchService: SearchService) { }
search() {
this.error = '';
this.searching = true;
this.searchService.search(this.query)
.then(results => this.results = results)
.catch(err => this.error = err)
.then(() => this.searching = false);
}
clear() {
this.query = this.getDefaultQuery();
this.results = null;
}
private getDefaultQuery(): Query {
return {
location: '',
startDate: this.getDefaultStartDate(),
endDate: this.getDefaultEndDate(),
pickupTime: '08:00',
dropoffTime: '20:00'
}
}
private getDefaultStartDate() {
let date = new Date();
date.setDate(date.getDate() + 1);
return this.parseDate(date);
}
private getDefaultEndDate() {
let date = new Date();
date.setDate(date.getDate() + 2);
return this.parseDate(date); | let [month, day, year] = date.toLocaleDateString('en-US', { day: '2-digit', month: '2-digit', year: 'numeric' }).split('/');
return [year, month, day].join('-');
}
} | }
private parseDate(date: Date) { | random_line_split |
search.component.ts | import { Component } from '@angular/core';
import { Query } from './query';
import { SearchService } from './search.service';
import { Car } from './car';
import { CarListComponent } from './car-list.component';
@Component({
selector: 'to-search',
providers: [ SearchService],
directives: [ CarListComponent ],
styleUrls: ['./search.component.css'],
templateUrl: './search.component.html'
})
export class SearchComponent {
query: Query = this.getDefaultQuery();
error: string;
results: Array<Car>;
searching = false;
constructor(private searchService: SearchService) { }
search() {
this.error = '';
this.searching = true;
this.searchService.search(this.query)
.then(results => this.results = results)
.catch(err => this.error = err)
.then(() => this.searching = false);
}
| () {
this.query = this.getDefaultQuery();
this.results = null;
}
private getDefaultQuery(): Query {
return {
location: '',
startDate: this.getDefaultStartDate(),
endDate: this.getDefaultEndDate(),
pickupTime: '08:00',
dropoffTime: '20:00'
}
}
private getDefaultStartDate() {
let date = new Date();
date.setDate(date.getDate() + 1);
return this.parseDate(date);
}
private getDefaultEndDate() {
let date = new Date();
date.setDate(date.getDate() + 2);
return this.parseDate(date);
}
private parseDate(date: Date) {
let [month, day, year] = date.toLocaleDateString('en-US', { day: '2-digit', month: '2-digit', year: 'numeric' }).split('/');
return [year, month, day].join('-');
}
}
| clear | identifier_name |
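In search() above, the trailing .then(() => this.searching = false) runs whether the request resolved or the .catch absorbed the error, so it behaves as a finally block. The same control flow written out in Python (a sketch of the pattern only):

async def search(self, query):
    self.error = ""
    self.searching = True
    try:
        self.results = await self.search_service.search(query)
    except Exception as exc:   # mirrors .catch(err => this.error = err)
        self.error = str(exc)
    finally:                   # mirrors the trailing .then(...)
        self.searching = False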
master.py | """
Extensions to mitmproxy master.
"""
import multiprocessing
from seproxer import mitmproxy_extensions
import seproxer.mitmproxy_extensions.addons # NOQA
import seproxer.mitmproxy_extensions.options
| import mitmproxy.addons
import mitmproxy.proxy.server
import mitmproxy.master
class ProxyMaster(mitmproxy.master.Master):
"""
Implements mitmproxy master to produce flows through a shared Queue and a shared
state attribute that specifies if there are any responses pending
"""
def __init__(self, # type: ignore # (mypy doesn't like multiprocessing lib)
options: seproxer.mitmproxy_extensions.options,
server: mitmproxy.proxy.server,
results_queue: multiprocessing.Queue,
push_event: multiprocessing.Event,
active_flows_state: multiprocessing.Value,
) -> None:
"""
:param options: The extended mitmproxy options, used to configure our addons
:param server: The mitmproxy server that the proxy will be interfacing with
:param results_queue: The mitmproxy flows will be pushed into this queue
:param push_event: When this event is set, the stored flows will
be pushed into the `results_queue`
:param active_flows_state: A shared state that determines if there are any active flows,
that is, if any requests have pending responses
"""
super().__init__(options, server)
# This addon will allow us to modify headers, this is particularly useful for appending
# authentication cookies since selenium_extensions cannot modify HTTP ONLY cookies
self.addons.add(mitmproxy.addons.setheaders.SetHeaders())
# This add-on hooks into javascript window.onerror and all the console logging
# methods to log messages into our defined "window.__seproxer_logs" object
self.addons.add(mitmproxy_extensions.addons.JSConsoleErrorInjection())
# This addon will be responsible for storing our requests / responses in memory
# and will allow us to push the results through our results_queue
self._memory_stream_addon = mitmproxy_extensions.addons.MemoryStream()
self.addons.add(self._memory_stream_addon)
self.results_queue = results_queue
self.push_event = push_event
self.active_flows_state = active_flows_state
def tick(self, timeout):
"""
Extends the Master's tick method to update our active flows state and to push
our results into the results queue if the push_event is set
"""
tick_result = super().tick(timeout)
# Update our active flow state
has_active_flows = self._memory_stream_addon.has_active_flows()
if has_active_flows != self.active_flows_state.value:
with self.active_flows_state.get_lock():
self.active_flows_state.value = has_active_flows
if self.push_event.is_set():
# Get the flow results and restart by calling start again
flow_results = self._memory_stream_addon.get_stream()
self._memory_stream_addon.start()
# Push the results to the result queue
self.results_queue.put(flow_results)
self.push_event.clear()
return tick_result | random_line_split |
|
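ProxyMaster's constructor expects three multiprocessing primitives owned by the parent process. The producer-side wiring implied by that signature looks roughly like this; options/server construction is outside these rows, hence commented out.

import multiprocessing

results_queue = multiprocessing.Queue()    # receives batches of flows
push_event = multiprocessing.Event()       # consumer sets it to request a push
active_flows_state = multiprocessing.Value("b", False)  # pending-response flag

# master = ProxyMaster(options, server, results_queue, push_event,
#                      active_flows_state)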
master.py | """
Extensions to mitmproxy master.
"""
import multiprocessing
from seproxer import mitmproxy_extensions
import seproxer.mitmproxy_extensions.addons # NOQA
import seproxer.mitmproxy_extensions.options
import mitmproxy.addons
import mitmproxy.proxy.server
import mitmproxy.master
class ProxyMaster(mitmproxy.master.Master):
| super().__init__(options, server)
# This addon will allow us to modify headers, this is particularly useful for appending
# authentication cookies since selenium_extensions cannot modify HTTP ONLY cookies
self.addons.add(mitmproxy.addons.setheaders.SetHeaders())
# This add-on hooks into javascript window.onerror and all the console logging
# methods to log messages into our defined "window.__seproxer_logs" object
self.addons.add(mitmproxy_extensions.addons.JSConsoleErrorInjection())
# This addon will be responsible for storing our requests / responses in memory
# and will allow us to push the results through our results_queue
self._memory_stream_addon = mitmproxy_extensions.addons.MemoryStream()
self.addons.add(self._memory_stream_addon)
self.results_queue = results_queue
self.push_event = push_event
self.active_flows_state = active_flows_state
def tick(self, timeout):
"""
Extends the Master's tick method to update our active flows state and to push
our results into the results queue if the push_event is set
"""
tick_result = super().tick(timeout)
# Update our active flow state
has_active_flows = self._memory_stream_addon.has_active_flows()
if has_active_flows != self.active_flows_state.value:
with self.active_flows_state.get_lock():
self.active_flows_state.value = has_active_flows
if self.push_event.is_set():
# Get the flow results and restart by calling start again
flow_results = self._memory_stream_addon.get_stream()
self._memory_stream_addon.start()
# Push the results to the result queue
self.results_queue.put(flow_results)
self.push_event.clear()
return tick_result
| """
Implements mitmproxy master to produce flows through a shared Queue and a shared
state attribute that specifies if there are any responses pending
"""
def __init__(self, # type: ignore # (mypy doesn't like multiprocessing lib)
options: seproxer.mitmproxy_extensions.options,
server: mitmproxy.proxy.server,
results_queue: multiprocessing.Queue,
push_event: multiprocessing.Event,
active_flows_state: multiprocessing.Value,
) -> None:
"""
:param options: The extended mitmproxy options, used to configure our addons
:param server: The mitmproxy server that the proxy will be interfacing with
:param results_queue: The mitmproxy flows will be pushed into this queue
:param push_event: When this event is set, the stored flows will
be pushed into the `results_queue`
:param active_flows_state: A shared state that determines if there are any active flows,
that is, if any requests have pending responses
""" | identifier_body |
master.py | """
Extensions to mitmproxy master.
"""
import multiprocessing
from seproxer import mitmproxy_extensions
import seproxer.mitmproxy_extensions.addons # NOQA
import seproxer.mitmproxy_extensions.options
import mitmproxy.addons
import mitmproxy.proxy.server
import mitmproxy.master
class ProxyMaster(mitmproxy.master.Master):
"""
Implements mitmproxy master to produce flows through a shared Queue and a shared
state attribute that specifies if there are any responses pending
"""
def __init__(self, # type: ignore # (mypy doesn't like multiprocessing lib)
options: seproxer.mitmproxy_extensions.options,
server: mitmproxy.proxy.server,
results_queue: multiprocessing.Queue,
push_event: multiprocessing.Event,
active_flows_state: multiprocessing.Value,
) -> None:
"""
:param options: The extended mitmproxy options, used to configure our addons
:param server: The mitmproxy server that the proxy will be interfacing with
:param results_queue: The mitmproxy flows will be pushed into this queue
:param push_event: When this event is set, the stored flows will
be pushed into the `results_queue`
:param active_flows_state: A shared state that determines if there are any active flows,
that is, if any requests have pending responses
"""
super().__init__(options, server)
# This addon will allow us to modify headers, this is particularly useful for appending
# authentication cookies since selenium_extensions cannot modify HTTP ONLY cookies
self.addons.add(mitmproxy.addons.setheaders.SetHeaders())
# This add-on hooks into javascript window.onerror and all the console logging
# methods to log messages into our defined "window.__seproxer_logs" object
self.addons.add(mitmproxy_extensions.addons.JSConsoleErrorInjection())
# This addon will be responsible for storing our requests / responses in memory
# and will allow us to push the results through our results_queue
self._memory_stream_addon = mitmproxy_extensions.addons.MemoryStream()
self.addons.add(self._memory_stream_addon)
self.results_queue = results_queue
self.push_event = push_event
self.active_flows_state = active_flows_state
def tick(self, timeout):
"""
Extends the Master's tick method to update our active flows state and to push
our results into the results queue if the push_event is set
"""
tick_result = super().tick(timeout)
# Update our active flow state
has_active_flows = self._memory_stream_addon.has_active_flows()
if has_active_flows != self.active_flows_state.value:
with self.active_flows_state.get_lock():
self.active_flows_state.value = has_active_flows
if self.push_event.is_set():
# Get the flow results and restart by calling start again
|
return tick_result
| flow_results = self._memory_stream_addon.get_stream()
self._memory_stream_addon.start()
# Push the results to the result queue
self.results_queue.put(flow_results)
self.push_event.clear() | conditional_block |
master.py | """
Extensions to mitmproxy master.
"""
import multiprocessing
from seproxer import mitmproxy_extensions
import seproxer.mitmproxy_extensions.addons # NOQA
import seproxer.mitmproxy_extensions.options
import mitmproxy.addons
import mitmproxy.proxy.server
import mitmproxy.master
class | (mitmproxy.master.Master):
"""
Implements mitmproxy master to produce flows through a shared Queue and a shared
state attribute that specifies if there are any responses pending
"""
def __init__(self, # type: ignore # (mypy doesn't like multiprocessing lib)
options: seproxer.mitmproxy_extensions.options,
server: mitmproxy.proxy.server,
results_queue: multiprocessing.Queue,
push_event: multiprocessing.Event,
active_flows_state: multiprocessing.Value,
) -> None:
"""
:param options: The extended mitmproxy options, used to configure our addons
:param server: The mitmproxy server that the proxy will be interfacing with
:param results_queue: The mitmproxy flows will be pushed into this queue
:param push_event: When this event is set, the stored flows will
be pushed into the `results_queue`
:param active_flows_state: A shared state that determines if there are any active flows,
that is, if any requests have pending responses
"""
super().__init__(options, server)
# This addon will allow us to modify headers, this is particularly useful for appending
# authentication cookies since selenium_extensions cannot modify HTTP ONLY cookies
self.addons.add(mitmproxy.addons.setheaders.SetHeaders())
# This add-on hooks into javascript window.onerror and all the console logging
# methods to log messages into our defined "window.__seproxer_logs" object
self.addons.add(mitmproxy_extensions.addons.JSConsoleErrorInjection())
# This addon will be responsible for storing our requests / responses in memory
# and will allow us to push the results through our results_queue
self._memory_stream_addon = mitmproxy_extensions.addons.MemoryStream()
self.addons.add(self._memory_stream_addon)
self.results_queue = results_queue
self.push_event = push_event
self.active_flows_state = active_flows_state
def tick(self, timeout):
"""
Extends the Master's tick method to update our active flows state and to push
our results into the results queue if the push_event is set
"""
tick_result = super().tick(timeout)
# Update our active flow state
has_active_flows = self._memory_stream_addon.has_active_flows()
if has_active_flows != self.active_flows_state.value:
with self.active_flows_state.get_lock():
self.active_flows_state.value = has_active_flows
if self.push_event.is_set():
# Get the flow results and restart by calling start again
flow_results = self._memory_stream_addon.get_stream()
self._memory_stream_addon.start()
# Push the results to the result queue
self.results_queue.put(flow_results)
self.push_event.clear()
return tick_result
| ProxyMaster | identifier_name |
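Reading tick() above: the consumer asks for a batch by setting push_event, and the next tick puts the accumulated stream on the queue and clears the event. A consumer-side sketch under those assumptions:

def collect_flows(results_queue, push_event, timeout=5.0):
    push_event.set()                           # ask the next tick() to push
    return results_queue.get(timeout=timeout)  # block until the batch lands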
jquery.smoothscroll.js | new Date;
/**
* Pushes scroll actions to the scrolling queue.
*/
function scrollArray(elem, left, top, delay) {
delay || (delay = 1000);
directionCheck(left, top);
if (acceleration) {
var now = +new Date;
var elapsed = now - lastScroll;
if (elapsed < accelDelta) {
var factor = (1 + (30 / elapsed)) / 2;
if (factor > 1) {
factor = Math.min(factor, accelMax);
left *= factor;
top *= factor;
}
}
lastScroll = +new Date;
}
// push a scroll command
que.push({
x: left,
y: top,
lastX: (left < 0) ? 0.99 : -0.99,
lastY: (top < 0) ? 0.99 : -0.99,
start: +new Date
});
// don't act if there's a pending queue
if (pending) {
return;
}
var scrollWindow = (elem === document.body);
var step = function() {
var now = +new Date;
var scrollX = 0;
var scrollY = 0;
for (var i = 0; i < que.length; i++) {
var item = que[i];
var elapsed = now - item.start;
var finished = (elapsed >= animtime);
// scroll position: [0, 1]
var position = (finished) ? 1 : elapsed / animtime;
// easing [optional]
if (pulseAlgorithm) {
position = pulse(position);
}
// only need the difference
var x = (item.x * position - item.lastX) >> 0;
var y = (item.y * position - item.lastY) >> 0;
// add this to the total scrolling
scrollX += x;
scrollY += y;
// update last values
item.lastX += x;
item.lastY += y;
// delete and step back if it's over
if (finished) {
que.splice(i, 1); i--;
}
}
// scroll left and top
if (scrollWindow) {
window.scrollBy(scrollX, scrollY)
}
else {
if (scrollX) elem.scrollLeft += scrollX;
if (scrollY) elem.scrollTop += scrollY;
}
// clean up if there's nothing left to do
if (!left && !top) {
que = [];
}
if (que.length) {
requestFrame(step, elem, (delay / framerate + 1));
} else {
pending = false;
}
}
// start a new queue of actions
requestFrame(step, elem, 0);
pending = true;
}
/***********************************************
* EVENTS
***********************************************/
/**
* Mouse wheel handler.
* @param {Object} event
*/
function wheel(event) {
if (!initdone) {
init();
}
var target = event.target;
var overflowing = overflowingAncestor(target);
// use default if there's no overflowing
// element or default action is prevented
if (!overflowing || event.defaultPrevented ||
isNodeName(activeElement, "embed") ||
(isNodeName(target, "embed") && /\.pdf/i.test(target.src))) {
return true;
}
var deltaX = event.wheelDeltaX || 0;
var deltaY = event.wheelDeltaY || 0;
// use wheelDelta if deltaX/Y is not available
if (!deltaX && !deltaY) {
deltaY = event.wheelDelta || 0;
}
// scale by step size
// delta is 120 most of the time
// synaptics seems to send 1 sometimes
if (Math.abs(deltaX) > 1.2) {
deltaX *= stepsize / 120;
}
if (Math.abs(deltaY) > 1.2) {
deltaY *= stepsize / 120;
}
scrollArray(overflowing, -deltaX, -deltaY);
event.preventDefault();
}
/**
* Keydown event handler.
* @param {Object} event
*/
function keydown(event) {
var target = event.target;
var modifier = event.ctrlKey || event.altKey || event.metaKey ||
(event.shiftKey && event.keyCode !== key.spacebar);
// do nothing if user is editing text
// or using a modifier key (except shift)
// or in a dropdown
if ( /input|textarea|select|embed/i.test(target.nodeName) ||
target.isContentEditable ||
event.defaultPrevented ||
modifier ) {
return true;
}
// spacebar should trigger button press
if (isNodeName(target, "button") &&
event.keyCode === key.spacebar) {
return true;
}
var shift, x = 0, y = 0;
var elem = overflowingAncestor(activeElement);
var clientHeight = elem.clientHeight;
if (elem == document.body) {
clientHeight = window.innerHeight;
}
switch (event.keyCode) {
case key.up:
y = -arrowscroll;
break;
case key.down:
y = arrowscroll;
break;
case key.spacebar: // (+ shift)
shift = event.shiftKey ? 1 : -1;
y = -shift * clientHeight * 0.9;
break;
case key.pageup:
y = -clientHeight * 0.9;
break;
case key.pagedown:
y = clientHeight * 0.9;
break;
case key.home:
y = -elem.scrollTop;
break;
case key.end:
var damt = elem.scrollHeight - elem.scrollTop - clientHeight;
y = (damt > 0) ? damt+10 : 0;
break;
case key.left:
x = -arrowscroll;
break;
case key.right:
x = arrowscroll;
break;
default:
return true; // a key we don't care about
}
scrollArray(elem, x, y);
event.preventDefault();
}
/**
* Mousedown event only for updating activeElement
*/
function mousedown(event) {
activeElement = event.target;
}
/***********************************************
* OVERFLOW
***********************************************/
var cache = {}; // cleared out every once in while
setInterval(function(){ cache = {}; }, 10 * 1000);
var uniqueID = (function() {
var i = 0;
return function (el) {
return el.uniqueID || (el.uniqueID = i++);
};
})();
function setCache(elems, overflowing) {
for (var i = elems.length; i--;)
cache[uniqueID(elems[i])] = overflowing;
return overflowing;
}
function overflowingAncestor(el) {
var elems = [];
var rootScrollHeight = root.scrollHeight;
do {
var cached = cache[uniqueID(el)];
if (cached) {
return setCache(elems, cached);
}
elems.push(el);
if (rootScrollHeight === el.scrollHeight) {
if (!frame || root.clientHeight + 10 < rootScrollHeight) {
return setCache(elems, document.body); // scrolling root in WebKit
}
} else if (el.clientHeight + 10 < el.scrollHeight) {
var overflow = getComputedStyle(el, "").getPropertyValue("overflow-y");
if (overflow === "scroll" || overflow === "auto") {
return setCache(elems, el);
}
}
} while (el = el.parentNode);
}
/***********************************************
* HELPERS
***********************************************/
function addEvent(type, fn, bubble) {
window.addEventListener(type, fn, (bubble||false));
}
function removeEvent(type, fn, bubble) {
window.removeEventListener(type, fn, (bubble||false));
}
function isNodeName(el, tag) {
return (el.nodeName||"").toLowerCase() === tag.toLowerCase();
}
function directionCheck(x, y) {
x = (x > 0) ? 1 : -1;
y = (y > 0) ? 1 : -1;
if (direction.x !== x || direction.y !== y) {
direction.x = x;
direction.y = y;
que = [];
lastScroll = 0;
}
}
var requestFrame = (function(){
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
function(callback, element, delay){
window.setTimeout(callback, delay || (1000/60));
};
})();
/***********************************************
* PULSE
***********************************************/
/**
* Viscous fluid with a pulse for part and decay for the rest.
* - Applies a fixed force over an interval (a damped acceleration), and
* - Lets the exponential bleed away the velocity over a longer interval
* - Michael Herf, http://stereopsis.com/stopping/
*/
function pulse_(x) {
var val, start, expx;
// test
x = x * pulseScale;
if (x < 1) { // acceleration
val = x - (1 - Math.exp(-x));
} else { // tail | // the previous animation ended here:
start = Math.exp(-1);
// simple viscous drag | random_line_split |
|
jquery.smoothscroll.js | where the areas left and right to
* the content does not trigger the onmousewheel event
* on some pages. e.g.: html, body { height: 100% }
*/
else if (scrollHeight > windowHeight &&
(body.offsetHeight <= windowHeight ||
html.offsetHeight <= windowHeight)) {
// DOMChange (throttle): fix height
var pending = false;
var refresh = function() {
if (!pending && html.scrollHeight != document.height) {
pending = true; // add a new pending action
setTimeout(function(){
html.style.height = document.height + 'px';
pending = false;
}, 500); // act rarely to stay fast
}
};
html.style.height = '';
setTimeout(refresh, 10);
addEvent("DOMNodeInserted", refresh);
addEvent("DOMNodeRemoved", refresh);
// clearfix
if (root.offsetHeight <= windowHeight) {
var underlay = document.createElement("div");
underlay.style.clear = "both";
body.appendChild(underlay);
}
}
// gmail performance fix
if (document.URL.indexOf("mail.google.com") > -1) {
var s = document.createElement("style");
s.innerHTML = ".iu { visibility: hidden }";
(document.getElementsByTagName("head")[0] || html).appendChild(s);
}
// disable fixed background
if (!fixedback && !disabled) {
body.style.backgroundAttachment = "scroll";
html.style.backgroundAttachment = "scroll";
}
}
/************************************************
* SCROLLING
************************************************/
var que = [];
var pending = false;
var lastScroll = +new Date;
/**
* Pushes scroll actions to the scrolling queue.
*/
function scrollArray(elem, left, top, delay) {
delay || (delay = 1000);
directionCheck(left, top);
if (acceleration) {
var now = +new Date;
var elapsed = now - lastScroll;
if (elapsed < accelDelta) {
var factor = (1 + (30 / elapsed)) / 2;
if (factor > 1) {
factor = Math.min(factor, accelMax);
left *= factor;
top *= factor;
}
}
lastScroll = +new Date;
}
// push a scroll command
que.push({
x: left,
y: top,
lastX: (left < 0) ? 0.99 : -0.99,
lastY: (top < 0) ? 0.99 : -0.99,
start: +new Date
});
// don't act if there's a pending queue
if (pending) {
return;
}
var scrollWindow = (elem === document.body);
var step = function() {
var now = +new Date;
var scrollX = 0;
var scrollY = 0;
for (var i = 0; i < que.length; i++) {
var item = que[i];
var elapsed = now - item.start;
var finished = (elapsed >= animtime);
// scroll position: [0, 1]
var position = (finished) ? 1 : elapsed / animtime;
// easing [optional]
if (pulseAlgorithm) {
position = pulse(position);
}
// only need the difference
var x = (item.x * position - item.lastX) >> 0;
var y = (item.y * position - item.lastY) >> 0;
// add this to the total scrolling
scrollX += x;
scrollY += y;
// update last values
item.lastX += x;
item.lastY += y;
// delete and step back if it's over
if (finished) {
que.splice(i, 1); i--;
}
}
// scroll left and top
if (scrollWindow) {
window.scrollBy(scrollX, scrollY)
}
else {
if (scrollX) elem.scrollLeft += scrollX;
if (scrollY) elem.scrollTop += scrollY;
}
// clean up if there's nothing left to do
if (!left && !top) {
que = [];
}
if (que.length) {
requestFrame(step, elem, (delay / framerate + 1));
} else {
pending = false;
}
}
// start a new queue of actions
requestFrame(step, elem, 0);
pending = true;
}
/***********************************************
* EVENTS
***********************************************/
/**
* Mouse wheel handler.
* @param {Object} event
*/
function wheel(event) {
if (!initdone) {
init();
}
var target = event.target;
var overflowing = overflowingAncestor(target);
// use default if there's no overflowing
// element or default action is prevented
if (!overflowing || event.defaultPrevented ||
isNodeName(activeElement, "embed") ||
(isNodeName(target, "embed") && /\.pdf/i.test(target.src))) {
return true;
}
var deltaX = event.wheelDeltaX || 0;
var deltaY = event.wheelDeltaY || 0;
// use wheelDelta if deltaX/Y is not available
if (!deltaX && !deltaY) {
deltaY = event.wheelDelta || 0;
}
// scale by step size
// delta is 120 most of the time
// synaptics seems to send 1 sometimes
if (Math.abs(deltaX) > 1.2) {
deltaX *= stepsize / 120;
}
if (Math.abs(deltaY) > 1.2) {
deltaY *= stepsize / 120;
}
scrollArray(overflowing, -deltaX, -deltaY);
event.preventDefault();
}
/**
* Keydown event handler.
* @param {Object} event
*/
function keydown(event) {
var target = event.target;
var modifier = event.ctrlKey || event.altKey || event.metaKey ||
(event.shiftKey && event.keyCode !== key.spacebar);
// do nothing if user is editing text
// or using a modifier key (except shift)
// or in a dropdown
if ( /input|textarea|select|embed/i.test(target.nodeName) ||
target.isContentEditable ||
event.defaultPrevented ||
modifier ) {
return true;
}
// spacebar should trigger button press
if (isNodeName(target, "button") &&
event.keyCode === key.spacebar) {
return true;
}
var shift, x = 0, y = 0;
var elem = overflowingAncestor(activeElement);
var clientHeight = elem.clientHeight;
if (elem == document.body) {
clientHeight = window.innerHeight;
}
switch (event.keyCode) {
case key.up:
y = -arrowscroll;
break;
case key.down:
y = arrowscroll;
break;
case key.spacebar: // (+ shift)
shift = event.shiftKey ? 1 : -1;
y = -shift * clientHeight * 0.9;
break;
case key.pageup:
y = -clientHeight * 0.9;
break;
case key.pagedown:
y = clientHeight * 0.9;
break;
case key.home:
y = -elem.scrollTop;
break;
case key.end:
var damt = elem.scrollHeight - elem.scrollTop - clientHeight;
y = (damt > 0) ? damt+10 : 0;
break;
case key.left:
x = -arrowscroll;
break;
case key.right:
x = arrowscroll;
break;
default:
return true; // a key we don't care about
}
scrollArray(elem, x, y);
event.preventDefault();
}
/**
* Mousedown event only for updating activeElement
*/
function mousedown(event) {
activeElement = event.target;
}
/***********************************************
* OVERFLOW
***********************************************/
var cache = {}; // cleared out every once in while
setInterval(function(){ cache = {}; }, 10 * 1000);
var uniqueID = (function() {
var i = 0;
return function (el) {
return el.uniqueID || (el.uniqueID = i++);
};
})();
function setCache(elems, overflowing) {
for (var i = elems.length; i--;)
cache[uniqueID(elems[i])] = overflowing;
return overflowing;
}
function overflowingAncestor(el) {
var elems = [];
var rootScrollHeight = root.scrollHeight;
do {
var cached = cache[uniqueID(el)];
if (cached) {
return setCache(elems, cached);
}
elems.push(el);
if (rootScrollHeight === el.scrollHeight) {
if (!frame || root.clientHeight + 10 < rootScrollHeight) {
return setCache(elems, document.body); // scrolling root in WebKit
}
} else if (el.clientHeight + 10 < el.scrollHeight) {
var overflow = getComputedStyle(el, "").getPropertyValue("overflow-y");
if (overflow === "scroll" || overflow === "auto") {
return setCache(elems, el);
}
}
} while (el = el.parentNode);
}
/***********************************************
* HELPERS
***********************************************/
function | addEvent | identifier_name |
|
jquery.smoothscroll.js | ;
var activeElement;
var key = { left: 37, up: 38, right: 39, down: 40, spacebar: 32, pageup: 33, pagedown: 34, end: 35, home: 36 };
/**
* Sets up scrolls array, determines if frames are involved.
*/
function init() {
if (!document.body) return;
var body = document.body;
var html = document.documentElement;
var windowHeight = window.innerHeight;
var scrollHeight = body.scrollHeight;
// check compat mode for root element
root = (document.compatMode.indexOf('CSS') >= 0) ? html : body;
activeElement = body;
initdone = true;
// Checks if this script is running in a frame
if (top != self) {
frame = true;
}
/**
* This fixes a bug where the areas left and right to
* the content does not trigger the onmousewheel event
* on some pages. e.g.: html, body { height: 100% }
*/
else if (scrollHeight > windowHeight &&
(body.offsetHeight <= windowHeight ||
html.offsetHeight <= windowHeight)) {
// DOMChange (throttle): fix height
var pending = false;
var refresh = function() {
if (!pending && html.scrollHeight != document.height) {
pending = true; // add a new pending action
setTimeout(function(){
html.style.height = document.height + 'px';
pending = false;
}, 500); // act rarely to stay fast
}
};
html.style.height = '';
setTimeout(refresh, 10);
addEvent("DOMNodeInserted", refresh);
addEvent("DOMNodeRemoved", refresh);
// clearfix
if (root.offsetHeight <= windowHeight) {
var underlay = document.createElement("div");
underlay.style.clear = "both";
body.appendChild(underlay);
}
}
// gmail performance fix
if (document.URL.indexOf("mail.google.com") > -1) {
var s = document.createElement("style");
s.innerHTML = ".iu { visibility: hidden }";
(document.getElementsByTagName("head")[0] || html).appendChild(s);
}
// disable fixed background
if (!fixedback && !disabled) |
}
/************************************************
* SCROLLING
************************************************/
var que = [];
var pending = false;
var lastScroll = +new Date;
/**
* Pushes scroll actions to the scrolling queue.
*/
function scrollArray(elem, left, top, delay) {
delay || (delay = 1000);
directionCheck(left, top);
if (acceleration) {
var now = +new Date;
var elapsed = now - lastScroll;
if (elapsed < accelDelta) {
var factor = (1 + (30 / elapsed)) / 2;
if (factor > 1) {
factor = Math.min(factor, accelMax);
left *= factor;
top *= factor;
}
}
lastScroll = +new Date;
}
// push a scroll command
que.push({
x: left,
y: top,
lastX: (left < 0) ? 0.99 : -0.99,
lastY: (top < 0) ? 0.99 : -0.99,
start: +new Date
});
// don't act if there's a pending queue
if (pending) {
return;
}
var scrollWindow = (elem === document.body);
var step = function() {
var now = +new Date;
var scrollX = 0;
var scrollY = 0;
for (var i = 0; i < que.length; i++) {
var item = que[i];
var elapsed = now - item.start;
var finished = (elapsed >= animtime);
// scroll position: [0, 1]
var position = (finished) ? 1 : elapsed / animtime;
// easing [optional]
if (pulseAlgorithm) {
position = pulse(position);
}
// only need the difference
var x = (item.x * position - item.lastX) >> 0;
var y = (item.y * position - item.lastY) >> 0;
// add this to the total scrolling
scrollX += x;
scrollY += y;
// update last values
item.lastX += x;
item.lastY += y;
// delete and step back if it's over
if (finished) {
que.splice(i, 1); i--;
}
}
// scroll left and top
if (scrollWindow) {
window.scrollBy(scrollX, scrollY)
}
else {
if (scrollX) elem.scrollLeft += scrollX;
if (scrollY) elem.scrollTop += scrollY;
}
// clean up if there's nothing left to do
if (!left && !top) {
que = [];
}
if (que.length) {
requestFrame(step, elem, (delay / framerate + 1));
} else {
pending = false;
}
}
// start a new queue of actions
requestFrame(step, elem, 0);
pending = true;
}
/***********************************************
* EVENTS
***********************************************/
/**
* Mouse wheel handler.
* @param {Object} event
*/
function wheel(event) {
if (!initdone) {
init();
}
var target = event.target;
var overflowing = overflowingAncestor(target);
// use default if there's no overflowing
// element or default action is prevented
if (!overflowing || event.defaultPrevented ||
isNodeName(activeElement, "embed") ||
(isNodeName(target, "embed") && /\.pdf/i.test(target.src))) {
return true;
}
var deltaX = event.wheelDeltaX || 0;
var deltaY = event.wheelDeltaY || 0;
// use wheelDelta if deltaX/Y is not available
if (!deltaX && !deltaY) {
deltaY = event.wheelDelta || 0;
}
// scale by step size
// delta is 120 most of the time
// synaptics seems to send 1 sometimes
if (Math.abs(deltaX) > 1.2) {
deltaX *= stepsize / 120;
}
if (Math.abs(deltaY) > 1.2) {
deltaY *= stepsize / 120;
}
scrollArray(overflowing, -deltaX, -deltaY);
event.preventDefault();
}
/**
* Keydown event handler.
* @param {Object} event
*/
function keydown(event) {
var target = event.target;
var modifier = event.ctrlKey || event.altKey || event.metaKey ||
(event.shiftKey && event.keyCode !== key.spacebar);
// do nothing if user is editing text
// or using a modifier key (except shift)
// or in a dropdown
if ( /input|textarea|select|embed/i.test(target.nodeName) ||
target.isContentEditable ||
event.defaultPrevented ||
modifier ) {
return true;
}
// spacebar should trigger button press
if (isNodeName(target, "button") &&
event.keyCode === key.spacebar) {
return true;
}
var shift, x = 0, y = 0;
var elem = overflowingAncestor(activeElement);
var clientHeight = elem.clientHeight;
if (elem == document.body) {
clientHeight = window.innerHeight;
}
switch (event.keyCode) {
case key.up:
y = -arrowscroll;
break;
case key.down:
y = arrowscroll;
break;
case key.spacebar: // (+ shift)
shift = event.shiftKey ? 1 : -1;
y = -shift * clientHeight * 0.9;
break;
case key.pageup:
y = -clientHeight * 0.9;
break;
case key.pagedown:
y = clientHeight * 0.9;
break;
case key.home:
y = -elem.scrollTop;
break;
case key.end:
var damt = elem.scrollHeight - elem.scrollTop - clientHeight;
y = (damt > 0) ? damt+10 : 0;
break;
case key.left:
x = -arrowscroll;
break;
case key.right:
x = arrowscroll;
break;
default:
return true; // a key we don't care about
}
scrollArray(elem, x, y);
event.preventDefault();
}
/**
* Mousedown event only for updating activeElement
*/
function mousedown(event) {
activeElement = event.target;
}
/***********************************************
* OVERFLOW
***********************************************/
var cache = {}; // cleared out every once in while
setInterval(function(){ cache = {}; }, 10 * 1000);
var uniqueID = (function() {
var i = 0;
return function (el) {
return el.uniqueID || (el.uniqueID = i++);
};
})();
function setCache(elems, overflowing) {
for (var i = elems.length; i--;)
| {
body.style.backgroundAttachment = "scroll";
html.style.backgroundAttachment = "scroll";
} | conditional_block |
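scrollArray's step() above emits only the per-frame difference of each queued command: it tracks how much of the distance was already applied (lastX/lastY) and outputs target*position - last, dropping commands once position reaches 1. The same bookkeeping in Python (a sketch of the algorithm, not a port):

import time

def step(que, animtime=400.0):
    """Return (dx, dy) to apply this frame; drop finished commands."""
    now = time.time() * 1000.0
    dx = dy = 0
    for item in list(que):
        position = min(1.0, (now - item["start"]) / animtime)
        x = int(item["x"] * position - item["lastX"])  # like `>> 0`
        y = int(item["y"] * position - item["lastY"])
        item["lastX"] += x
        item["lastY"] += y
        dx += x
        dy += y
        if position >= 1.0:
            que.remove(item)
    return dx, dy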
jquery.smoothscroll.js | 1) {
factor = Math.min(factor, accelMax);
left *= factor;
top *= factor;
}
}
lastScroll = +new Date;
}
// push a scroll command
que.push({
x: left,
y: top,
lastX: (left < 0) ? 0.99 : -0.99,
lastY: (top < 0) ? 0.99 : -0.99,
start: +new Date
});
// don't act if there's a pending queue
if (pending) {
return;
}
var scrollWindow = (elem === document.body);
var step = function() {
var now = +new Date;
var scrollX = 0;
var scrollY = 0;
for (var i = 0; i < que.length; i++) {
var item = que[i];
var elapsed = now - item.start;
var finished = (elapsed >= animtime);
// scroll position: [0, 1]
var position = (finished) ? 1 : elapsed / animtime;
// easing [optional]
if (pulseAlgorithm) {
position = pulse(position);
}
// only need the difference
var x = (item.x * position - item.lastX) >> 0;
var y = (item.y * position - item.lastY) >> 0;
// add this to the total scrolling
scrollX += x;
scrollY += y;
// update last values
item.lastX += x;
item.lastY += y;
// delete and step back if it's over
if (finished) {
que.splice(i, 1); i--;
}
}
// scroll left and top
if (scrollWindow) {
window.scrollBy(scrollX, scrollY)
}
else {
if (scrollX) elem.scrollLeft += scrollX;
if (scrollY) elem.scrollTop += scrollY;
}
// clean up if there's nothing left to do
if (!left && !top) {
que = [];
}
if (que.length) {
requestFrame(step, elem, (delay / framerate + 1));
} else {
pending = false;
}
}
// start a new queue of actions
requestFrame(step, elem, 0);
pending = true;
}
/***********************************************
* EVENTS
***********************************************/
/**
* Mouse wheel handler.
* @param {Object} event
*/
function wheel(event) {
if (!initdone) {
init();
}
var target = event.target;
var overflowing = overflowingAncestor(target);
// use default if there's no overflowing
// element or default action is prevented
if (!overflowing || event.defaultPrevented ||
isNodeName(activeElement, "embed") ||
(isNodeName(target, "embed") && /\.pdf/i.test(target.src))) {
return true;
}
var deltaX = event.wheelDeltaX || 0;
var deltaY = event.wheelDeltaY || 0;
// use wheelDelta if deltaX/Y is not available
if (!deltaX && !deltaY) {
deltaY = event.wheelDelta || 0;
}
// scale by step size
// delta is 120 most of the time
// synaptics seems to send 1 sometimes
if (Math.abs(deltaX) > 1.2) {
deltaX *= stepsize / 120;
}
if (Math.abs(deltaY) > 1.2) {
deltaY *= stepsize / 120;
}
scrollArray(overflowing, -deltaX, -deltaY);
event.preventDefault();
}
/**
* Keydown event handler.
* @param {Object} event
*/
function keydown(event) {
var target = event.target;
var modifier = event.ctrlKey || event.altKey || event.metaKey ||
(event.shiftKey && event.keyCode !== key.spacebar);
// do nothing if user is editing text
// or using a modifier key (except shift)
// or in a dropdown
if ( /input|textarea|select|embed/i.test(target.nodeName) ||
target.isContentEditable ||
event.defaultPrevented ||
modifier ) {
return true;
}
// spacebar should trigger button press
if (isNodeName(target, "button") &&
event.keyCode === key.spacebar) {
return true;
}
var shift, x = 0, y = 0;
var elem = overflowingAncestor(activeElement);
var clientHeight = elem.clientHeight;
if (elem == document.body) {
clientHeight = window.innerHeight;
}
switch (event.keyCode) {
case key.up:
y = -arrowscroll;
break;
case key.down:
y = arrowscroll;
break;
case key.spacebar: // (+ shift)
shift = event.shiftKey ? 1 : -1;
y = -shift * clientHeight * 0.9;
break;
case key.pageup:
y = -clientHeight * 0.9;
break;
case key.pagedown:
y = clientHeight * 0.9;
break;
case key.home:
y = -elem.scrollTop;
break;
case key.end:
var damt = elem.scrollHeight - elem.scrollTop - clientHeight;
y = (damt > 0) ? damt+10 : 0;
break;
case key.left:
x = -arrowscroll;
break;
case key.right:
x = arrowscroll;
break;
default:
return true; // a key we don't care about
}
scrollArray(elem, x, y);
event.preventDefault();
}
/**
* Mousedown event only for updating activeElement
*/
function mousedown(event) {
activeElement = event.target;
}
/***********************************************
* OVERFLOW
***********************************************/
var cache = {}; // cleared out every once in while
setInterval(function(){ cache = {}; }, 10 * 1000);
var uniqueID = (function() {
var i = 0;
return function (el) {
return el.uniqueID || (el.uniqueID = i++);
};
})();
function setCache(elems, overflowing) {
for (var i = elems.length; i--;)
cache[uniqueID(elems[i])] = overflowing;
return overflowing;
}
function overflowingAncestor(el) {
var elems = [];
var rootScrollHeight = root.scrollHeight;
do {
var cached = cache[uniqueID(el)];
if (cached) {
return setCache(elems, cached);
}
elems.push(el);
if (rootScrollHeight === el.scrollHeight) {
if (!frame || root.clientHeight + 10 < rootScrollHeight) {
return setCache(elems, document.body); // scrolling root in WebKit
}
} else if (el.clientHeight + 10 < el.scrollHeight) {
var overflow = getComputedStyle(el, "").getPropertyValue("overflow-y");
if (overflow === "scroll" || overflow === "auto") {
return setCache(elems, el);
}
}
} while (el = el.parentNode);
}
/***********************************************
* HELPERS
***********************************************/
function addEvent(type, fn, bubble) {
window.addEventListener(type, fn, (bubble||false));
}
function removeEvent(type, fn, bubble) {
window.removeEventListener(type, fn, (bubble||false));
}
function isNodeName(el, tag) {
return (el.nodeName||"").toLowerCase() === tag.toLowerCase();
}
function directionCheck(x, y) {
x = (x > 0) ? 1 : -1;
y = (y > 0) ? 1 : -1;
if (direction.x !== x || direction.y !== y) {
direction.x = x;
direction.y = y;
que = [];
lastScroll = 0;
}
}
var requestFrame = (function(){
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
function(callback, element, delay){
window.setTimeout(callback, delay || (1000/60));
};
})();
/***********************************************
* PULSE
***********************************************/
/**
* Viscous fluid with a pulse for part and decay for the rest.
* - Applies a fixed force over an interval (a damped acceleration), and
* - Lets the exponential bleed away the velocity over a longer interval
* - Michael Herf, http://stereopsis.com/stopping/
*/
function pulse_(x) {
var val, start, expx;
// test
x = x * pulseScale;
if (x < 1) { // acceleration
val = x - (1 - Math.exp(-x));
} else { // tail
// the previous animation ended here:
start = Math.exp(-1);
// simple viscous drag
x -= 1;
expx = 1 - Math.exp(-x);
val = start + (expx * (1 - start));
}
return val * pulseNormalize;
}
function pulse(x) | {
if (x >= 1) return 1;
if (x <= 0) return 0;
if (pulseNormalize == 1) {
pulseNormalize /= pulse_(1);
}
return pulse_(x);
} | identifier_body |
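pulse_ above splices a damped-acceleration curve x - (1 - e^-x) onto an exponential-decay tail at the scaled point x = 1 (both branches meet at e^-1), and pulse() lazily computes the normalisation so the eased value ends exactly at 1. The same curve in Python; pulseScale's default here is an assumption, since the original reads it from its settings:

import math

PULSE_SCALE = 8      # assumed default split point
_normalize = 1.0

def _pulse_raw(x):
    x *= PULSE_SCALE
    if x < 1:                         # acceleration phase
        val = x - (1 - math.exp(-x))
    else:                             # decay tail, continuous at x == 1
        start = math.exp(-1)
        val = start + (1 - math.exp(-(x - 1))) * (1 - start)
    return val * _normalize

def pulse(x):
    global _normalize
    if x >= 1:
        return 1.0
    if x <= 0:
        return 0.0
    if _normalize == 1.0:
        _normalize = 1.0 / _pulse_raw(1.0)   # scale so pulse(1) == 1
    return _pulse_raw(x)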
|
bindings.js | }
// setQuiltIDs deterministically sets the id field of objects based on
// their attributes. The _refID field is required to differentiate between multiple
// references to the same object, and multiple instantiations with the exact
// same attributes.
function setQuiltIDs(objs) {
// The refIDs for each identical instance.
var refIDs = {};
objs.forEach(function(obj) {
var k = key(obj);
if (!refIDs[k]) {
refIDs[k] = [];
}
refIDs[k].push(obj._refID);
});
// If there are multiple references to the same object, there will be duplicate
// refIDs.
Object.keys(refIDs).forEach(function(k) {
refIDs[k] = _.uniq(refIDs[k]).sort();
});
objs.forEach(function(obj) {
var k = key(obj);
obj.id = hash(k + refIDs[k].indexOf(obj._refID));
});
}
// Convert the deployment to the QRI deployment format.
Deployment.prototype.toQuiltRepresentation = function() {
this.vet();
setQuiltIDs(this.machines);
var containers = [];
this.services.forEach(function(serv) {
serv.containers.forEach(function(c) {
containers.push(c);
});
});
setQuiltIDs(containers);
// Map from container ID to container.
var containerMap = {};
var services = [];
var connections = [];
var placements = [];
// For each service, convert the associated connections and placement rules.
// Also, aggregate all containers referenced by services.
this.services.forEach(function(service) {
connections = connections.concat(service.getQuiltConnections());
placements = placements.concat(service.getQuiltPlacements());
// Collect the containers IDs, and add them to the container map.
var ids = [];
service.containers.forEach(function(container) {
ids.push(container.id);
containerMap[container.id] = container;
});
services.push({
name: service.name,
ids: ids,
annotations: service.annotations
});
});
var containers = [];
Object.keys(containerMap).forEach(function(cid) {
containers.push(containerMap[cid]);
});
return {
machines: this.machines,
labels: services,
containers: containers,
connections: connections,
placements: placements,
invariants: this.invariants,
namespace: this.namespace,
adminACL: this.adminACL,
maxPrice: this.maxPrice
};
};
// Check if all referenced services in connections and placements are really deployed.
Deployment.prototype.vet = function() {
var labelMap = {};
this.services.forEach(function(service) {
labelMap[service.name] = true;
});
this.services.forEach(function(service) {
service.connections.forEach(function(conn) {
var to = conn.to.name;
if (!labelMap[to]) {
throw service.name + " has a connection to undeployed service: " + to;
}
});
var hasFloatingIp = false;
service.placements.forEach(function(plcm) {
if (plcm.floatingIp) {
hasFloatingIp = true;
}
var otherLabel = plcm.otherLabel;
if (otherLabel !== undefined && !labelMap[otherLabel]) {
throw service.name + " has a placement in terms of an " +
"undeployed service: " + otherLabel;
}
});
if (hasFloatingIp && service.incomingPublic.length
&& service.containers.length > 1) {
throw service.name + " has a floating IP and multiple containers. " +
"This is not yet supported."
}
});
};
// deploy adds an object, or list of objects, to the deployment.
// Deployable objects must implement the deploy(deployment) interface.
Deployment.prototype.deploy = function(toDeployList) {
if (toDeployList.constructor !== Array) {
toDeployList = [toDeployList];
}
var that = this;
toDeployList.forEach(function(toDeploy) {
if (!toDeploy.deploy) {
throw "only objects that implement \"deploy(deployment)\" can be deployed";
}
toDeploy.deploy(that);
});
};
Deployment.prototype.assert = function(rule, desired) {
this.invariants.push(new Assertion(rule, desired));
};
function Service(name, containers) {
this.name = uniqueLabelName(name);
this.containers = containers;
this.annotations = [];
this.placements = [];
this.connections = [];
this.outgoingPublic = [];
this.incomingPublic = [];
}
// Get the Quilt hostname that represents the entire service.
Service.prototype.hostname = function() {
return this.name + ".q";
};
// Get a list of Quilt hostnames that address the containers within the service.
Service.prototype.children = function() {
var i;
var res = [];
for (i = 1; i < this.containers.length + 1; i++) {
res.push(i + "." + this.name + ".q");
}
return res;
};
Service.prototype.annotate = function(annotation) {
this.annotations.push(annotation);
};
Service.prototype.canReach = function(target) {
if (target === publicInternet) {
return reachable(this.name, publicInternetLabel);
}
return reachable(this.name, target.name);
};
Service.prototype.canReachACL = function(target) {
return reachableACL(this.name, target.name);
};
Service.prototype.between = function(src, dst) {
return between(src.name, this.name, dst.name);
};
Service.prototype.neighborOf = function(target) {
return neighbor(this.name, target.name);
};
Service.prototype.deploy = function(deployment) {
deployment.services.push(this);
};
Service.prototype.connect = function(range, to) {
range = boxRange(range);
if (to === publicInternet) {
return this.connectToPublic(range);
}
this.connections.push(new Connection(range, to));
};
// publicInternet is an object that looks like another service that can be
// connected to or from. However, it is actually just syntactic sugar to hide
// the connectToPublic and connectFromPublic functions.
var publicInternet = {
connect: function(range, to) {
to.connectFromPublic(range);
},
canReach: function(to) {
return reachable(publicInternetLabel, to.name);
}
};
// Allow outbound traffic from the service to public internet.
Service.prototype.connectToPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.outgoingPublic.push(range);
};
// Allow inbound traffic from public internet to the service.
Service.prototype.connectFromPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.incomingPublic.push(range);
};
Service.prototype.place = function(rule) {
this.placements.push(rule);
};
Service.prototype.getQuiltConnections = function() {
var connections = [];
var that = this;
this.connections.forEach(function(conn) {
connections.push({
from: that.name,
to: conn.to.name,
minPort: conn.minPort,
maxPort: conn.maxPort
});
});
this.outgoingPublic.forEach(function(rng) {
connections.push({
from: that.name,
to: publicInternetLabel,
minPort: rng.min,
maxPort: rng.max
});
});
this.incomingPublic.forEach(function(rng) {
connections.push({
from: publicInternetLabel,
to: that.name,
minPort: rng.min,
maxPort: rng.max
});
});
return connections;
};
Service.prototype.getQuiltPlacements = function() {
var placements = [];
var that = this;
this.placements.forEach(function(placement) {
placements.push({
targetLabel: that.name,
exclusive: placement.exclusive,
otherLabel: placement.otherLabel || "",
provider: placement.provider || "",
size: placement.size || "",
region: placement.region || "",
floatingIp: placement.floatingIp || ""
});
});
return placements;
};
var labelNameCount = {};
function uniqueLabelName(name) {
if (!(name in labelNameCount)) {
labelNameCount[name] = 0;
}
var count = ++labelNameCount[name];
if (count == 1) {
return name;
}
return name + labelNameCount[name];
}
// Box raw integers into range.
function boxRange(x) {
if (x === undefined) {
return new Range(0, 0);
}
if (typeof x === "number") {
x = new Range(x, x);
}
return x;
}
function Machine(optionalArgs) {
this._refID = _.uniqueId();
this.provider = optionalArgs.provider || "";
this.role = optionalArgs.role || "";
this.region = optionalArgs.region || "";
this.size = optionalArgs.size || "";
this.floatingIp = optionalArgs.floatingIp || "";
this.diskSize = optionalArgs.diskSize || 0;
this.sshKeys = optionalArgs.sshKeys || [];
this.cpu = boxRange(optionalArgs.cpu);
this.ram = boxRange(optionalArgs.ram);
}
Machine.prototype.deploy = function(deployment) {
deployment.machines.push(this);
};
// Create a new machine with the same attributes.
Machine.prototype.clone = function() {
// _.clone only creates a shallow copy, so we must clone sshKeys ourselves.
var keyClone = _.clone(this.sshKeys);
| return JSON.stringify(keyObj); | random_line_split |
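setQuiltIDs above makes ids deterministic even when several objects share identical attributes: each object's key is hashed together with the rank of its _refID among the sorted unique _refIDs for that key. The same scheme in Python; key() and the hash are stand-ins for the module's own helpers:

import hashlib
import json

def key(obj):
    # Stand-in for the module's key(): a stable serialisation of attributes.
    return json.dumps({k: v for k, v in obj.items() if k != "_refID"},
                      sort_keys=True)

def set_quilt_ids(objs):
    ref_ids = {}
    for obj in objs:
        ref_ids.setdefault(key(obj), []).append(obj["_refID"])
    for k in ref_ids:
        ref_ids[k] = sorted(set(ref_ids[k]))      # unique refs, stable order
    for obj in objs:
        k = key(obj)
        rank = ref_ids[k].index(obj["_refID"])    # 0, 1, ... per identical copy
        obj["id"] = hashlib.sha1((k + str(rank)).encode()).hexdigest()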