file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
gotoSymbolQuickAccess.ts | false;
return;
}
// Reveal
editor.revealRangeInCenter(item.range.selection, ScrollType.Smooth);
// Decorate
this.addDecorations(editor, item.range.decoration);
}
}));
return disposables;
}
protected async doGetSymbolPicks(symbolsPromise: Promise<DocumentSymbol[]>, query: IPreparedQuery, options: { extraContainerLabel?: string } | undefined, token: CancellationToken): Promise<Array<IGotoSymbolQuickPickItem | IQuickPickSeparator>> {
const symbols = await symbolsPromise;
if (token.isCancellationRequested) {
return [];
}
const filterBySymbolKind = query.original.indexOf(AbstractGotoSymbolQuickAccessProvider.SCOPE_PREFIX) === 0;
const filterPos = filterBySymbolKind ? 1 : 0;
// Split between symbol and container query
let symbolQuery: IPreparedQuery;
let containerQuery: IPreparedQuery | undefined;
if (query.values && query.values.length > 1) {
symbolQuery = pieceToQuery(query.values[0]); // symbol: only match on first part
containerQuery = pieceToQuery(query.values.slice(1)); // container: match on all but first parts
} else {
symbolQuery = query;
}
// Convert to symbol picks and apply filtering
const filteredSymbolPicks: IGotoSymbolQuickPickItem[] = [];
for (let index = 0; index < symbols.length; index++) {
const symbol = symbols[index];
const symbolLabel = trim(symbol.name);
const symbolLabelWithIcon = `$(symbol-${SymbolKinds.toString(symbol.kind) || 'property'}) ${symbolLabel}`;
const symbolLabelIconOffset = symbolLabelWithIcon.length - symbolLabel.length;
let containerLabel = symbol.containerName;
if (options?.extraContainerLabel) {
if (containerLabel) {
containerLabel = `${options.extraContainerLabel} • ${containerLabel}`;
} else {
containerLabel = options.extraContainerLabel;
}
}
let symbolScore: number | undefined = undefined;
let symbolMatches: IMatch[] | undefined = undefined;
let containerScore: number | undefined = undefined;
let containerMatches: IMatch[] | undefined = undefined;
if (query.original.length > filterPos) {
// First: try to score on the entire query, it is possible that
// the symbol matches perfectly (e.g. searching for "change log"
// can be a match on a markdown symbol "change log"). In that
// case we want to skip the container query altogether.
let skipContainerQuery = false;
if (symbolQuery !== query) {
[symbolScore, symbolMatches] = scoreFuzzy2(symbolLabelWithIcon, { ...query, values: undefined /* disable multi-query support */ }, filterPos, symbolLabelIconOffset);
if (typeof symbolScore === 'number') {
skipContainerQuery = true; // since we consumed the query, skip any container matching
}
}
// Otherwise: score on the symbol query and match on the container later
if (typeof symbolScore !== 'number') {
[symbolScore, symbolMatches] = scoreFuzzy2(symbolLabelWithIcon, symbolQuery, filterPos, symbolLabelIconOffset);
if (typeof symbolScore !== 'number') {
continue;
}
}
// Score by container if specified
if (!skipContainerQuery && containerQuery) {
if (containerLabel && containerQuery.original.length > 0) {
[containerScore, containerMatches] = scoreFuzzy2(containerLabel, containerQuery);
}
if (typeof containerScore !== 'number') {
continue;
}
if (typeof symbolScore === 'number') {
symbolScore += containerScore; // boost symbolScore by containerScore
}
}
}
const deprecated = symbol.tags && symbol.tags.indexOf(SymbolTag.Deprecated) >= 0;
filteredSymbolPicks.push({
index,
kind: symbol.kind,
score: symbolScore,
label: symbolLabelWithIcon,
ariaLabel: symbolLabel,
description: containerLabel,
highlights: deprecated ? undefined : {
label: symbolMatches,
description: containerMatches
},
range: {
selection: Range.collapseToStart(symbol.selectionRange),
decoration: symbol.range
},
strikethrough: deprecated,
buttons: (() => {
const openSideBySideDirection = this.options?.openSideBySideDirection();
if (!openSideBySideDirection) {
return undefined;
}
return [
{
iconClass: openSideBySideDirection === 'right' ? Codicon.splitHorizontal.classNames : Codicon.splitVertical.classNames,
tooltip: openSideBySideDirection === 'right' ? localize('openToSide', "Open to the Side") : localize('openToBottom', "Open to the Bottom")
}
];
})()
});
}
// Sort by score
const sortedFilteredSymbolPicks = filteredSymbolPicks.sort((symbolA, symbolB) => filterBySymbolKind ?
this.compareByKindAndScore(symbolA, symbolB) :
this.compareByScore(symbolA, symbolB)
);
// Add separator for types
// - @ only total number of symbols
// - @: grouped by symbol kind
let symbolPicks: Array<IGotoSymbolQuickPickItem | IQuickPickSeparator> = [];
if (filterBySymbolKind) {
let lastSymbolKind: SymbolKind | undefined = undefined;
let lastSeparator: IQuickPickSeparator | undefined = undefined;
let lastSymbolKindCounter = 0;
function updateLastSeparatorLabel(): void {
if (lastSeparator && typeof lastSymbolKind === 'number' && lastSymbolKindCounter > 0) {
lastSeparator.label = format(NLS_SYMBOL_KIND_CACHE[lastSymbolKind] || FALLBACK_NLS_SYMBOL_KIND, lastSymbolKindCounter);
}
}
for (const symbolPick of sortedFilteredSymbolPicks) {
// Found new kind
if (lastSymbolKind !== symbolPick.kind) {
// Update last separator with number of symbols we found for kind
updateLastSeparatorLabel();
lastSymbolKind = symbolPick.kind;
lastSymbolKindCounter = 1;
// Add new separator for new kind
lastSeparator = { type: 'separator' };
symbolPicks.push(lastSeparator);
}
// Existing kind, keep counting
else {
lastSymbolKindCounter++;
}
// Add to final result
symbolPicks.push(symbolPick);
}
// Update last separator with number of symbols we found for kind
updateLastSeparatorLabel();
} else if (sortedFilteredSymbolPicks.length > 0) {
symbolPicks = [
{ label: localize('symbols', "symbols ({0})", filteredSymbolPicks.length), type: 'separator' },
...sortedFilteredSymbolPicks
];
}
return symbolPicks;
}
private compareByScore(symbolA: IGotoSymbolQuickPickItem, symbolB: IGotoSymbolQuickPickItem): number {
if (typeof symbolA.score !== 'number' && typeof symbolB.score === 'number') {
return 1;
} else if (typeof symbolA.score === 'number' && typeof symbolB.score !== 'number') {
return -1;
}
if (typeof symbolA.score === 'number' && typeof symbolB.score === 'number') {
if (symbolA.score > symbolB.score) {
return -1;
} else if (symbolA.score < symbolB.score) {
return 1;
}
}
if (symbolA.index < symbolB.index) {
return -1;
} else if (symbolA.index > symbolB.index) {
return 1;
}
return 0;
}
private compareByKindAndScore(symbolA: IGotoSymbolQuickPickItem, symbolB: IGotoSymbolQuickPickItem): number {
const kindA = NLS_SYMBOL_KIND_CACHE[symbolA.kind] || FALLBACK_NLS_SYMBOL_KIND;
const kindB = NLS_SYMBOL_KIND_CACHE[symbolB.kind] || FALLBACK_NLS_SYMBOL_KIND;
// Sort by type first if scoped search
const result = kindA.localeCompare(kindB);
if (result === 0) {
return this.compareByScore(symbolA, symbolB);
}
return result;
}
protected async getDocumentSymbols(document: ITextModel, flatten: boolean, token: CancellationToken): Promise<DocumentSymbol[]> {
const model = await OutlineModel.create(document, token);
if (token.isCancellationRequested) {
return [];
}
const roots: DocumentSymbol[] = [];
for (const child of model.children.values()) {
if (child instanceof OutlineElement) {
roots.push(child.symbol);
} else {
roots.push(...Iterable.map(child.children.values(), child => child.symbol));
}
}
let flatEntries: DocumentSymbol[] = [];
if (flatten) {
| this.flattenDocumentSymbols(flatEntries, roots, '');
} e | conditional_block |
|
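The row above (and every row below) renders its multi-line prefix, suffix, and middle cells inline, so the `|` delimiters land mid-code and a sample only makes sense once the cells are recombined. A minimal sketch of one row's shape and its reconstruction (the struct and enum here are illustrative; the dataset itself ships only the five columns named in the header):

```rust
/// Illustrative representation of one dataset row; field names mirror
/// the column headers, nothing here is shipped with the dataset.
#[derive(Debug, Clone, Copy, PartialEq)]
enum FimType {
    ConditionalBlock, // `middle` is a conditional block (e.g. an if/else body)
    RandomLineSplit,  // `middle` is an arbitrary run of source lines
    IdentifierName,   // `middle` is a single masked identifier
    IdentifierBody,   // `middle` is a function or method body
}

struct FimSample {
    file_name: String,
    prefix: String, // code before the masked span
    suffix: String, // code after the masked span
    middle: String, // the span a fill-in-the-middle model must predict
    fim_type: FimType,
}

impl FimSample {
    /// The original source is always prefix + middle + suffix.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}
```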
sip.rs | (input) {
Ok((_, request)) => {
let mut tx = self.new_tx();
tx.request = Some(request);
if let Ok((_, req_line)) = sip_take_line(input) {
tx.request_line = req_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
fn parse_response(&mut self, input: &[u8]) -> bool {
match sip_parse_response(input) {
Ok((_, response)) => {
let mut tx = self.new_tx();
tx.response = Some(response);
if let Ok((_, resp_line)) = sip_take_line(input) {
tx.response_line = resp_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
}
impl SIPTransaction {
pub fn new(id: u64) -> SIPTransaction {
SIPTransaction {
id: id,
de_state: None,
request: None,
response: None,
request_line: None,
response_line: None,
events: std::ptr::null_mut(),
tx_data: applayer::AppLayerTxData::new(),
}
}
}
impl Drop for SIPTransaction {
fn drop(&mut self) {
if self.events != std::ptr::null_mut() {
core::sc_app_layer_decoder_events_free_events(&mut self.events);
}
if let Some(state) = self.de_state {
sc_detect_engine_state_free(state);
}
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = SIPState::new();
let boxed = Box::new(state);
return unsafe { std::mem::transmute(boxed) };
}
#[no_mangle]
pub extern "C" fn rs_sip_state_free(state: *mut std::os::raw::c_void) {
let mut state: Box<SIPState> = unsafe { std::mem::transmute(state) };
state.free();
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx(
state: *mut std::os::raw::c_void,
tx_id: u64,
) -> *mut std::os::raw::c_void {
let state = cast_pointer!(state, SIPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => unsafe { std::mem::transmute(tx) },
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_count(state: *mut std::os::raw::c_void) -> u64 {
let state = cast_pointer!(state, SIPState);
state.tx_id
}
#[no_mangle]
pub extern "C" fn rs_sip_state_tx_free(state: *mut std::os::raw::c_void, tx_id: u64) {
let state = cast_pointer!(state, SIPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_sip_state_progress_completion_status(_direction: u8) -> std::os::raw::c_int {
return 1;
}
#[no_mangle]
pub extern "C" fn rs_sip_tx_get_alstate_progress(
_tx: *mut std::os::raw::c_void,
_direction: u8,
) -> std::os::raw::c_int {
1
}
#[no_mangle]
pub extern "C" fn rs_sip_state_set_tx_detect_state(
tx: *mut std::os::raw::c_void,
de_state: &mut core::DetectEngineState,
) -> std::os::raw::c_int {
let tx = cast_pointer!(tx, SIPTransaction);
tx.de_state = Some(de_state);
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_detect_state(
tx: *mut std::os::raw::c_void,
) -> *mut core::DetectEngineState {
let tx = cast_pointer!(tx, SIPTransaction);
match tx.de_state {
Some(ds) => ds,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_events(
tx: *mut std::os::raw::c_void,
) -> *mut core::AppLayerDecoderEvents {
let tx = cast_pointer!(tx, SIPTransaction);
return tx.events;
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info(
event_name: *const std::os::raw::c_char,
event_id: *mut std::os::raw::c_int,
event_type: *mut core::AppLayerEventType,
) -> std::os::raw::c_int {
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"incomplete_data" => SIPEvent::IncompleteData as i32,
"invalid_data" => SIPEvent::InvalidData as i32,
_ => -1, // unknown event
}
}
Err(_) => -1, // UTF-8 conversion failed
};
unsafe {
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as std::os::raw::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info_by_id(
event_id: std::os::raw::c_int,
event_name: *mut *const std::os::raw::c_char,
event_type: *mut core::AppLayerEventType,
) -> i8 {
if let Some(e) = SIPEvent::from_i32(event_id as i32) {
let estr = match e {
SIPEvent::IncompleteData => "incomplete_data\0",
SIPEvent::InvalidData => "invalid_data\0",
};
unsafe {
*event_name = estr.as_ptr() as *const std::os::raw::c_char;
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
};
0
} else {
-1
}
}
static mut ALPROTO_SIP: AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_ts(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: *mut u8,
) -> AppProto {
let buf = build_slice!(input, input_len as usize);
if sip_parse_request(buf).is_ok() {
return unsafe { ALPROTO_SIP };
}
return ALPROTO_UNKNOWN;
}
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_tc(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: *mut u8,
) -> AppProto {
let buf = build_slice!(input, input_len as usize);
if sip_parse_response(buf).is_ok() {
return unsafe { ALPROTO_SIP };
}
return ALPROTO_UNKNOWN;
}
#[no_mangle]
pub extern "C" fn rs_sip_parse_request(
_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
input: *const u8,
input_len: u32,
_data: *const std::os::raw::c_void,
_flags: u8,
) -> AppLayerResult {
let buf = build_slice!(input, input_len as usize);
let state = cast_pointer!(state, SIPState);
state.parse_request(buf).into()
}
#[no_mangle]
pub extern "C" fn rs_sip_parse_response(
_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
input: *const u8,
input_len: u32,
_data: *const std::os::raw::c_void,
_flags: u8,
) -> AppLayerResult {
let buf = build_slice!(input, input_len as usize);
let state = cast_pointer!(state, SIPState);
state.parse_response(buf).into()
} |
export_tx_data_get!(rs_sip_get_tx_data, SIPTransaction);
const PARSER_NAME: &'static [u8] = b"sip\0"; | random_line_split |
|
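In the sip.rs row above, `get_tx_by_id` and `free_tx` both compare `tx.id == tx_id + 1`. That works because `new_tx` increments the counter before handing out an id, so internal ids start at 1 while the C API appears to pass 0-based ids in. A standalone sketch of that convention (names are illustrative, not Suricata's API):

```rust
// Illustrative: a minimal transaction store using the same 1-based
// internal id / 0-based external id convention as the parser above.
struct Tx { id: u64 }

struct Store { txs: Vec<Tx>, tx_id: u64 }

impl Store {
    fn new_tx(&mut self) -> Tx {
        self.tx_id += 1; // first transaction gets internal id 1
        Tx { id: self.tx_id }
    }
    fn get(&self, external_id: u64) -> Option<&Tx> {
        // callers index from 0, internal ids start at 1, hence the +1
        self.txs.iter().find(|tx| tx.id == external_id + 1)
    }
}

fn main() {
    let mut s = Store { txs: Vec::new(), tx_id: 0 };
    let tx = s.new_tx();
    s.txs.push(tx);
    assert!(s.get(0).is_some()); // external id 0 maps to internal id 1
    assert!(s.get(1).is_none());
}
```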
sip.rs | u64,
pub request: Option<Request>,
pub response: Option<Response>,
pub request_line: Option<String>,
pub response_line: Option<String>,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
tx_data: applayer::AppLayerTxData,
}
impl SIPState {
pub fn new() -> SIPState {
SIPState {
transactions: Vec::new(),
tx_id: 0,
}
}
pub fn free(&mut self) {
self.transactions.clear();
}
fn new_tx(&mut self) -> SIPTransaction {
self.tx_id += 1;
SIPTransaction::new(self.tx_id)
}
fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&SIPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) {
let tx = self
.transactions
.iter()
.position(|ref tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
}
fn set_event(&mut self, event: SIPEvent) {
if let Some(tx) = self.transactions.last_mut() {
let ev = event as u8;
core::sc_app_layer_decoder_events_set_event_raw(&mut tx.events, ev);
}
}
fn parse_request(&mut self, input: &[u8]) -> bool {
match sip_parse_request(input) {
Ok((_, request)) => {
let mut tx = self.new_tx();
tx.request = Some(request);
if let Ok((_, req_line)) = sip_take_line(input) {
tx.request_line = req_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
fn parse_response(&mut self, input: &[u8]) -> bool {
match sip_parse_response(input) {
Ok((_, response)) => {
let mut tx = self.new_tx();
tx.response = Some(response);
if let Ok((_, resp_line)) = sip_take_line(input) {
tx.response_line = resp_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
}
impl SIPTransaction {
pub fn | (id: u64) -> SIPTransaction {
SIPTransaction {
id: id,
de_state: None,
request: None,
response: None,
request_line: None,
response_line: None,
events: std::ptr::null_mut(),
tx_data: applayer::AppLayerTxData::new(),
}
}
}
impl Drop for SIPTransaction {
fn drop(&mut self) {
if self.events != std::ptr::null_mut() {
core::sc_app_layer_decoder_events_free_events(&mut self.events);
}
if let Some(state) = self.de_state {
sc_detect_engine_state_free(state);
}
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = SIPState::new();
let boxed = Box::new(state);
return unsafe { std::mem::transmute(boxed) };
}
#[no_mangle]
pub extern "C" fn rs_sip_state_free(state: *mut std::os::raw::c_void) {
let mut state: Box<SIPState> = unsafe { std::mem::transmute(state) };
state.free();
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx(
state: *mut std::os::raw::c_void,
tx_id: u64,
) -> *mut std::os::raw::c_void {
let state = cast_pointer!(state, SIPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => unsafe { std::mem::transmute(tx) },
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_count(state: *mut std::os::raw::c_void) -> u64 {
let state = cast_pointer!(state, SIPState);
state.tx_id
}
#[no_mangle]
pub extern "C" fn rs_sip_state_tx_free(state: *mut std::os::raw::c_void, tx_id: u64) {
let state = cast_pointer!(state, SIPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_sip_state_progress_completion_status(_direction: u8) -> std::os::raw::c_int {
return 1;
}
#[no_mangle]
pub extern "C" fn rs_sip_tx_get_alstate_progress(
_tx: *mut std::os::raw::c_void,
_direction: u8,
) -> std::os::raw::c_int {
1
}
#[no_mangle]
pub extern "C" fn rs_sip_state_set_tx_detect_state(
tx: *mut std::os::raw::c_void,
de_state: &mut core::DetectEngineState,
) -> std::os::raw::c_int {
let tx = cast_pointer!(tx, SIPTransaction);
tx.de_state = Some(de_state);
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_detect_state(
tx: *mut std::os::raw::c_void,
) -> *mut core::DetectEngineState {
let tx = cast_pointer!(tx, SIPTransaction);
match tx.de_state {
Some(ds) => ds,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_events(
tx: *mut std::os::raw::c_void,
) -> *mut core::AppLayerDecoderEvents {
let tx = cast_pointer!(tx, SIPTransaction);
return tx.events;
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info(
event_name: *const std::os::raw::c_char,
event_id: *mut std::os::raw::c_int,
event_type: *mut core::AppLayerEventType,
) -> std::os::raw::c_int {
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"incomplete_data" => SIPEvent::IncompleteData as i32,
"invalid_data" => SIPEvent::InvalidData as i32,
_ => -1, // unknown event
}
}
Err(_) => -1, // UTF-8 conversion failed
};
unsafe {
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as std::os::raw::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info_by_id(
event_id: std::os::raw::c_int,
event_name: *mut *const std::os::raw::c_char,
event_type: *mut core::AppLayerEventType,
) -> i8 {
if let Some(e) = SIPEvent::from_i32(event_id as i32) {
let estr = match e {
SIPEvent::IncompleteData => "incomplete_data\0",
SIPEvent::InvalidData => "invalid_data\0",
};
unsafe {
*event_name = estr.as_ptr() as *const std::os::raw::c_char;
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
};
0
} else {
-1
}
}
static mut ALPROTO_SIP: AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_ts(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: *mut u8,
) -> AppProto {
let buf = build_slice!(input, input_len as usize);
if sip_parse_request(buf).is_ok() {
return unsafe { ALPROTO_SIP };
}
return ALPROTO_UNKNOWN;
}
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_tc(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: * | new | identifier_name |
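`rs_sip_state_new` and `rs_sip_state_free` above round-trip the boxed state through `std::mem::transmute`. `Box::into_raw` and `Box::from_raw` express the same ownership hand-off without `transmute`; the ntp.rs rows further down already use `Box::into_raw`. A hedged sketch of the pair (type and function names are placeholders, not Suricata's):

```rust
struct State;

#[no_mangle]
pub extern "C" fn state_new() -> *mut std::os::raw::c_void {
    // Hand ownership of the boxed state to the C side.
    Box::into_raw(Box::new(State)) as *mut std::os::raw::c_void
}

#[no_mangle]
pub extern "C" fn state_free(state: *mut std::os::raw::c_void) {
    if state.is_null() {
        return;
    }
    // Reclaim ownership; the Box is dropped (and freed) at end of scope.
    let _boxed = unsafe { Box::from_raw(state as *mut State) };
}
```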
sip.rs | u64,
pub request: Option<Request>,
pub response: Option<Response>,
pub request_line: Option<String>,
pub response_line: Option<String>,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
tx_data: applayer::AppLayerTxData,
}
impl SIPState {
pub fn new() -> SIPState {
SIPState {
transactions: Vec::new(),
tx_id: 0,
}
}
pub fn free(&mut self) |
fn new_tx(&mut self) -> SIPTransaction {
self.tx_id += 1;
SIPTransaction::new(self.tx_id)
}
fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&SIPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) {
let tx = self
.transactions
.iter()
.position(|ref tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
}
fn set_event(&mut self, event: SIPEvent) {
if let Some(tx) = self.transactions.last_mut() {
let ev = event as u8;
core::sc_app_layer_decoder_events_set_event_raw(&mut tx.events, ev);
}
}
fn parse_request(&mut self, input: &[u8]) -> bool {
match sip_parse_request(input) {
Ok((_, request)) => {
let mut tx = self.new_tx();
tx.request = Some(request);
if let Ok((_, req_line)) = sip_take_line(input) {
tx.request_line = req_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
fn parse_response(&mut self, input: &[u8]) -> bool {
match sip_parse_response(input) {
Ok((_, response)) => {
let mut tx = self.new_tx();
tx.response = Some(response);
if let Ok((_, resp_line)) = sip_take_line(input) {
tx.response_line = resp_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
}
impl SIPTransaction {
pub fn new(id: u64) -> SIPTransaction {
SIPTransaction {
id: id,
de_state: None,
request: None,
response: None,
request_line: None,
response_line: None,
events: std::ptr::null_mut(),
tx_data: applayer::AppLayerTxData::new(),
}
}
}
impl Drop for SIPTransaction {
fn drop(&mut self) {
if self.events != std::ptr::null_mut() {
core::sc_app_layer_decoder_events_free_events(&mut self.events);
}
if let Some(state) = self.de_state {
sc_detect_engine_state_free(state);
}
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = SIPState::new();
let boxed = Box::new(state);
return unsafe { std::mem::transmute(boxed) };
}
#[no_mangle]
pub extern "C" fn rs_sip_state_free(state: *mut std::os::raw::c_void) {
let mut state: Box<SIPState> = unsafe { std::mem::transmute(state) };
state.free();
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx(
state: *mut std::os::raw::c_void,
tx_id: u64,
) -> *mut std::os::raw::c_void {
let state = cast_pointer!(state, SIPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => unsafe { std::mem::transmute(tx) },
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_count(state: *mut std::os::raw::c_void) -> u64 {
let state = cast_pointer!(state, SIPState);
state.tx_id
}
#[no_mangle]
pub extern "C" fn rs_sip_state_tx_free(state: *mut std::os::raw::c_void, tx_id: u64) {
let state = cast_pointer!(state, SIPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_sip_state_progress_completion_status(_direction: u8) -> std::os::raw::c_int {
return 1;
}
#[no_mangle]
pub extern "C" fn rs_sip_tx_get_alstate_progress(
_tx: *mut std::os::raw::c_void,
_direction: u8,
) -> std::os::raw::c_int {
1
}
#[no_mangle]
pub extern "C" fn rs_sip_state_set_tx_detect_state(
tx: *mut std::os::raw::c_void,
de_state: &mut core::DetectEngineState,
) -> std::os::raw::c_int {
let tx = cast_pointer!(tx, SIPTransaction);
tx.de_state = Some(de_state);
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_detect_state(
tx: *mut std::os::raw::c_void,
) -> *mut core::DetectEngineState {
let tx = cast_pointer!(tx, SIPTransaction);
match tx.de_state {
Some(ds) => ds,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_events(
tx: *mut std::os::raw::c_void,
) -> *mut core::AppLayerDecoderEvents {
let tx = cast_pointer!(tx, SIPTransaction);
return tx.events;
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info(
event_name: *const std::os::raw::c_char,
event_id: *mut std::os::raw::c_int,
event_type: *mut core::AppLayerEventType,
) -> std::os::raw::c_int {
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"incomplete_data" => SIPEvent::IncompleteData as i32,
"invalid_data" => SIPEvent::InvalidData as i32,
_ => -1, // unknown event
}
}
Err(_) => -1, // UTF-8 conversion failed
};
unsafe {
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as std::os::raw::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info_by_id(
event_id: std::os::raw::c_int,
event_name: *mut *const std::os::raw::c_char,
event_type: *mut core::AppLayerEventType,
) -> i8 {
if let Some(e) = SIPEvent::from_i32(event_id as i32) {
let estr = match e {
SIPEvent::IncompleteData => "incomplete_data\0",
SIPEvent::InvalidData => "invalid_data\0",
};
unsafe {
*event_name = estr.as_ptr() as *const std::os::raw::c_char;
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
};
0
} else {
-1
}
}
static mut ALPROTO_SIP: AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_ts(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: *mut u8,
) -> AppProto {
let buf = build_slice!(input, input_len as usize);
if sip_parse_request(buf).is_ok() {
return unsafe { ALPROTO_SIP };
}
return ALPROTO_UNKNOWN;
}
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_tc(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: | {
self.transactions.clear();
} | identifier_body |
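The `Drop` impl for `SIPTransaction` above releases the C-side event list and detect-engine state whenever a transaction leaves the `Vec`. A minimal sketch of that RAII shape; `ffi_free` is a hypothetical stand-in for the C deallocators (`sc_app_layer_decoder_events_free_events` and friends):

```rust
// Illustrative RAII wrapper around a C-owned allocation.
struct CResource {
    ptr: *mut u8,
}

// Hypothetical stand-in for an extern "C" deallocator; a no-op here.
unsafe fn ffi_free(_ptr: *mut u8) {}

impl Drop for CResource {
    fn drop(&mut self) {
        if !self.ptr.is_null() {
            // Runs automatically on Vec::remove, clear(), or scope exit.
            unsafe { ffi_free(self.ptr) };
            self.ptr = std::ptr::null_mut();
        }
    }
}
```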
sip.rs | u64,
pub request: Option<Request>,
pub response: Option<Response>,
pub request_line: Option<String>,
pub response_line: Option<String>,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
tx_data: applayer::AppLayerTxData,
}
impl SIPState {
pub fn new() -> SIPState {
SIPState {
transactions: Vec::new(),
tx_id: 0,
}
}
pub fn free(&mut self) {
self.transactions.clear();
}
fn new_tx(&mut self) -> SIPTransaction {
self.tx_id += 1;
SIPTransaction::new(self.tx_id)
}
fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&SIPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) {
let tx = self
.transactions
.iter()
.position(|ref tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx |
}
fn set_event(&mut self, event: SIPEvent) {
if let Some(tx) = self.transactions.last_mut() {
let ev = event as u8;
core::sc_app_layer_decoder_events_set_event_raw(&mut tx.events, ev);
}
}
fn parse_request(&mut self, input: &[u8]) -> bool {
match sip_parse_request(input) {
Ok((_, request)) => {
let mut tx = self.new_tx();
tx.request = Some(request);
if let Ok((_, req_line)) = sip_take_line(input) {
tx.request_line = req_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
fn parse_response(&mut self, input: &[u8]) -> bool {
match sip_parse_response(input) {
Ok((_, response)) => {
let mut tx = self.new_tx();
tx.response = Some(response);
if let Ok((_, resp_line)) = sip_take_line(input) {
tx.response_line = resp_line;
}
self.transactions.push(tx);
return true;
}
Err(nom::Err::Incomplete(_)) => {
self.set_event(SIPEvent::IncompleteData);
return false;
}
Err(_) => {
self.set_event(SIPEvent::InvalidData);
return false;
}
}
}
}
impl SIPTransaction {
pub fn new(id: u64) -> SIPTransaction {
SIPTransaction {
id: id,
de_state: None,
request: None,
response: None,
request_line: None,
response_line: None,
events: std::ptr::null_mut(),
tx_data: applayer::AppLayerTxData::new(),
}
}
}
impl Drop for SIPTransaction {
fn drop(&mut self) {
if self.events != std::ptr::null_mut() {
core::sc_app_layer_decoder_events_free_events(&mut self.events);
}
if let Some(state) = self.de_state {
sc_detect_engine_state_free(state);
}
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = SIPState::new();
let boxed = Box::new(state);
return unsafe { std::mem::transmute(boxed) };
}
#[no_mangle]
pub extern "C" fn rs_sip_state_free(state: *mut std::os::raw::c_void) {
let mut state: Box<SIPState> = unsafe { std::mem::transmute(state) };
state.free();
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx(
state: *mut std::os::raw::c_void,
tx_id: u64,
) -> *mut std::os::raw::c_void {
let state = cast_pointer!(state, SIPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => unsafe { std::mem::transmute(tx) },
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_count(state: *mut std::os::raw::c_void) -> u64 {
let state = cast_pointer!(state, SIPState);
state.tx_id
}
#[no_mangle]
pub extern "C" fn rs_sip_state_tx_free(state: *mut std::os::raw::c_void, tx_id: u64) {
let state = cast_pointer!(state, SIPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_sip_state_progress_completion_status(_direction: u8) -> std::os::raw::c_int {
return 1;
}
#[no_mangle]
pub extern "C" fn rs_sip_tx_get_alstate_progress(
_tx: *mut std::os::raw::c_void,
_direction: u8,
) -> std::os::raw::c_int {
1
}
#[no_mangle]
pub extern "C" fn rs_sip_state_set_tx_detect_state(
tx: *mut std::os::raw::c_void,
de_state: &mut core::DetectEngineState,
) -> std::os::raw::c_int {
let tx = cast_pointer!(tx, SIPTransaction);
tx.de_state = Some(de_state);
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_tx_detect_state(
tx: *mut std::os::raw::c_void,
) -> *mut core::DetectEngineState {
let tx = cast_pointer!(tx, SIPTransaction);
match tx.de_state {
Some(ds) => ds,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_events(
tx: *mut std::os::raw::c_void,
) -> *mut core::AppLayerDecoderEvents {
let tx = cast_pointer!(tx, SIPTransaction);
return tx.events;
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info(
event_name: *const std::os::raw::c_char,
event_id: *mut std::os::raw::c_int,
event_type: *mut core::AppLayerEventType,
) -> std::os::raw::c_int {
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"incomplete_data" => SIPEvent::IncompleteData as i32,
"invalid_data" => SIPEvent::InvalidData as i32,
_ => -1, // unknown event
}
}
Err(_) => -1, // UTF-8 conversion failed
};
unsafe {
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as std::os::raw::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_sip_state_get_event_info_by_id(
event_id: std::os::raw::c_int,
event_name: *mut *const std::os::raw::c_char,
event_type: *mut core::AppLayerEventType,
) -> i8 {
if let Some(e) = SIPEvent::from_i32(event_id as i32) {
let estr = match e {
SIPEvent::IncompleteData => "incomplete_data\0",
SIPEvent::InvalidData => "invalid_data\0",
};
unsafe {
*event_name = estr.as_ptr() as *const std::os::raw::c_char;
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
};
0
} else {
-1
}
}
static mut ALPROTO_SIP: AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_ts(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: *mut u8,
) -> AppProto {
let buf = build_slice!(input, input_len as usize);
if sip_parse_request(buf).is_ok() {
return unsafe { ALPROTO_SIP };
}
return ALPROTO_UNKNOWN;
}
#[no_mangle]
pub extern "C" fn rs_sip_probing_parser_tc(
_flow: *const Flow,
_direction: u8,
input: *const u8,
input_len: u32,
_rdir: | {
let _ = self.transactions.remove(idx);
} | conditional_block |
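`set_event` above attaches a parser event to the most recent transaction via `last_mut()`, and silently does nothing when no transaction exists yet. A sketch that makes that contract explicit (names illustrative):

```rust
struct Tx {
    events: Vec<u8>,
}

/// Attach an event to the newest transaction; returns false when there
/// is no transaction yet (the parser above silently drops the event then).
fn set_event(txs: &mut Vec<Tx>, event: u8) -> bool {
    match txs.last_mut() {
        Some(tx) => {
            tx.events.push(event);
            true
        }
        None => false,
    }
}
```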
ntp.rs | use nom;
#[derive(AppLayerEvent)]
pub enum NTPEvent {
UnsolicitedResponse,
MalformedData,
NotRequest,
NotResponse,
}
pub struct NTPState {
/// List of transactions for this session
transactions: Vec<NTPTransaction>,
/// Events counter
events: u16,
/// tx counter for assigning incrementing ids to txs
tx_id: u64,
}
#[derive(Debug)]
pub struct NTPTransaction {
/// The NTP reference ID
pub xid: u32,
/// The internal transaction id
id: u64,
tx_data: applayer::AppLayerTxData,
}
impl Transaction for NTPTransaction {
fn id(&self) -> u64 {
self.id
}
}
impl NTPState {
pub fn new() -> NTPState {
NTPState{
transactions: Vec::new(),
events: 0,
tx_id: 0,
}
}
}
impl State<NTPTransaction> for NTPState {
fn get_transactions(&self) -> &[NTPTransaction] {
&self.transactions
}
}
impl NTPState {
/// Parse an NTP request message
///
/// Returns 0 if successful, or -1 on error
fn | (&mut self, i: &[u8], _direction: u8) -> i32 {
match parse_ntp(i) {
Ok((_,ref msg)) => {
// SCLogDebug!("parse_ntp: {:?}",msg);
if msg.mode == NtpMode::SymmetricActive || msg.mode == NtpMode::Client {
let mut tx = self.new_tx();
// use the reference id as identifier
tx.xid = msg.ref_id;
self.transactions.push(tx);
}
0
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("Insufficient data while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
Err(_) => {
SCLogDebug!("Error while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
}
}
fn free(&mut self) {
// All transactions are freed when the `transactions` object is freed.
// But let's be explicit
self.transactions.clear();
}
fn new_tx(&mut self) -> NTPTransaction {
self.tx_id += 1;
NTPTransaction::new(self.tx_id)
}
pub fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&NTPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) {
let tx = self.transactions.iter().position(|tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
}
/// Set an event. The event is set on the most recent transaction.
pub fn set_event(&mut self, event: NTPEvent) {
if let Some(tx) = self.transactions.last_mut() {
tx.tx_data.set_event(event as u8);
self.events += 1;
}
}
}
impl NTPTransaction {
pub fn new(id: u64) -> NTPTransaction {
NTPTransaction {
xid: 0,
id: id,
tx_data: applayer::AppLayerTxData::new(),
}
}
}
/// Returns *mut NTPState
#[no_mangle]
pub extern "C" fn rs_ntp_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = NTPState::new();
let boxed = Box::new(state);
return Box::into_raw(boxed) as *mut _;
}
/// Params:
/// - state: *mut NTPState as void pointer
#[no_mangle]
pub extern "C" fn rs_ntp_state_free(state: *mut std::os::raw::c_void) {
let mut ntp_state = unsafe{ Box::from_raw(state as *mut NTPState) };
ntp_state.free();
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_request(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 0) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_response(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 1) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx(state: *mut std::os::raw::c_void,
tx_id: u64)
-> *mut std::os::raw::c_void
{
let state = cast_pointer!(state,NTPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => tx as *const _ as *mut _,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx_count(state: *mut std::os::raw::c_void)
-> u64
{
let state = cast_pointer!(state,NTPState);
state.tx_id
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_tx_free(state: *mut std::os::raw::c_void,
tx_id: u64)
{
let state = cast_pointer!(state,NTPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_ntp_tx_get_alstate_progress(_tx: *mut std::os::raw::c_void,
_direction: u8)
-> std::os::raw::c_int
{
1
}
static mut ALPROTO_NTP : AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn ntp_probing_parser(_flow: *const Flow,
_direction: u8,
input:*const u8, input_len: u32,
_rdir: *mut u8) -> AppProto
{
let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, input_len as usize) };
let alproto = unsafe{ ALPROTO_NTP };
match parse_ntp(slice) {
Ok((_, ref msg)) => {
if msg.version == 3 || msg.version == 4 {
return alproto;
} else {
return unsafe{ALPROTO_FAILED};
}
},
Err(nom::Err::Incomplete(_)) => {
return ALPROTO_UNKNOWN;
},
Err(_) => {
return unsafe{ALPROTO_FAILED};
},
}
}
export_tx_data_get!(rs_ntp_get_tx_data, NTPTransaction);
const PARSER_NAME : &'static [u8] = b"ntp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_register_ntp_parser() {
let default_port = CString::new("123").unwrap();
let parser = RustParser {
name : PARSER_NAME.as_ptr() as *const std::os::raw::c_char,
default_port : default_port.as_ptr(),
ipproto : core::IPPROTO_UDP,
probe_ts : Some(ntp_probing_parser),
probe_tc : Some(ntp_probing_parser),
min_depth : 0,
max_depth : 16,
state_new : rs_ntp_state_new,
state_free : rs_ntp_state_free,
tx_free : rs_ntp_state_tx_free,
parse_ts : rs_ntp_parse_request,
parse_tc : rs_ntp_parse_response,
get_tx_count : rs_ntp_state_get_tx_count,
get_tx : rs_ntp_state_get_tx,
tx_comp_st_ts : 1,
tx_comp_st_tc : 1,
tx_get_progress : rs_ntp_tx_get_alstate_progress,
get_eventinfo : Some(NTPEvent::get_event_info),
get_eventinfo_byid : Some(NTPEvent::get_event_info_by_id),
localstorage_new : None,
localstorage_free : None,
get_files : None,
get_tx_iterator : Some(applayer::state_get | parse | identifier_name |
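In `NTPState::parse` above, only client and symmetric-active messages create a transaction, so unsolicited server replies allocate no state. A sketch of that gate; the discriminants follow RFC 5905 association modes, and the type is illustrative rather than the ntp crate's own:

```rust
/// RFC 5905 association modes (subset).
enum NtpMode {
    SymmetricActive = 1,
    SymmetricPassive = 2,
    Client = 3,
    Server = 4,
    Broadcast = 5,
}

/// Mirrors the gate in `NTPState::parse`: only request-like messages
/// open a transaction.
fn opens_transaction(mode: &NtpMode) -> bool {
    matches!(mode, NtpMode::SymmetricActive | NtpMode::Client)
}
```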
ntp.rs | use nom;
#[derive(AppLayerEvent)]
pub enum NTPEvent {
UnsolicitedResponse,
MalformedData,
NotRequest,
NotResponse,
}
pub struct NTPState {
/// List of transactions for this session
transactions: Vec<NTPTransaction>,
/// Events counter
events: u16,
/// tx counter for assigning incrementing ids to txs
tx_id: u64,
}
#[derive(Debug)]
pub struct NTPTransaction {
/// The NTP reference ID
pub xid: u32,
/// The internal transaction id
id: u64,
tx_data: applayer::AppLayerTxData,
}
impl Transaction for NTPTransaction {
fn id(&self) -> u64 {
self.id
}
}
impl NTPState {
pub fn new() -> NTPState {
NTPState{
transactions: Vec::new(),
events: 0,
tx_id: 0,
}
}
}
impl State<NTPTransaction> for NTPState {
fn get_transactions(&self) -> &[NTPTransaction] {
&self.transactions
}
}
impl NTPState {
/// Parse an NTP request message
///
/// Returns 0 if successful, or -1 on error
fn parse(&mut self, i: &[u8], _direction: u8) -> i32 {
match parse_ntp(i) {
Ok((_,ref msg)) => {
// SCLogDebug!("parse_ntp: {:?}",msg);
if msg.mode == NtpMode::SymmetricActive || msg.mode == NtpMode::Client {
let mut tx = self.new_tx();
// use the reference id as identifier
tx.xid = msg.ref_id;
self.transactions.push(tx);
}
0
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("Insufficient data while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
Err(_) => {
SCLogDebug!("Error while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
}
}
fn free(&mut self) {
// All transactions are freed when the `transactions` object is freed.
// But let's be explicit
self.transactions.clear();
}
fn new_tx(&mut self) -> NTPTransaction {
self.tx_id += 1;
NTPTransaction::new(self.tx_id)
}
pub fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&NTPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) {
let tx = self.transactions.iter().position(|tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
}
/// Set an event. The event is set on the most recent transaction.
pub fn set_event(&mut self, event: NTPEvent) {
if let Some(tx) = self.transactions.last_mut() {
tx.tx_data.set_event(event as u8);
self.events += 1;
}
}
}
impl NTPTransaction {
pub fn new(id: u64) -> NTPTransaction {
NTPTransaction {
xid: 0,
id: id,
tx_data: applayer::AppLayerTxData::new(),
}
}
}
/// Returns *mut NTPState
#[no_mangle]
pub extern "C" fn rs_ntp_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = NTPState::new();
let boxed = Box::new(state);
return Box::into_raw(boxed) as *mut _;
}
/// Params:
/// - state: *mut NTPState as void pointer
#[no_mangle]
pub extern "C" fn rs_ntp_state_free(state: *mut std::os::raw::c_void) {
let mut ntp_state = unsafe{ Box::from_raw(state as *mut NTPState) };
ntp_state.free();
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_request(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 0) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_response(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 1) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx(state: *mut std::os::raw::c_void,
tx_id: u64)
-> *mut std::os::raw::c_void
{
let state = cast_pointer!(state,NTPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => tx as *const _ as *mut _,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx_count(state: *mut std::os::raw::c_void)
-> u64
{
let state = cast_pointer!(state,NTPState);
state.tx_id
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_tx_free(state: *mut std::os::raw::c_void,
tx_id: u64)
{
let state = cast_pointer!(state,NTPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_ntp_tx_get_alstate_progress(_tx: *mut std::os::raw::c_void,
_direction: u8)
-> std::os::raw::c_int
{
1
}
static mut ALPROTO_NTP : AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn ntp_probing_parser(_flow: *const Flow,
_direction: u8,
input:*const u8, input_len: u32,
_rdir: *mut u8) -> AppProto
{
let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, input_len as usize) };
let alproto = unsafe{ ALPROTO_NTP };
match parse_ntp(slice) {
Ok((_, ref msg)) => {
if msg.version == 3 || msg.version == 4 {
return alproto;
} else {
return unsafe{ALPROTO_FAILED};
}
},
Err(nom::Err::Incomplete(_)) => {
return ALPROTO_UNKNOWN;
},
Err(_) => {
return unsafe{ALPROTO_FAILED};
},
}
}
export_tx_data_get!(rs_ntp_get_tx_data, NTPTransaction);
const PARSER_NAME : &'static [u8] = b"ntp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_register_ntp_parser() {
let default_port = CString::new("123").unwrap();
let parser = RustParser {
name : PARSER_NAME.as_ptr() as *const std::os::raw::c_char,
default_port : default_port.as_ptr(),
ipproto : core::IPPROTO_UDP, | max_depth : 16,
state_new : rs_ntp_state_new,
state_free : rs_ntp_state_free,
tx_free : rs_ntp_state_tx_free,
parse_ts : rs_ntp_parse_request,
parse_tc : rs_ntp_parse_response,
get_tx_count : rs_ntp_state_get_tx_count,
get_tx : rs_ntp_state_get_tx,
tx_comp_st_ts : 1,
tx_comp_st_tc : 1,
tx_get_progress : rs_ntp_tx_get_alstate_progress,
get_eventinfo : Some(NTPEvent::get_event_info),
get_eventinfo_byid : Some(NTPEvent::get_event_info_by_id),
localstorage_new : None,
localstorage_free : None,
get_files : None,
get_tx_iterator : Some(applayer::state_get_tx | probe_ts : Some(ntp_probing_parser),
probe_tc : Some(ntp_probing_parser),
min_depth : 0, | random_line_split |
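The probing parser above distinguishes three outcomes: the protocol matched (returns `ALPROTO_NTP`), more data might still match (`ALPROTO_UNKNOWN` on nom's `Incomplete`), and the traffic is definitely not NTP (`ALPROTO_FAILED`). A sketch of that contract as a plain enum; `ProbeResult` and the `Result<u8, bool>` stand-in for `parse_ntp` are illustrative:

```rust
/// Illustrative three-way probe outcome, mirroring the match arms above.
enum ProbeResult {
    Match,    // parsed, version 3 or 4        -> ALPROTO_NTP
    NeedMore, // nom::Err::Incomplete          -> ALPROTO_UNKNOWN
    Fail,     // parse error or other version  -> ALPROTO_FAILED
}

/// `parse` stands in for parse_ntp: Ok(version) on success,
/// Err(true) for "incomplete", Err(false) for "invalid".
fn probe(parse: impl Fn() -> Result<u8, bool>) -> ProbeResult {
    match parse() {
        Ok(3) | Ok(4) => ProbeResult::Match,
        Ok(_) => ProbeResult::Fail,
        Err(true) => ProbeResult::NeedMore,
        Err(false) => ProbeResult::Fail,
    }
}
```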
ntp.rs | nom;
#[derive(AppLayerEvent)]
pub enum NTPEvent {
UnsolicitedResponse,
MalformedData,
NotRequest,
NotResponse,
}
pub struct NTPState {
/// List of transactions for this session
transactions: Vec<NTPTransaction>,
/// Events counter
events: u16,
/// tx counter for assigning incrementing ids to txs
tx_id: u64,
}
#[derive(Debug)]
pub struct NTPTransaction {
/// The NTP reference ID
pub xid: u32,
/// The internal transaction id
id: u64,
tx_data: applayer::AppLayerTxData,
}
impl Transaction for NTPTransaction {
fn id(&self) -> u64 {
self.id
}
}
impl NTPState {
pub fn new() -> NTPState {
NTPState{
transactions: Vec::new(),
events: 0,
tx_id: 0,
}
}
}
impl State<NTPTransaction> for NTPState {
fn get_transactions(&self) -> &[NTPTransaction] {
&self.transactions
}
}
impl NTPState {
/// Parse an NTP request message
///
/// Returns 0 if successful, or -1 on error
fn parse(&mut self, i: &[u8], _direction: u8) -> i32 {
match parse_ntp(i) {
Ok((_,ref msg)) => {
// SCLogDebug!("parse_ntp: {:?}",msg);
if msg.mode == NtpMode::SymmetricActive || msg.mode == NtpMode::Client {
let mut tx = self.new_tx();
// use the reference id as identifier
tx.xid = msg.ref_id;
self.transactions.push(tx);
}
0
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("Insufficient data while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
Err(_) => {
SCLogDebug!("Error while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
}
}
fn free(&mut self) {
// All transactions are freed when the `transactions` object is freed.
// But let's be explicit
self.transactions.clear();
}
fn new_tx(&mut self) -> NTPTransaction {
self.tx_id += 1;
NTPTransaction::new(self.tx_id)
}
pub fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&NTPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) |
/// Set an event. The event is set on the most recent transaction.
pub fn set_event(&mut self, event: NTPEvent) {
if let Some(tx) = self.transactions.last_mut() {
tx.tx_data.set_event(event as u8);
self.events += 1;
}
}
}
impl NTPTransaction {
pub fn new(id: u64) -> NTPTransaction {
NTPTransaction {
xid: 0,
id: id,
tx_data: applayer::AppLayerTxData::new(),
}
}
}
/// Returns *mut NTPState
#[no_mangle]
pub extern "C" fn rs_ntp_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = NTPState::new();
let boxed = Box::new(state);
return Box::into_raw(boxed) as *mut _;
}
/// Params:
/// - state: *mut NTPState as void pointer
#[no_mangle]
pub extern "C" fn rs_ntp_state_free(state: *mut std::os::raw::c_void) {
let mut ntp_state = unsafe{ Box::from_raw(state as *mut NTPState) };
ntp_state.free();
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_request(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 0) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_response(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 1) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx(state: *mut std::os::raw::c_void,
tx_id: u64)
-> *mut std::os::raw::c_void
{
let state = cast_pointer!(state,NTPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => tx as *const _ as *mut _,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx_count(state: *mut std::os::raw::c_void)
-> u64
{
let state = cast_pointer!(state,NTPState);
state.tx_id
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_tx_free(state: *mut std::os::raw::c_void,
tx_id: u64)
{
let state = cast_pointer!(state,NTPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_ntp_tx_get_alstate_progress(_tx: *mut std::os::raw::c_void,
_direction: u8)
-> std::os::raw::c_int
{
1
}
static mut ALPROTO_NTP : AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn ntp_probing_parser(_flow: *const Flow,
_direction: u8,
input:*const u8, input_len: u32,
_rdir: *mut u8) -> AppProto
{
let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, input_len as usize) };
let alproto = unsafe{ ALPROTO_NTP };
match parse_ntp(slice) {
Ok((_, ref msg)) => {
if msg.version == 3 || msg.version == 4 {
return alproto;
} else {
return unsafe{ALPROTO_FAILED};
}
},
Err(nom::Err::Incomplete(_)) => {
return ALPROTO_UNKNOWN;
},
Err(_) => {
return unsafe{ALPROTO_FAILED};
},
}
}
export_tx_data_get!(rs_ntp_get_tx_data, NTPTransaction);
const PARSER_NAME : &'static [u8] = b"ntp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_register_ntp_parser() {
let default_port = CString::new("123").unwrap();
let parser = RustParser {
name : PARSER_NAME.as_ptr() as *const std::os::raw::c_char,
default_port : default_port.as_ptr(),
ipproto : core::IPPROTO_UDP,
probe_ts : Some(ntp_probing_parser),
probe_tc : Some(ntp_probing_parser),
min_depth : 0,
max_depth : 16,
state_new : rs_ntp_state_new,
state_free : rs_ntp_state_free,
tx_free : rs_ntp_state_tx_free,
parse_ts : rs_ntp_parse_request,
parse_tc : rs_ntp_parse_response,
get_tx_count : rs_ntp_state_get_tx_count,
get_tx : rs_ntp_state_get_tx,
tx_comp_st_ts : 1,
tx_comp_st_tc : 1,
tx_get_progress : rs_ntp_tx_get_alstate_progress,
get_eventinfo : Some(NTPEvent::get_event_info),
get_eventinfo_byid : Some(NTPEvent::get_event_info_by_id),
localstorage_new : None,
localstorage_free : None,
get_files : None,
get_tx_iterator : Some(applayer::state_get | {
let tx = self.transactions.iter().position(|tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
} | identifier_body |
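`free_tx` above finds the transaction with `position` and then calls `remove`; `Vec::retain` can express the same order-preserving removal in one pass. A sketch (assuming, as the `debug_assert!` above does, that the transaction exists):

```rust
struct Tx { id: u64 }

/// Equivalent removal using retain: drops the one transaction whose
/// internal id matches, preserving the order of the rest.
fn free_tx(txs: &mut Vec<Tx>, external_id: u64) {
    let before = txs.len();
    txs.retain(|tx| tx.id != external_id + 1); // same 0/1-based shift as above
    debug_assert!(txs.len() < before, "transaction to free should exist");
}
```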
ntp.rs | nom;
#[derive(AppLayerEvent)]
pub enum NTPEvent {
UnsolicitedResponse,
MalformedData,
NotRequest,
NotResponse,
}
pub struct NTPState {
/// List of transactions for this session
transactions: Vec<NTPTransaction>,
/// Events counter
events: u16,
/// tx counter for assigning incrementing ids to txs
tx_id: u64,
}
#[derive(Debug)]
pub struct NTPTransaction {
/// The NTP reference ID
pub xid: u32,
/// The internal transaction id
id: u64,
tx_data: applayer::AppLayerTxData,
}
impl Transaction for NTPTransaction {
fn id(&self) -> u64 {
self.id
}
}
impl NTPState {
pub fn new() -> NTPState {
NTPState{
transactions: Vec::new(),
events: 0,
tx_id: 0,
}
}
}
impl State<NTPTransaction> for NTPState {
fn get_transactions(&self) -> &[NTPTransaction] {
&self.transactions
}
}
impl NTPState {
/// Parse an NTP request message
///
/// Returns 0 if successful, or -1 on error
fn parse(&mut self, i: &[u8], _direction: u8) -> i32 {
match parse_ntp(i) {
Ok((_,ref msg)) => {
// SCLogDebug!("parse_ntp: {:?}",msg);
if msg.mode == NtpMode::SymmetricActive || msg.mode == NtpMode::Client {
let mut tx = self.new_tx();
// use the reference id as identifier
tx.xid = msg.ref_id;
self.transactions.push(tx);
}
0
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("Insufficient data while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
Err(_) => {
SCLogDebug!("Error while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
}
}
fn free(&mut self) {
// All transactions are freed when the `transactions` object is freed.
// But let's be explicit
self.transactions.clear();
}
fn new_tx(&mut self) -> NTPTransaction {
self.tx_id += 1;
NTPTransaction::new(self.tx_id)
}
pub fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&NTPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)
}
fn free_tx(&mut self, tx_id: u64) {
let tx = self.transactions.iter().position(|tx| tx.id == tx_id + 1);
debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
}
/// Set an event. The event is set on the most recent transaction.
pub fn set_event(&mut self, event: NTPEvent) {
if let Some(tx) = self.transactions.last_mut() {
tx.tx_data.set_event(event as u8);
self.events += 1;
}
}
}
impl NTPTransaction {
pub fn new(id: u64) -> NTPTransaction {
NTPTransaction {
xid: 0,
id: id,
tx_data: applayer::AppLayerTxData::new(),
}
}
}
/// Returns *mut NTPState
#[no_mangle]
pub extern "C" fn rs_ntp_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = NTPState::new();
let boxed = Box::new(state);
return Box::into_raw(boxed) as *mut _;
}
/// Params:
/// - state: *mut NTPState as void pointer
#[no_mangle]
pub extern "C" fn rs_ntp_state_free(state: *mut std::os::raw::c_void) {
let mut ntp_state = unsafe{ Box::from_raw(state as *mut NTPState) };
ntp_state.free();
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_request(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 0) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_response(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 1) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx(state: *mut std::os::raw::c_void,
tx_id: u64)
-> *mut std::os::raw::c_void
{
let state = cast_pointer!(state,NTPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => tx as *const _ as *mut _,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx_count(state: *mut std::os::raw::c_void)
-> u64
{
let state = cast_pointer!(state,NTPState);
state.tx_id
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_tx_free(state: *mut std::os::raw::c_void,
tx_id: u64)
{
let state = cast_pointer!(state,NTPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_ntp_tx_get_alstate_progress(_tx: *mut std::os::raw::c_void,
_direction: u8)
-> std::os::raw::c_int
{
1
}
static mut ALPROTO_NTP : AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn ntp_probing_parser(_flow: *const Flow,
_direction: u8,
input:*const u8, input_len: u32,
_rdir: *mut u8) -> AppProto
{
let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, input_len as usize) };
let alproto = unsafe{ ALPROTO_NTP };
match parse_ntp(slice) {
Ok((_, ref msg)) => {
if msg.version == 3 || msg.version == 4 {
return alproto;
} else {
return unsafe{ALPROTO_FAILED};
}
},
Err(nom::Err::Incomplete(_)) => | ,
Err(_) => {
return unsafe{ALPROTO_FAILED};
},
}
}
export_tx_data_get!(rs_ntp_get_tx_data, NTPTransaction);
const PARSER_NAME : &'static [u8] = b"ntp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_register_ntp_parser() {
let default_port = CString::new("123").unwrap();
let parser = RustParser {
name : PARSER_NAME.as_ptr() as *const std::os::raw::c_char,
default_port : default_port.as_ptr(),
ipproto : core::IPPROTO_UDP,
probe_ts : Some(ntp_probing_parser),
probe_tc : Some(ntp_probing_parser),
min_depth : 0,
max_depth : 16,
state_new : rs_ntp_state_new,
state_free : rs_ntp_state_free,
tx_free : rs_ntp_state_tx_free,
parse_ts : rs_ntp_parse_request,
parse_tc : rs_ntp_parse_response,
get_tx_count : rs_ntp_state_get_tx_count,
get_tx : rs_ntp_state_get_tx,
tx_comp_st_ts : 1,
tx_comp_st_tc : 1,
tx_get_progress : rs_ntp_tx_get_alstate_progress,
get_eventinfo : Some(NTPEvent::get_event_info),
get_eventinfo_byid : Some(NTPEvent::get_event_info_by_id),
localstorage_new : None,
localstorage_free : None,
get_files : None,
get_tx_iterator : Some(applayer::state_get | {
return ALPROTO_UNKNOWN;
} | conditional_block |
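A hedged test sketch for the parse path above: a client-mode packet should add one transaction, and a truncated packet should fail. It assumes the `NTPState` layout shown above (including access to the private `transactions` field, as a same-module test would have) and that `parse_ntp` accepts a plain 48-byte header; the packet bytes are illustrative.

```rust
// Test sketch only. 0x23 = LI 0, version 4, mode 3 (client).
#[test]
fn client_packet_creates_one_transaction() {
    let mut pkt = [0u8; 48];
    pkt[0] = 0x23;
    let mut state = NTPState::new();
    assert_eq!(state.parse(&pkt, 0), 0);
    assert_eq!(state.transactions.len(), 1);
}

#[test]
fn truncated_packet_reports_an_error() {
    let mut state = NTPState::new();
    // Two bytes cannot hold an NTP header: parse returns -1
    // (and would set MalformedData on the last tx, if one existed).
    assert_eq!(state.parse(&[0x23, 0x00], 0), -1);
}
```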
task-comm-13.rs | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::sync::mpsc::{channel, Sender};
use std::thread::Thread;
fn start(tx: &Sender<int>, start: int, number_of_messages: int) {
let mut i: int = 0;
while i< number_of_messages { tx.send(start + i).unwrap(); i += 1; }
}
pub fn main() {
println!("Check that we don't deadlock.");
let (tx, rx) = channel();
let _ = Thread::scoped(move|| { start(&tx, 0, 10) }).join();
println!("Joined task");
} | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | random_line_split |
|
task-comm-13.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::sync::mpsc::{channel, Sender};
use std::thread::Thread;
fn start(tx: &Sender<int>, start: int, number_of_messages: int) {
let mut i: int = 0;
while i < number_of_messages { tx.send(start + i).unwrap(); i += 1; }
}
pub fn main() | {
println!("Check that we don't deadlock.");
let (tx, rx) = channel();
let _ = Thread::scoped(move|| { start(&tx, 0, 10) }).join();
println!("Joined task");
} | identifier_body |
|
task-comm-13.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::sync::mpsc::{channel, Sender};
use std::thread::Thread;
fn start(tx: &Sender<int>, start: int, number_of_messages: int) {
let mut i: int = 0;
while i < number_of_messages { tx.send(start + i).unwrap(); i += 1; }
}
pub fn | () {
println!("Check that we don't deadlock.");
let (tx, rx) = channel();
let _ = Thread::scoped(move|| { start(&tx, 0, 10) }).join();
println!("Joined task");
}
| main | identifier_name |
user.js | 'use strict';
var mongoose = require('mongoose');
var bcrypt = require('bcrypt'); | required: true
},
email: {
type: String,
required: true,
unique: true,
trim: true
},
username: {
type: String,
required: true,
unique: true,
trim: true
},
biography: {
type: String
},
location: {
type: String
},
auth: {
basic: {
username: String,
password: String
}
}
});
userSchema.methods.hashPassword = function(password) {
var hash = this.auth.basic.password = bcrypt.hashSync(password, 8);
return hash;
};
userSchema.methods.checkPassword = function(password) {
return bcrypt.compareSync(password, this.auth.basic.password);
};
userSchema.methods.generateToken = function(callback) {
var id = this._id;
eat.encode({id: id}, process.env.APP_SECRET, callback);
};
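// Usage sketch (hypothetical document; assumes only the schema and helpers above):
// var user = new User({ name: 'Ada', email: 'ada@example.com', username: 'ada' });
// user.hashPassword('s3cret');   // stores a bcrypt hash (cost factor 8) in auth.basic.password
// user.checkPassword('s3cret');  // => true; user.checkPassword('wrong') => false
// user.generateToken(function(err, token) { /* EAT token signed with APP_SECRET */ });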
module.exports = mongoose.model('User', userSchema); | var eat = require('eat');
var userSchema = new mongoose.Schema({
name: {
type: String, | random_line_split |
releases.py | self.SVN("log https://v8.googlecode.com/svn/tags -v --limit 20")
releases = []
for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
releases.append(self.GetReleaseDict(
git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
return releases
def GetReleasesFromBranch(self, branch):
self.GitReset(self.vc.RemoteBranch(branch))
if branch == self.vc.MasterBranch():
return self.GetReleasesFromMaster()
releases = []
try:
for git_hash in self.GitLog(format="%H").splitlines():
if VERSION_FILE not in self.GitChangedFiles(git_hash):
continue
if self.ExceedsMax(releases):
break # pragma: no cover
if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash):
break # pragma: no cover
release, patch_level = self.GetRelease(git_hash, branch)
releases.append(release)
# Follow branches only until their creation point.
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up checked-out version file.
self.GitCheckoutFileSafe(VERSION_FILE, "HEAD")
return releases
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
if self._options.max_releases == 0: # pragma: no cover
self._options.max_releases = 10
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
assert self._options.branch in (branches +
[self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
key=lambda r: SortingKey(r["version"]),
reverse=True)
class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
cwd = self._options.chromium
# Check for a clean workdir.
if not self.GitIsWorkdirClean(cwd=cwd): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Assert that the DEPS file is there.
if not os.path.exists(os.path.join(cwd, "DEPS")): # pragma: no cover
self.Die("DEPS file not present.")
class UpdateChromiumCheckout(Step):
MESSAGE = "Update the checkout and create a new branch."
def RunStep(self):
cwd = self._options.chromium
self.GitCheckout("master", cwd=cwd)
self.GitPull(cwd=cwd)
self.GitCreateBranch(self.Config("BRANCHNAME"), cwd=cwd)
def ConvertToCommitNumber(step, revision):
# Simple check for git hashes.
if revision.isdigit() and len(revision) < 8:
return revision
return step.GitConvertToSVNRevision(
revision, cwd=os.path.join(step._options.chromium, "v8"))
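# Example: "23450" is all digits and shorter than 8 characters, so it is
# returned unchanged as a commit number; a hash such as "9fb02bda"
# (hypothetical) falls through to the git->SVN lookup in the chromium/v8
# checkout. The check is heuristic -- an all-digit short git hash would be
# misclassified -- hence "simple check".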
class RetrieveChromiumV8Releases(Step):
MESSAGE = "Retrieve V8 releases from Chromium DEPS."
def RunStep(self):
cwd = self._options.chromium
releases = filter(
lambda r: r["branch"] in [self.vc.CandidateBranch(),
self.vc.MasterBranch()],
self["releases"])
if not releases: # pragma: no cover
print "No releases detected. Skipping chromium history."
return True
# Update v8 checkout in chromium.
self.GitFetchOrigin(cwd=os.path.join(cwd, "v8"))
oldest_v8_rev = int(releases[-1]["revision"])
cr_releases = []
try:
for git_hash in self.GitLog(
format="%H", grep="V8", cwd=cwd).splitlines():
if "DEPS" not in self.GitChangedFiles(git_hash, cwd=cwd):
continue
if not self.GitCheckoutFileSafe("DEPS", git_hash, cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
cr_rev = self.GetCommitPositionNumber(git_hash, cwd=cwd)
if cr_rev:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_releases.append([cr_rev, v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium ranges to the v8 trunk and bleeding_edge releases.
all_ranges = BuildRevisionRanges(cr_releases)
releases_dict = dict((r["revision"], r) for r in releases)
for revision, ranges in all_ranges.iteritems():
releases_dict.get(revision, {})["chromium_revision"] = ranges
# TODO(machenbach): Unify common code with method above.
class RetrieveChromiumBranches(Step):
MESSAGE = "Retrieve Chromium branch information."
def RunStep(self):
cwd = self._options.chromium
trunk_releases = filter(lambda r: r["branch"] == self.vc.CandidateBranch(),
self["releases"])
if not trunk_releases: # pragma: no cover
print "No trunk releases detected. Skipping chromium history."
return True
oldest_v8_rev = int(trunk_releases[-1]["revision"])
# Filter out irrelevant branches.
branches = filter(lambda r: re.match(r"branch-heads/\d+", r),
self.GitRemotes(cwd=cwd))
# Transform into pure branch numbers.
branches = map(lambda r: int(re.match(r"branch-heads/(\d+)", r).group(1)),
branches)
branches = sorted(branches, reverse=True)
cr_branches = []
try:
for branch in branches:
if not self.GitCheckoutFileSafe("DEPS",
"branch-heads/%d" % branch,
cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_branches.append([str(branch), v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium branches to the v8 trunk releases.
all_ranges = BuildRevisionRanges(cr_branches)
trunk_dict = dict((r["revision"], r) for r in trunk_releases)
for revision, ranges in all_ranges.iteritems():
trunk_dict.get(revision, {})["chromium_branch"] = ranges
class CleanUp(Step):
| MESSAGE = "Clean up."
def RunStep(self):
self.GitCheckout("master", cwd=self._options.chromium)
self.GitDeleteBranch(self.Config("BRANCHNAME"), cwd=self._options.chromium)
self.CommonCleanup() | identifier_body |
|
releases.py | def GetBleedingEdgeGitFromPush(self, title):
return MatchSafe(PUSH_MSG_GIT_RE.match(title))
def GetMergedPatches(self, body):
patches = MatchSafe(MERGE_MESSAGE_RE.search(body))
if not patches:
patches = MatchSafe(ROLLBACK_MESSAGE_RE.search(body))
if patches:
# Indicate reverted patches with a "-".
patches = "-%s" % patches
return patches
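# Example: a rollback commit whose message matches ROLLBACK_MESSAGE_RE with
# captured group "r1234, r2345" comes back as "-r1234, r2345"; the leading
# "-" flags the patches as reverted rather than merged.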
def GetMergedPatchesGit(self, body):
patches = []
for line in body.splitlines():
patch = MatchSafe(MERGE_MESSAGE_GIT_RE.match(line))
if patch:
patches.append(patch)
patch = MatchSafe(ROLLBACK_MESSAGE_GIT_RE.match(line))
if patch:
patches.append("-%s" % patch)
return ", ".join(patches)
def GetReleaseDict(
self, git_hash, bleeding_edge_rev, bleeding_edge_git, branch, version,
patches, cl_body):
revision = self.vc.GitSvn(git_hash)
return {
# The SVN revision on the branch.
"revision": revision,
# The git revision on the branch.
"revision_git": git_hash,
# The SVN revision on bleeding edge (only for newer trunk pushes).
"bleeding_edge": bleeding_edge_rev,
# The same for git.
"bleeding_edge_git": bleeding_edge_git,
# The branch name.
"branch": branch,
# The version for displaying in the form 3.26.3 or 3.26.3.12.
"version": version,
# The date of the commit.
"date": self.GitLog(n=1, format="%ci", git_hash=git_hash),
# Merged patches if available in the form 'r1234, r2345'.
"patches_merged": patches,
# Default for easier output formatting.
"chromium_revision": "",
# Default for easier output formatting.
"chromium_branch": "",
# Link to the CL on code review. Trunk pushes are not uploaded, so this
# field will be populated below with the recent roll CL link.
"review_link": MatchSafe(REVIEW_LINK_RE.search(cl_body)),
# Link to the commit message on google code.
"revision_link": ("https://code.google.com/p/v8/source/detail?r=%s"
% revision),
}
def GetRelease(self, git_hash, branch):
self.ReadAndPersistVersion()
base_version = [self["major"], self["minor"], self["build"]]
version = ".".join(base_version)
body = self.GitLog(n=1, format="%B", git_hash=git_hash)
patches = ""
if self["patch"] != "0":
version += ".%s" % self["patch"]
if CHERRY_PICK_TITLE_GIT_RE.match(body.splitlines()[0]):
patches = self.GetMergedPatchesGit(body)
else:
patches = self.GetMergedPatches(body)
title = self.GitLog(n=1, format="%s", git_hash=git_hash)
bleeding_edge_revision = self.GetBleedingEdgeFromPush(title)
bleeding_edge_git = ""
if bleeding_edge_revision:
bleeding_edge_git = self.vc.SvnGit(bleeding_edge_revision,
self.vc.RemoteMasterBranch())
else:
bleeding_edge_git = self.GetBleedingEdgeGitFromPush(title)
return self.GetReleaseDict(
git_hash, bleeding_edge_revision, bleeding_edge_git, branch, version,
patches, body), self["patch"]
def GetReleasesFromMaster(self):
tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v --limit 20")
releases = []
for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
releases.append(self.GetReleaseDict(
git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
return releases
def GetReleasesFromBranch(self, branch):
self.GitReset(self.vc.RemoteBranch(branch))
if branch == self.vc.MasterBranch():
return self.GetReleasesFromMaster()
releases = []
try:
for git_hash in self.GitLog(format="%H").splitlines():
if VERSION_FILE not in self.GitChangedFiles(git_hash):
continue
if self.ExceedsMax(releases):
break # pragma: no cover
if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash):
break # pragma: no cover
release, patch_level = self.GetRelease(git_hash, branch)
releases.append(release)
# Follow branches only until their creation point.
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up checked-out version file.
self.GitCheckoutFileSafe(VERSION_FILE, "HEAD")
return releases
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
if self._options.max_releases == 0: # pragma: no cover
self._options.max_releases = 10
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
assert self._options.branch in (branches +
[self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
key=lambda r: SortingKey(r["version"]),
reverse=True)
class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
cwd = self._options.chromium
# Check for a clean workdir.
if not self.GitIsWorkdirClean(cwd=cwd): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Assert that the DEPS file is there.
if not os.path.exists(os.path.join(cwd, "DEPS")): # pragma: no cover
self.Die("DEPS file not present.")
class UpdateChromiumCheckout(Step):
MESSAGE = "Update the checkout and create a new branch."
def RunStep(self):
cwd = self._options.chromium
self.GitCheckout("master", cwd=cwd)
self.GitPull(cwd=cwd)
self.GitCreateBranch(self.Config("BRANCHNAME"), cwd=cwd)
def ConvertToCommitNumber(step, revision):
# Simple check for git hashes.
if revision.isdigit() and len(revision) < 8:
return revision
return step.GitConvertToSVNRevision(
revision, cwd=os.path.join(step._options.chromium, "v8"))
class RetrieveChromiumV8Releases(Step):
MESSAGE = "Retrieve V8 releases from Chromium DEPS."
def RunStep(self):
cwd = self._options.chromium
releases = filter(
lambda r: r["branch"] in [self.vc.CandidateBranch(),
self.vc.MasterBranch()],
self["releases"])
if not releases: # pragma: no cover
print "No releases detected. Skipping chromium history."
return True
# Update v8 checkout in chromium.
self.GitFetchOrigin(cwd=os.path.join(cwd, "v8"))
oldest_v8_rev = int(releases[-1]["revision"])
cr_releases = []
try:
for git_hash in self.GitLog(
format="%H", grep="V8", cwd=cwd).splitlines():
if "DEPS" not in self.GitChangedFiles(git_hash, cwd=cwd):
continue
if not self.GitCheckoutFileSafe("DEPS", git_hash, cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match: | cr_rev = self.GetCommitPositionNumber(git_hash, cwd=cwd) | random_line_split |
|
releases.py | )
return result
def BuildRevisionRanges(cr_releases):
"""Returns a mapping of v8 revision -> chromium ranges.
The ranges are comma-separated, each range has the form R1:R2. The newest
entry is the only one of the form R1, as there is no end range.
cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev.
cr_rev either refers to a chromium svn revision or a chromium branch number.
"""
range_lists = {}
cr_releases = FilterDuplicatesAndReverse(cr_releases)
# Visit pairs of cr releases from oldest to newest.
for cr_from, cr_to in itertools.izip(
cr_releases, itertools.islice(cr_releases, 1, None)):
# Assume the chromium revisions are all different.
assert cr_from[0] != cr_to[0]
# TODO(machenbach): Subtraction is not git friendly.
ran = "%s:%d" % (cr_from[0], int(cr_to[0]) - 1)
# Collect the ranges in lists per revision.
range_lists.setdefault(cr_from[1], []).append(ran)
# Add the newest revision.
if cr_releases:
range_lists.setdefault(cr_releases[-1][1], []).append(cr_releases[-1][0])
# Stringify and comma-separate the range lists.
return dict((rev, ", ".join(ran)) for rev, ran in range_lists.iteritems())
def MatchSafe(match):
if match:
return match.group(1)
else:
return ""
class Preparation(Step):
MESSAGE = "Preparation."
def RunStep(self):
self.CommonPrepare()
self.PrepareBranch()
class RetrieveV8Releases(Step):
MESSAGE = "Retrieve all V8 releases."
def ExceedsMax(self, releases):
return (self._options.max_releases > 0
and len(releases) > self._options.max_releases)
def GetBleedingEdgeFromPush(self, title):
return MatchSafe(PUSH_MSG_SVN_RE.match(title))
def GetBleedingEdgeGitFromPush(self, title):
return MatchSafe(PUSH_MSG_GIT_RE.match(title))
def GetMergedPatches(self, body):
patches = MatchSafe(MERGE_MESSAGE_RE.search(body))
if not patches:
patches = MatchSafe(ROLLBACK_MESSAGE_RE.search(body))
if patches:
# Indicate reverted patches with a "-".
patches = "-%s" % patches
return patches
def GetMergedPatchesGit(self, body):
patches = []
for line in body.splitlines():
patch = MatchSafe(MERGE_MESSAGE_GIT_RE.match(line))
if patch:
patches.append(patch)
patch = MatchSafe(ROLLBACK_MESSAGE_GIT_RE.match(line))
if patch:
patches.append("-%s" % patch)
return ", ".join(patches)
def GetReleaseDict(
self, git_hash, bleeding_edge_rev, bleeding_edge_git, branch, version,
patches, cl_body):
revision = self.vc.GitSvn(git_hash)
return {
# The SVN revision on the branch.
"revision": revision,
# The git revision on the branch.
"revision_git": git_hash,
# The SVN revision on bleeding edge (only for newer trunk pushes).
"bleeding_edge": bleeding_edge_rev,
# The same for git.
"bleeding_edge_git": bleeding_edge_git,
# The branch name.
"branch": branch,
# The version for displaying in the form 3.26.3 or 3.26.3.12.
"version": version,
# The date of the commit.
"date": self.GitLog(n=1, format="%ci", git_hash=git_hash),
# Merged patches if available in the form 'r1234, r2345'.
"patches_merged": patches,
# Default for easier output formatting.
"chromium_revision": "",
# Default for easier output formatting.
"chromium_branch": "",
# Link to the CL on code review. Trunk pushes are not uploaded, so this
# field will be populated below with the recent roll CL link.
"review_link": MatchSafe(REVIEW_LINK_RE.search(cl_body)),
# Link to the commit message on google code.
"revision_link": ("https://code.google.com/p/v8/source/detail?r=%s"
% revision),
}
def GetRelease(self, git_hash, branch):
self.ReadAndPersistVersion()
base_version = [self["major"], self["minor"], self["build"]]
version = ".".join(base_version)
body = self.GitLog(n=1, format="%B", git_hash=git_hash)
patches = ""
if self["patch"] != "0":
version += ".%s" % self["patch"]
if CHERRY_PICK_TITLE_GIT_RE.match(body.splitlines()[0]):
|
else:
patches = self.GetMergedPatches(body)
title = self.GitLog(n=1, format="%s", git_hash=git_hash)
bleeding_edge_revision = self.GetBleedingEdgeFromPush(title)
bleeding_edge_git = ""
if bleeding_edge_revision:
bleeding_edge_git = self.vc.SvnGit(bleeding_edge_revision,
self.vc.RemoteMasterBranch())
else:
bleeding_edge_git = self.GetBleedingEdgeGitFromPush(title)
return self.GetReleaseDict(
git_hash, bleeding_edge_revision, bleeding_edge_git, branch, version,
patches, body), self["patch"]
def GetReleasesFromMaster(self):
tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v --limit 20")
releases = []
for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
releases.append(self.GetReleaseDict(
git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
return releases
def GetReleasesFromBranch(self, branch):
self.GitReset(self.vc.RemoteBranch(branch))
if branch == self.vc.MasterBranch():
return self.GetReleasesFromMaster()
releases = []
try:
for git_hash in self.GitLog(format="%H").splitlines():
if VERSION_FILE not in self.GitChangedFiles(git_hash):
continue
if self.ExceedsMax(releases):
break # pragma: no cover
if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash):
break # pragma: no cover
release, patch_level = self.GetRelease(git_hash, branch)
releases.append(release)
# Follow branches only until their creation point.
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up checked-out version file.
self.GitCheckoutFileSafe(VERSION_FILE, "HEAD")
return releases
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
if self._options.max_releases == 0: # pragma: no cover
self._options.max_releases = 10
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
assert self._options.branch in (branches +
[self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
key=lambda r: SortingKey(r["version"]),
reverse=True)
class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
cwd = self._options.chromium
# Check for a clean workdir.
if not self.GitIsWorkdirClean(cwd=cwd): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Assert that the DE | patches = self.GetMergedPatchesGit(body) | conditional_block |
releases.py | limit 20")
releases = []
for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
releases.append(self.GetReleaseDict(
git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
return releases
def GetReleasesFromBranch(self, branch):
self.GitReset(self.vc.RemoteBranch(branch))
if branch == self.vc.MasterBranch():
return self.GetReleasesFromMaster()
releases = []
try:
for git_hash in self.GitLog(format="%H").splitlines():
if VERSION_FILE not in self.GitChangedFiles(git_hash):
continue
if self.ExceedsMax(releases):
break # pragma: no cover
if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash):
break # pragma: no cover
release, patch_level = self.GetRelease(git_hash, branch)
releases.append(release)
# Follow branches only until their creation point.
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up checked-out version file.
self.GitCheckoutFileSafe(VERSION_FILE, "HEAD")
return releases
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
if self._options.max_releases == 0: # pragma: no cover
self._options.max_releases = 10
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
assert self._options.branch in (branches +
[self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
key=lambda r: SortingKey(r["version"]),
reverse=True)
class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
cwd = self._options.chromium
# Check for a clean workdir.
if not self.GitIsWorkdirClean(cwd=cwd): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Assert that the DEPS file is there.
if not os.path.exists(os.path.join(cwd, "DEPS")): # pragma: no cover
self.Die("DEPS file not present.")
class UpdateChromiumCheckout(Step):
MESSAGE = "Update the checkout and create a new branch."
def RunStep(self):
cwd = self._options.chromium
self.GitCheckout("master", cwd=cwd)
self.GitPull(cwd=cwd)
self.GitCreateBranch(self.Config("BRANCHNAME"), cwd=cwd)
def ConvertToCommitNumber(step, revision):
# Simple check for git hashes.
if revision.isdigit() and len(revision) < 8:
return revision
return step.GitConvertToSVNRevision(
revision, cwd=os.path.join(step._options.chromium, "v8"))
class RetrieveChromiumV8Releases(Step):
MESSAGE = "Retrieve V8 releases from Chromium DEPS."
def RunStep(self):
cwd = self._options.chromium
releases = filter(
lambda r: r["branch"] in [self.vc.CandidateBranch(),
self.vc.MasterBranch()],
self["releases"])
if not releases: # pragma: no cover
print "No releases detected. Skipping chromium history."
return True
# Update v8 checkout in chromium.
self.GitFetchOrigin(cwd=os.path.join(cwd, "v8"))
oldest_v8_rev = int(releases[-1]["revision"])
cr_releases = []
try:
for git_hash in self.GitLog(
format="%H", grep="V8", cwd=cwd).splitlines():
if "DEPS" not in self.GitChangedFiles(git_hash, cwd=cwd):
continue
if not self.GitCheckoutFileSafe("DEPS", git_hash, cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
cr_rev = self.GetCommitPositionNumber(git_hash, cwd=cwd)
if cr_rev:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_releases.append([cr_rev, v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium ranges to the v8 trunk and bleeding_edge releases.
all_ranges = BuildRevisionRanges(cr_releases)
releases_dict = dict((r["revision"], r) for r in releases)
for revision, ranges in all_ranges.iteritems():
releases_dict.get(revision, {})["chromium_revision"] = ranges
# TODO(machenbach): Unify common code with method above.
class RetrieveChromiumBranches(Step):
MESSAGE = "Retrieve Chromium branch information."
def RunStep(self):
cwd = self._options.chromium
trunk_releases = filter(lambda r: r["branch"] == self.vc.CandidateBranch(),
self["releases"])
if not trunk_releases: # pragma: no cover
print "No trunk releases detected. Skipping chromium history."
return True
oldest_v8_rev = int(trunk_releases[-1]["revision"])
# Filter out irrelevant branches.
branches = filter(lambda r: re.match(r"branch-heads/\d+", r),
self.GitRemotes(cwd=cwd))
# Transform into pure branch numbers.
branches = map(lambda r: int(re.match(r"branch-heads/(\d+)", r).group(1)),
branches)
branches = sorted(branches, reverse=True)
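# Example: remote refs such as ["branch-heads/2214", "branch-heads/2171"]
# reduce to [2214, 2171], newest branch number first; anything not matching
# branch-heads/<digits> was dropped by the filter above.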
cr_branches = []
try:
for branch in branches:
if not self.GitCheckoutFileSafe("DEPS",
"branch-heads/%d" % branch,
cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_branches.append([str(branch), v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium branches to the v8 trunk releases.
all_ranges = BuildRevisionRanges(cr_branches)
trunk_dict = dict((r["revision"], r) for r in trunk_releases)
for revision, ranges in all_ranges.iteritems():
trunk_dict.get(revision, {})["chromium_branch"] = ranges
class CleanUp(Step):
MESSAGE = "Clean up."
def RunStep(self):
self.GitCheckout("master", cwd=self._options.chromium)
self.GitDeleteBranch(self.Config("BRANCHNAME"), cwd=self._options.chromium)
self.CommonCleanup()
class WriteOutput(Step):
MESSAGE = "Print output."
def | Run | identifier_name |
|
search.js | ) {
if (inSearch != true)
execSearch();
});
if(navigator.userAgent.match(/Windows Phone/i)){
$("#backButton").hide();
}
// Handle differences of Windows Phone hardware (back button)
// if (window.WinJS && window.WinJS.Application) {
// window.WinJS.Application.onbackclick = function () {
// if (window.location.hash === '#/') {
// return false;
// }
// window.history.back();
// return true;
// };
// }
});
function execSearch()
{
inSearch = true;
$.mobile.loading("show", {
text: "Loading...",
textVisible: true,
textonly: true,
theme: "b"
});
lastSearchTerm = $("#searchbar").val();
// Append ~ to each word for fuzzy search
if (lastSearchTerm == '')
lastSearchTerm = '*';
var fuzzyQ = "";
if (lastSearchTerm.length > 2) {
var res = lastSearchTerm.split(" ");
for (var item in res) {
fuzzyQ += res[item] + '~ ';
}
} else
fuzzyQ = lastSearchTerm;
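// Example: "gettysburg address" becomes "gettysburg~ address~ " -- each term
// gains Lucene's fuzzy operator, honored because the request below sets
// queryType=full.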
// Get Facet Query
RefreshFacets();
// Build the search API request URL (select, facets, paging, scoring profile)
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs?api-version=2015-02-28-Preview&$select=NRIS_Refnum,RESNAME,ResType,City,State,ImageCount&facet=ResType,sort:value&facet=State,sort:value&$top=10&scoringProfile=default&queryType=full&search=" + fuzzyQ + filterQuery;
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#searchResults").html(''); | var thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/nrhp_thumbnail.png';
if (data.value[item]["ImageCount"] > 0)
thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/' + data.value[item]["NRIS_Refnum"] + '_1.jpeg';
htmlString = '<li id="' + data.value[item]["NRIS_Refnum"] + '"><a><img src="' + thumbNail + '" style="padding-top:15px;padding-left:20px;margin-right:60px;">';
htmlString += '<div style="padding-left: 15px;"><font style="white-space:normal; font-size: small;" >' + data.value[item]["RESNAME"];
htmlString += '<br>';
htmlString += data.value[item]["City"] + ', ';
htmlString += data.value[item]["State"];
htmlString += '<br>';
htmlString += data.value[item]["ResType"];
htmlString += '</font></div></a></li>';
$("#searchResults").append(htmlString);
}
$('#searchResults').delegate('li', 'click', function () {
changePage('#pageDetails', false, $(this).attr('id'));
});
$("#searchResults").listview("refresh");
facets = data["@search.facets"];
updateFacets();
if ((lastSearchTerm != $("#searchbar").val()) && (lastSearchTerm != '*'))
{
lastSearchTerm = $("#searchbar").val();
execSearch(lastSearchTerm);
} else {
$.mobile.loading("hide");
inSearch = false;
}
}
});
}
function execLookup(q) {
// Do a lookup on a specific item to get details
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs/" + q +"?api-version=2015-02-28&$select=RESNAME,ResType,Address,City,County,State,NumCBldg,NumCSite,NumCStru,CertifiedDate,Edited,ImageCount";
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#detailsResNameAndType").html(data["RESNAME"] + " (" + data["ResType"] + ")");
$("#detailsAddress").html('<label>Address: ' + data["Address"] + ", " + data["City"] + ", " + data["State"] + '</label>');
$("#detailsCertified").html("Certified: " + data["CertifiedDate"].substring(0, 10));
$("#detailsLastEdited").html("Last Edited: " + data["Edited"].substring(0, 10));
var pdfLoc = "http://focus.nps.gov/pdfhost/docs/nrhp/text/" + q + ".pdf";
$("#detailsLastEdited").html("<a href='" + pdfLoc + "'>Learn more...</a>");
$("#detailsImages").html("");
for (var i = 1; i <= data["ImageCount"]; i++)
{
$("#detailsImages").append("<img style='width: 100%;height: auto;max-width: 100%;' src='https://azsplayground.blob.core.windows.net/historicsites/img/" + q + "_" + i + ".jpeg'>");
}
}
});
}
function changePage(page, reverse, nrisRefnum)
{
$.mobile.changePage(page, { transition: 'slide', reverse: reverse });
if (page == "#pageDetails") {
execLookup(nrisRefnum);
}
}
function getStaticHTML(html)
{
if(Object.hasOwnProperty.call(window, "ActiveXObject")){ //using IE
return window.toStaticHTML(html);
} else {
return html;
}
}
function checkIfSelected(facet, value)
{
for (var filter in filterArray) {
if ((filterArray[filter].facet == facet) &&
(filterArray[filter].value == value))
return 'checked';
}
return '';
}
function updateFacets()
{
$("#FilterResType").html('');
$("#FilterResType").append('<legend>Type:</legend>').trigger('create');;
for (var facet in facets.ResType)
{
// Check if this is a selected facet
var checked = checkIfSelected('ResType', facets.ResType[facet].value);
var html = getStaticHTML('<label id="' + facets.ResType[facet].value + '"><input type="checkbox" value = "' + facets.ResType[facet].value + '" ' + checked + ' id="chk' + facets.ResType[facet].value + '">' + facets.ResType[facet].value + ' (' + facets.ResType[facet].count + ')</label>');
$("#FilterResType").append(html).trigger('create');
$('#chk' + facets.ResType[facet].value).change(function () {
RefreshfilterArray('ResType', this.value, this.checked);
});
}
$("#FilterState").html('');
$("#FilterState").append('<legend>State:</legend>').trigger('create');;
for (var facet in facets.State) {
var checked = checkIfSelected('State', facets.State[facet].value);
var html = getStaticHTML('<label id="' + facets.State[facet].value + '"><input type="checkbox" value = "' + facets.State[facet].value + '" ' + checked + ' id="chk' + facets.State[facet].value + '">' + facets.State[facet].value + ' (' + facets.State[facet].count + ')</label>');
$("#FilterState").append(html).trigger('create');
$('#chk' + facets.State[facet].value).change(function () {
RefreshfilterArray('State', this.value, this.checked);
});
}
}
function RefreshfilterArray(facet, value, checked)
{
if (checked) {
filterArray.push({ facet: facet, value: value });
if (inSearch != true)
execSearch();
} else
{
filterArray.forEach(function (result, index) {
if ((result.facet == facet) && (result.value == value)) {
//Remove from array
filterArray.splice(index, 1);
if (inSearch != true)
execSearch();
}
});
}
}
function RefreshFacets()
{
var lastFacet = '';
filterQuery = '';
if (filterArray.length > 0) {
filterQuery = '&$filter=';
filterArray = filterArray.sort(compare);
for (var filter in filterArray) {
if (filterArray[filter].facet != lastFacet)
filterQuery += "(";
else
filterQuery = filterQuery.substring(0, filterQuery.length-6) + " or ";
filterQuery += filterArray[filter].facet + " eq '" + filterArray[filter].value + "') and ";
lastFacet = filterArray[filter].facet;
}
filterQuery = | for (var item in data.value)
{ | random_line_split |
search.js | ) {
if (inSearch != true)
execSearch();
});
if(navigator.userAgent.match(/Windows Phone/i)){
$("#backButton").hide();
}
// Handle differences of Windows Phone hardware (back button)
// if (window.WinJS && window.WinJS.Application) {
// window.WinJS.Application.onbackclick = function () {
// if (window.location.hash === '#/') {
// return false;
// }
// window.history.back();
// return true;
// };
// }
});
function execSearch()
{
inSearch = true;
$.mobile.loading("show", {
text: "Loading...",
textVisible: true,
textonly: true,
theme: "b"
});
lastSearchTerm = $("#searchbar").val();
// Append ~ to each word for fuzzy search
if (lastSearchTerm == '')
lastSearchTerm = '*';
var fuzzyQ = "";
if (lastSearchTerm.length > 2) {
var res = lastSearchTerm.split(" ");
for (var item in res) {
fuzzyQ += res[item] + '~ ';
}
} else
fuzzyQ = lastSearchTerm;
// Get Facet Query
RefreshFacets();
// Build the search API request URL (select, facets, paging, scoring profile)
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs?api-version=2015-02-28-Preview&$select=NRIS_Refnum,RESNAME,ResType,City,State,ImageCount&facet=ResType,sort:value&facet=State,sort:value&$top=10&scoringProfile=default&queryType=full&search=" + fuzzyQ + filterQuery;
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#searchResults").html('');
for (var item in data.value)
{
var thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/nrhp_thumbnail.png';
if (data.value[item]["ImageCount"] > 0)
thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/' + data.value[item]["NRIS_Refnum"] + '_1.jpeg';
htmlString = '<li id="' + data.value[item]["NRIS_Refnum"] + '"><a><img src="' + thumbNail + '" style="padding-top:15px;padding-left:20px;margin-right:60px;">';
htmlString += '<div style="padding-left: 15px;"><font style="white-space:normal; font-size: small;" >' + data.value[item]["RESNAME"];
htmlString += '<br>';
htmlString += data.value[item]["City"] + ', ';
htmlString += data.value[item]["State"];
htmlString += '<br>';
htmlString += data.value[item]["ResType"];
htmlString += '</font></div></a></li>';
$("#searchResults").append(htmlString);
}
$('#searchResults').delegate('li', 'click', function () {
changePage('#pageDetails', false, $(this).attr('id'));
});
$("#searchResults").listview("refresh");
facets = data["@search.facets"];
updateFacets();
if ((lastSearchTerm != $("#searchbar").val()) && (lastSearchTerm != '*'))
{
lastSearchTerm = $("#searchbar").val();
execSearch(lastSearchTerm);
} else {
$.mobile.loading("hide");
inSearch = false;
}
}
});
}
function execLookup(q) {
// Do a lookup on a specific item to get details
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs/" + q +"?api-version=2015-02-28&$select=RESNAME,ResType,Address,City,County,State,NumCBldg,NumCSite,NumCStru,CertifiedDate,Edited,ImageCount";
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#detailsResNameAndType").html(data["RESNAME"] + " (" + data["ResType"] + ")");
$("#detailsAddress").html('<label>Address: ' + data["Address"] + ", " + data["City"] + ", " + data["State"] + '</label>');
$("#detailsCertified").html("Certified: " + data["CertifiedDate"].substring(0, 10));
$("#detailsLastEdited").html("Last Edited: " + data["Edited"].substring(0, 10));
var pdfLoc = "http://focus.nps.gov/pdfhost/docs/nrhp/text/" + q + ".pdf";
$("#detailsLastEdited").html("<a href='" + pdfLoc + "'>Learn more...</a>");
$("#detailsImages").html("");
for (var i = 1; i <= data["ImageCount"]; i++)
{
$("#detailsImages").append("<img style='width: 100%;height: auto;max-width: 100%;' src='https://azsplayground.blob.core.windows.net/historicsites/img/" + q + "_" + i + ".jpeg'>");
}
}
});
}
function changePage(page, reverse, nrisRefnum)
{
$.mobile.changePage(page, { transition: 'slide', reverse: reverse });
if (page == "#pageDetails") {
execLookup(nrisRefnum);
}
}
function getStaticHTML(html)
{
if(Object.hasOwnProperty.call(window, "ActiveXObject")){ //using IE
return window.toStaticHTML(html);
} else {
return html;
}
}
function checkIfSelected(facet, value)
|
function updateFacets()
{
$("#FilterResType").html('');
$("#FilterResType").append('<legend>Type:</legend>').trigger('create');;
for (var facet in facets.ResType)
{
// Check if this is a selected facet
var checked = checkIfSelected('ResType', facets.ResType[facet].value);
var html = getStaticHTML('<label id="' + facets.ResType[facet].value + '"><input type="checkbox" value = "' + facets.ResType[facet].value + '" ' + checked + ' id="chk' + facets.ResType[facet].value + '">' + facets.ResType[facet].value + ' (' + facets.ResType[facet].count + ')</label>');
$("#FilterResType").append(html).trigger('create');
$('#chk' + facets.ResType[facet].value).change(function () {
RefreshfilterArray('ResType', this.value, this.checked);
});
}
$("#FilterState").html('');
$("#FilterState").append('<legend>State:</legend>').trigger('create');;
for (var facet in facets.State) {
var checked = checkIfSelected('State', facets.State[facet].value);
var html = getStaticHTML('<label id="' + facets.State[facet].value + '"><input type="checkbox" value = "' + facets.State[facet].value + '" ' + checked + ' id="chk' + facets.State[facet].value + '">' + facets.State[facet].value + ' (' + facets.State[facet].count + ')</label>');
$("#FilterState").append(html).trigger('create');
$('#chk' + facets.State[facet].value).change(function () {
RefreshfilterArray('State', this.value, this.checked);
});
}
}
function RefreshfilterArray(facet, value, checked)
{
if (checked) {
filterArray.push({ facet: facet, value: value });
if (inSearch != true)
execSearch();
} else
{
filterArray.forEach(function (result, index) {
if ((result.facet == facet) && (result.value == value)) {
//Remove from array
filterArray.splice(index, 1);
if (inSearch != true)
execSearch();
}
});
}
}
function RefreshFacets()
{
var lastFacet = '';
filterQuery = '';
if (filterArray.length > 0) {
filterQuery = '&$filter=';
filterArray = filterArray.sort(compare);
for (var filter in filterArray) {
if (filterArray[filter].facet != lastFacet)
filterQuery += "(";
else
filterQuery = filterQuery.substring(0, filterQuery.length-6) + " or ";
filterQuery += filterArray[filter].facet + " eq '" + filterArray[filter].value + "') and ";
lastFacet = filterArray[filter].facet;
}
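// Example: selected filters [{ResType: 'Building'}, {State: 'GA'}, {State: 'VA'}]
// build "&$filter=(ResType eq 'Building') and (State eq 'GA' or State eq 'VA') and ";
// the trailing " and " is presumably trimmed by the code that follows (elided here).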
filterQuery = | {
for (var filter in filterArray) {
if ((filterArray[filter].facet == facet) &&
(filterArray[filter].value == value))
return 'checked';
}
return '';
} | identifier_body |
search.js | ) {
if (inSearch != true)
execSearch();
});
if(navigator.userAgent.match(/Windows Phone/i)){
$("#backButton").hide();
}
// Handle differences of Windows Phone hardware (back button)
// if (window.WinJS && window.WinJS.Application) {
// window.WinJS.Application.onbackclick = function () {
// if (window.location.hash === '#/') {
// return false;
// }
// window.history.back();
// return true;
// };
// }
});
function execSearch()
{
inSearch = true;
$.mobile.loading("show", {
text: "Loading...",
textVisible: true,
textonly: true,
theme: "b"
});
lastSearchTerm = $("#searchbar").val();
// Append ~ to each word for fuzzy search
if (lastSearchTerm == '')
lastSearchTerm = '*';
var fuzzyQ = "";
if (lastSearchTerm.length > 2) {
var res = lastSearchTerm.split(" ");
for (var item in res) {
fuzzyQ += res[item] + '~ ';
}
} else
fuzzyQ = lastSearchTerm;
// Get Facet Query
RefreshFacets();
// Build the search API request URL (select, facets, paging, scoring profile)
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs?api-version=2015-02-28-Preview&$select=NRIS_Refnum,RESNAME,ResType,City,State,ImageCount&facet=ResType,sort:value&facet=State,sort:value&$top=10&scoringProfile=default&queryType=full&search=" + fuzzyQ + filterQuery;
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#searchResults").html('');
for (var item in data.value)
{
var thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/nrhp_thumbnail.png';
if (data.value[item]["ImageCount"] > 0)
thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/' + data.value[item]["NRIS_Refnum"] + '_1.jpeg';
htmlString = '<li id="' + data.value[item]["NRIS_Refnum"] + '"><a><img src="' + thumbNail + '" style="padding-top:15px;padding-left:20px;margin-right:60px;">';
htmlString += '<div style="padding-left: 15px;"><font style="white-space:normal; font-size: small;" >' + data.value[item]["RESNAME"];
htmlString += '<br>';
htmlString += data.value[item]["City"] + ', ';
htmlString += data.value[item]["State"];
htmlString += '<br>';
htmlString += data.value[item]["ResType"];
htmlString += '</font></div></a></li>';
$("#searchResults").append(htmlString);
}
$('#searchResults').delegate('li', 'click', function () {
changePage('#pageDetails', false, $(this).attr('id'));
});
$("#searchResults").listview("refresh");
facets = data["@search.facets"];
updateFacets();
if ((lastSearchTerm != $("#searchbar").val()) && (lastSearchTerm != '*'))
{
lastSearchTerm = $("#searchbar").val();
execSearch(lastSearchTerm);
} else {
$.mobile.loading("hide");
inSearch = false;
}
}
});
}
function execLookup(q) {
// Do a lookup on a specific item to get details
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs/" + q +"?api-version=2015-02-28&$select=RESNAME,ResType,Address,City,County,State,NumCBldg,NumCSite,NumCStru,CertifiedDate,Edited,ImageCount";
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#detailsResNameAndType").html(data["RESNAME"] + " (" + data["ResType"] + ")");
$("#detailsAddress").html('<label>Address: ' + data["Address"] + ", " + data["City"] + ", " + data["State"] + '</label>');
$("#detailsCertified").html("Certified: " + data["CertifiedDate"].substring(0, 10));
$("#detailsLastEdited").html("Last Edited: " + data["Edited"].substring(0, 10));
var pdfLoc = "http://focus.nps.gov/pdfhost/docs/nrhp/text/" + q + ".pdf";
$("#detailsLastEdited").html("<a href='" + pdfLoc + "'>Learn more...</a>");
$("#detailsImages").html("");
for (var i = 1; i <= data["ImageCount"]; i++)
|
}
});
}
function changePage(page, reverse, nrisRefnum)
{
$.mobile.changePage(page, { transition: 'slide', reverse: reverse });
if (page == "#pageDetails") {
execLookup(nrisRefnum);
}
}
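// Example: tapping a result whose li id is its NRIS refnum (say "66000063",
// hypothetical) runs changePage('#pageDetails', false, '66000063'), which
// slides to the details page and loads that document via execLookup.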
function getStaticHTML(html)
{
if(Object.hasOwnProperty.call(window, "ActiveXObject")){ //using IE
return window.toStaticHTML(html);
} else {
return html;
}
}
function checkIfSelected(facet, value)
{
for (var filter in filterArray) {
if ((filterArray[filter].facet == facet) &&
(filterArray[filter].value == value))
return 'checked';
}
return '';
}
function updateFacets()
{
$("#FilterResType").html('');
$("#FilterResType").append('<legend>Type:</legend>').trigger('create');;
for (var facet in facets.ResType)
{
// Check if this is a selected facet
var checked = checkIfSelected('ResType', facets.ResType[facet].value);
var html = getStaticHTML('<label id="' + facets.ResType[facet].value + '"><input type="checkbox" value = "' + facets.ResType[facet].value + '" ' + checked + ' id="chk' + facets.ResType[facet].value + '">' + facets.ResType[facet].value + ' (' + facets.ResType[facet].count + ')</label>');
$("#FilterResType").append(html).trigger('create');
$('#chk' + facets.ResType[facet].value).change(function () {
RefreshfilterArray('ResType', this.value, this.checked);
});
}
$("#FilterState").html('');
$("#FilterState").append('<legend>State:</legend>').trigger('create');;
for (var facet in facets.State) {
var checked = checkIfSelected('State', facets.State[facet].value);
var html = getStaticHTML('<label id="' + facets.State[facet].value + '"><input type="checkbox" value = "' + facets.State[facet].value + '" ' + checked + ' id="chk' + facets.State[facet].value + '">' + facets.State[facet].value + ' (' + facets.State[facet].count + ')</label>');
$("#FilterState").append(html).trigger('create');
$('#chk' + facets.State[facet].value).change(function () {
RefreshfilterArray('State', this.value, this.checked);
});
}
}
function RefreshfilterArray(facet, value, checked)
{
if (checked) {
filterArray.push({ facet: facet, value: value });
if (inSearch != true)
execSearch();
} else
{
filterArray.forEach(function (result, index) {
if ((result.facet == facet) && (result.value == value)) {
//Remove from array
filterArray.splice(index, 1);
if (inSearch != true)
execSearch();
}
});
}
}
function RefreshFacets()
{
var lastFacet = '';
filterQuery = '';
if (filterArray.length > 0) {
filterQuery = '&$filter=';
filterArray = filterArray.sort(compare);
for (var filter in filterArray) {
if (filterArray[filter].facet != lastFacet)
filterQuery += "(";
else
filterQuery = filterQuery.substring(0, filterQuery.length-6) + " or ";
filterQuery += filterArray[filter].facet + " eq '" + filterArray[filter].value + "') and ";
lastFacet = filterArray[filter].facet;
}
filterQuery = | {
$("#detailsImages").append("<img style='width: 100%;height: auto;max-width: 100%;' src='https://azsplayground.blob.core.windows.net/historicsites/img/" + q + "_" + i + ".jpeg'>");
} | conditional_block |
search.js | ) {
if (inSearch != true)
execSearch();
});
if(navigator.userAgent.match(/Windows Phone/i)){
$("#backButton").hide();
}
// Handle differences of Windows Phone hardware (back button)
// if (window.WinJS && window.WinJS.Application) {
// window.WinJS.Application.onbackclick = function () {
// if (window.location.hash === '#/') {
// return false;
// }
// window.history.back();
// return true;
// };
// }
});
function | ()
{
inSearch = true;
$.mobile.loading("show", {
text: "Loading...",
textVisible: true,
textonly: true,
theme: "b"
});
lastSearchTerm = $("#searchbar").val();
// Append ~ to each word for fuzzy search
if (lastSearchTerm == '')
lastSearchTerm = '*';
var fuzzyQ = "";
if (lastSearchTerm.length > 2) {
var res = lastSearchTerm.split(" ");
for (var item in res) {
fuzzyQ += res[item] + '~ ';
}
} else
fuzzyQ = lastSearchTerm;
// Get Facet Query
RefreshFacets();
// Build the search API request URL (select, facets, paging, scoring profile)
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs?api-version=2015-02-28-Preview&$select=NRIS_Refnum,RESNAME,ResType,City,State,ImageCount&facet=ResType,sort:value&facet=State,sort:value&$top=10&scoringProfile=default&queryType=full&search=" + fuzzyQ + filterQuery;
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#searchResults").html('');
for (var item in data.value)
{
var thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/nrhp_thumbnail.png';
if (data.value[item]["ImageCount"] > 0)
thumbNail = 'https://azsplayground.blob.core.windows.net/historicsites/img/' + data.value[item]["NRIS_Refnum"] + '_1.jpeg';
htmlString = '<li id="' + data.value[item]["NRIS_Refnum"] + '"><a><img src="' + thumbNail + '" style="padding-top:15px;padding-left:20px;margin-right:60px;">';
htmlString += '<div style="padding-left: 15px;"><font style="white-space:normal; font-size: small;" >' + data.value[item]["RESNAME"];
htmlString += '<br>';
htmlString += data.value[item]["City"] + ', ';
htmlString += data.value[item]["State"];
htmlString += '<br>';
htmlString += data.value[item]["ResType"];
htmlString += '</font></div></a></li>';
$("#searchResults").append(htmlString);
}
$('#searchResults').delegate('li', 'click', function () {
changePage('#pageDetails', false, $(this).attr('id'));
});
$("#searchResults").listview("refresh");
facets = data["@search.facets"];
updateFacets();
if ((lastSearchTerm != $("#searchbar").val()) && (lastSearchTerm != '*'))
{
lastSearchTerm = $("#searchbar").val();
execSearch(lastSearchTerm);
} else {
$.mobile.loading("hide");
inSearch = false;
}
}
});
}
function execLookup(q) {
// Do a lookup on a specific item to get details
var searchAPI = "https://azs-playground.search.windows.net/indexes/historicsites/docs/" + q +"?api-version=2015-02-28&$select=RESNAME,ResType,Address,City,County,State,NumCBldg,NumCSite,NumCStru,CertifiedDate,Edited,ImageCount";
$.ajax({
url: searchAPI,
beforeSend: function (request) {
request.setRequestHeader("api-key", apikey);
request.setRequestHeader("Content-Type", "application/json");
request.setRequestHeader("Accept", "application/json; odata.metadata=none");
},
type: "GET",
success: function (data) {
$("#detailsResNameAndType").html(data["RESNAME"] + " (" + data["ResType"] + ")");
$("#detailsAddress").html('<label>Address: ' + data["Address"] + ", " + data["City"] + ", " + data["State"] + '</label>');
$("#detailsCertified").html("Certified: " + data["CertifiedDate"].substring(0, 10));
$("#detailsLastEdited").html("Last Edited: " + data["Edited"].substring(0, 10));
var pdfLoc = "http://focus.nps.gov/pdfhost/docs/nrhp/text/" + q + ".pdf";
$("#detailsLastEdited").html("<a href='" + pdfLoc + "'>Learn more...</a>");
$("#detailsImages").html("");
for (var i = 1; i <= data["ImageCount"]; i++)
{
$("#detailsImages").append("<img style='width: 100%;height: auto;max-width: 100%;' src='https://azsplayground.blob.core.windows.net/historicsites/img/" + q + "_" + i + ".jpeg'>");
}
}
});
}
function changePage(page, reverse, nrisRefnum)
{
$.mobile.changePage(page, { transition: 'slide', reverse: reverse });
if (page == "#pageDetails") {
execLookup(nrisRefnum);
}
}
function getStaticHTML(html)
{
if(Object.hasOwnProperty.call(window, "ActiveXObject")){ //using IE
return window.toStaticHTML(html);
} else {
return html;
}
}
function checkIfSelected(facet, value)
{
for (var filter in filterArray) {
if ((filterArray[filter].facet == facet) &&
(filterArray[filter].value == value))
return 'checked';
}
return '';
}
function updateFacets()
{
$("#FilterResType").html('');
$("#FilterResType").append('<legend>Type:</legend>').trigger('create');;
for (var facet in facets.ResType)
{
// Check if this is a selected facet
var checked = checkIfSelected('ResType', facets.ResType[facet].value);
var html = getStaticHTML('<label id="' + facets.ResType[facet].value + '"><input type="checkbox" value = "' + facets.ResType[facet].value + '" ' + checked + ' id="chk' + facets.ResType[facet].value + '">' + facets.ResType[facet].value + ' (' + facets.ResType[facet].count + ')</label>');
$("#FilterResType").append(html).trigger('create');
$('#chk' + facets.ResType[facet].value).change(function () {
RefreshfilterArray('ResType', this.value, this.checked);
});
}
$("#FilterState").html('');
$("#FilterState").append('<legend>State:</legend>').trigger('create');;
for (var facet in facets.State) {
var checked = checkIfSelected('State', facets.State[facet].value);
var html = getStaticHTML('<label id="' + facets.State[facet].value + '"><input type="checkbox" value = "' + facets.State[facet].value + '" ' + checked + ' id="chk' + facets.State[facet].value + '">' + facets.State[facet].value + ' (' + facets.State[facet].count + ')</label>');
$("#FilterState").append(html).trigger('create');
$('#chk' + facets.State[facet].value).change(function () {
RefreshfilterArray('State', this.value, this.checked);
});
}
}
function RefreshfilterArray(facet, value, checked)
{
if (checked) {
filterArray.push({ facet: facet, value: value });
if (inSearch != true)
execSearch();
} else
{
filterArray.forEach(function (result, index) {
if ((result.facet == facet) && (result.value == value) ){
//Remove from array
filterArray.splice(index, 1);
if (inSearch != true)
execSearch();
}
});
}
}
function RefreshFacets()
{
var lastFacet = '';
filterQuery = '';
if (filterArray.length > 0 ){
filterQuery = '&$filter=';
filterArray = filterArray.sort(compare);
for (var filter in filterArray) {
if (filterArray[filter].facet != lastFacet)
filterQuery += "(";
else
filterQuery = filterQuery.substring(0, filterQuery.length-6) + " or ";
filterQuery += filterArray[filter].facet + " eq '" + filterArray[filter].value + "') and ";
lastFacet = filterArray[filter].facet;
}
filterQuery = | execSearch | identifier_name |
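A standalone sketch of the facet grouping that RefreshFacets builds through string slicing: values of one facet are OR-ed inside a group, and groups are AND-ed together. The helper name is hypothetical (not part of the original page), and it additionally escapes embedded apostrophes for OData, which the original omits.
// Hypothetical helper mirroring RefreshFacets()/compare() above.
function buildFilterQuery(filterArray) {
    var groups = {};
    filterArray.forEach(function (f) {
        // OData escapes a single quote by doubling it.
        var value = String(f.value).replace(/'/g, "''");
        (groups[f.facet] = groups[f.facet] || []).push(f.facet + " eq '" + value + "'");
    });
    var parts = Object.keys(groups).map(function (facet) {
        return '(' + groups[facet].join(' or ') + ')';
    });
    return parts.length ? '&$filter=' + parts.join(' and ') : '';
}
// buildFilterQuery([{facet: 'State', value: 'AZ'}, {facet: 'State', value: 'NM'}])
// -> "&$filter=(State eq 'AZ' or State eq 'NM')"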
tensor.rs | use std::ops::{Deref, DerefMut};
use enum_map::EnumMap;
use crate::features::Layer;
use tensorflow::{Tensor, TensorType};
/// Ad-hoc trait for shrinking batches.
pub trait ShrinkBatch {
fn shrink_batch(&self, n_instances: u64) -> Self;
}
impl<T> ShrinkBatch for Tensor<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
assert!(
n_instances <= self.dims()[0],
"Trying to shrink batch of size {} to {}",
self.dims()[0],
n_instances
); | let mut copy = Tensor::new(&new_shape);
copy.copy_from_slice(&self[..new_shape.iter().cloned().product::<u64>() as usize]);
copy
}
}
impl<T> ShrinkBatch for TensorWrap<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
TensorWrap(self.0.shrink_batch(n_instances))
}
}
impl<T> ShrinkBatch for LayerTensors<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
let mut copy = LayerTensors::new();
// Note: EnumMap does not support FromIterator.
for (layer, tensor) in self.iter() {
copy[layer] = tensor.shrink_batch(n_instances);
}
copy
}
}
/// Ad-hoc trait for extracting slices from tensors.
pub trait InstanceSlices<T> {
/// Extract for each layer the slice corresponding to the `idx`-th
/// instance from the batch.
fn to_instance_slices(&mut self, idx: usize) -> EnumMap<Layer, &mut [T]>;
}
impl<T> InstanceSlices<T> for LayerTensors<T>
where
T: TensorType,
{
fn to_instance_slices(&mut self, idx: usize) -> EnumMap<Layer, &mut [T]> {
let mut slices = EnumMap::new();
for (layer, tensor) in self.iter_mut() {
let layer_size = tensor.dims()[1] as usize;
let offset = idx * layer_size;
slices[layer] = &mut tensor[offset..offset + layer_size];
}
slices
}
}
pub type LayerTensors<T> = EnumMap<Layer, TensorWrap<T>>;
/// Simple wrapper for `Tensor` that implements the `Default`
/// trait.
pub struct TensorWrap<T>(pub Tensor<T>)
where
T: TensorType;
impl<T> Default for TensorWrap<T>
where
T: TensorType,
{
fn default() -> Self {
TensorWrap(Tensor::new(&[]))
}
}
impl<T> From<Tensor<T>> for TensorWrap<T>
where
T: TensorType,
{
fn from(tensor: Tensor<T>) -> Self {
TensorWrap(tensor)
}
}
impl<T> Deref for TensorWrap<T>
where
T: TensorType,
{
type Target = Tensor<T>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> DerefMut for TensorWrap<T>
where
T: TensorType,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
#[cfg(test)]
mod tests {
use tensorflow::Tensor;
use super::ShrinkBatch;
#[test]
fn copy_batches() {
let original = Tensor::new(&[4, 2])
.with_values(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])
.expect("Cannot initialize tensor.");
let copy = original.shrink_batch(2);
assert_eq!(&*copy, &[1.0, 2.0, 3.0, 4.0]);
}
} |
let mut new_shape = self.dims().to_owned();
new_shape[0] = n_instances; | random_line_split |
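A minimal usage sketch for the ShrinkBatch trait above, reusing the same tensorflow crate API as the file's own test; the module path for the trait import is an assumption about the crate layout.
// Usage sketch (assumed module path; adjust `crate::tensor` as needed).
use crate::tensor::ShrinkBatch;
use tensorflow::Tensor;

fn demo_shrink() {
    let batch: Tensor<f32> = Tensor::new(&[4, 2])
        .with_values(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])
        .expect("Cannot initialize tensor.");
    // Keep only the first two instances along the batch dimension.
    let head = batch.shrink_batch(2);
    assert_eq!(head.dims(), &[2, 2]);
    assert_eq!(&*head, &[1.0, 2.0, 3.0, 4.0]);
}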
tensor.rs | use std::ops::{Deref, DerefMut};
use enum_map::EnumMap;
use crate::features::Layer;
use tensorflow::{Tensor, TensorType};
/// Ad-hoc trait for shrinking batches.
pub trait ShrinkBatch {
fn shrink_batch(&self, n_instances: u64) -> Self;
}
impl<T> ShrinkBatch for Tensor<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
assert!(
n_instances <= self.dims()[0],
"Trying to shrink batch of size {} to {}",
self.dims()[0],
n_instances
);
let mut new_shape = self.dims().to_owned();
new_shape[0] = n_instances;
let mut copy = Tensor::new(&new_shape);
copy.copy_from_slice(&self[..new_shape.iter().cloned().product::<u64>() as usize]);
copy
}
}
impl<T> ShrinkBatch for TensorWrap<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
TensorWrap(self.0.shrink_batch(n_instances))
}
}
impl<T> ShrinkBatch for LayerTensors<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
let mut copy = LayerTensors::new();
// Note: EnumMap does not support FromIterator.
for (layer, tensor) in self.iter() {
copy[layer] = tensor.shrink_batch(n_instances);
}
copy
}
}
/// Ad-hoc trait for extracting slices from tensors.
pub trait InstanceSlices<T> {
/// Extract for each layer the slice corresponding to the `idx`-th
/// instance from the batch.
fn to_instance_slices(&mut self, idx: usize) -> EnumMap<Layer, &mut [T]>;
}
impl<T> InstanceSlices<T> for LayerTensors<T>
where
T: TensorType,
{
fn | (&mut self, idx: usize) -> EnumMap<Layer, &mut [T]> {
let mut slices = EnumMap::new();
for (layer, tensor) in self.iter_mut() {
let layer_size = tensor.dims()[1] as usize;
let offset = idx * layer_size;
slices[layer] = &mut tensor[offset..offset + layer_size];
}
slices
}
}
pub type LayerTensors<T> = EnumMap<Layer, TensorWrap<T>>;
/// Simple wrapper for `Tensor` that implements the `Default`
/// trait.
pub struct TensorWrap<T>(pub Tensor<T>)
where
T: TensorType;
impl<T> Default for TensorWrap<T>
where
T: TensorType,
{
fn default() -> Self {
TensorWrap(Tensor::new(&[]))
}
}
impl<T> From<Tensor<T>> for TensorWrap<T>
where
T: TensorType,
{
fn from(tensor: Tensor<T>) -> Self {
TensorWrap(tensor)
}
}
impl<T> Deref for TensorWrap<T>
where
T: TensorType,
{
type Target = Tensor<T>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> DerefMut for TensorWrap<T>
where
T: TensorType,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
#[cfg(test)]
mod tests {
use tensorflow::Tensor;
use super::ShrinkBatch;
#[test]
fn copy_batches() {
let original = Tensor::new(&[4, 2])
.with_values(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])
.expect("Cannot initialize tensor.");
let copy = original.shrink_batch(2);
assert_eq!(&*copy, &[1.0, 2.0, 3.0, 4.0]);
}
}
| to_instance_slices | identifier_name |
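A sketch of the InstanceSlices side of the file above: mutate one instance's features across all layers in place. It assumes enum_map's EnumMap iterates as (key, value) pairs; `LayerTensors` and `to_instance_slices` are the items defined in the file.
// Overwrite the idx-th instance's row in every layer tensor.
fn zero_instance(tensors: &mut LayerTensors<f32>, idx: usize) {
    for (_layer, slice) in tensors.to_instance_slices(idx) {
        for value in slice.iter_mut() {
            *value = 0.0;
        }
    }
}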
tensor.rs | use std::ops::{Deref, DerefMut};
use enum_map::EnumMap;
use crate::features::Layer;
use tensorflow::{Tensor, TensorType};
/// Ad-hoc trait for shrinking batches.
pub trait ShrinkBatch {
fn shrink_batch(&self, n_instances: u64) -> Self;
}
impl<T> ShrinkBatch for Tensor<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
assert!(
n_instances <= self.dims()[0],
"Trying to shrink batch of size {} to {}",
self.dims()[0],
n_instances
);
let mut new_shape = self.dims().to_owned();
new_shape[0] = n_instances;
let mut copy = Tensor::new(&new_shape);
copy.copy_from_slice(&self[..new_shape.iter().cloned().product::<u64>() as usize]);
copy
}
}
impl<T> ShrinkBatch for TensorWrap<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
TensorWrap(self.0.shrink_batch(n_instances))
}
}
impl<T> ShrinkBatch for LayerTensors<T>
where
T: Copy + TensorType,
{
fn shrink_batch(&self, n_instances: u64) -> Self {
let mut copy = LayerTensors::new();
// Note: EnumMap does not support FromIterator.
for (layer, tensor) in self.iter() {
copy[layer] = tensor.shrink_batch(n_instances);
}
copy
}
}
/// Ad-hoc trait for extracting slices from tensors.
pub trait InstanceSlices<T> {
/// Extract for each layer the slice corresponding to the `idx`-th
/// instance from the batch.
fn to_instance_slices(&mut self, idx: usize) -> EnumMap<Layer, &mut [T]>;
}
impl<T> InstanceSlices<T> for LayerTensors<T>
where
T: TensorType,
{
fn to_instance_slices(&mut self, idx: usize) -> EnumMap<Layer, &mut [T]> {
let mut slices = EnumMap::new();
for (layer, tensor) in self.iter_mut() {
let layer_size = tensor.dims()[1] as usize;
let offset = idx * layer_size;
slices[layer] = &mut tensor[offset..offset + layer_size];
}
slices
}
}
pub type LayerTensors<T> = EnumMap<Layer, TensorWrap<T>>;
/// Simple wrapper for `Tensor` that implements the `Default`
/// trait.
pub struct TensorWrap<T>(pub Tensor<T>)
where
T: TensorType;
impl<T> Default for TensorWrap<T>
where
T: TensorType,
{
fn default() -> Self {
TensorWrap(Tensor::new(&[]))
}
}
impl<T> From<Tensor<T>> for TensorWrap<T>
where
T: TensorType,
{
fn from(tensor: Tensor<T>) -> Self |
}
impl<T> Deref for TensorWrap<T>
where
T: TensorType,
{
type Target = Tensor<T>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> DerefMut for TensorWrap<T>
where
T: TensorType,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
#[cfg(test)]
mod tests {
use tensorflow::Tensor;
use super::ShrinkBatch;
#[test]
fn copy_batches() {
let original = Tensor::new(&[4, 2])
.with_values(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])
.expect("Cannot initialize tensor.");
let copy = original.shrink_batch(2);
assert_eq!(&*copy, &[1.0, 2.0, 3.0, 4.0]);
}
}
| {
TensorWrap(tensor)
} | identifier_body |
promise-ensurer.js | /**
* @author joel
* 25-11-15.
*/
/// <reference path="../typings/tsd.d.ts" />
/// <reference path="../typings/app.d.ts" />
'use strict';
var _ = require('lodash');
var Promise = require('bluebird');
var IsPromise = require('is-promise');
var PromiseEnsurer = (function () {
function PromiseEnsurer() |
PromiseEnsurer.isPromise = function (value) {
return IsPromise(value);
};
PromiseEnsurer.transformToPromise = function (value) {
return new Promise(function (resolve, reject) {
if (_.isUndefined(value)) {
reject(undefined);
}
else if (_.isBoolean(value)) {
value ? resolve(undefined) : reject(undefined);
}
else {
resolve(value);
}
});
};
PromiseEnsurer.ensure = function (value) {
return this.isPromise(value) ? value : this.transformToPromise(value);
};
return PromiseEnsurer;
})();
module.exports = PromiseEnsurer;
| {
} | identifier_body |
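A usage sketch for the module above: ensure() normalizes plain values, booleans and promises into one bluebird promise interface. The require path is illustrative.
// Usage sketch (assumed file location).
var PromiseEnsurer = require('./promise-ensurer');

PromiseEnsurer.ensure(42).then(function (v) { console.log(v); });            // logs 42
PromiseEnsurer.ensure(true).then(function () { console.log('resolved'); });  // resolved with undefined
PromiseEnsurer.ensure(false).catch(function () { console.log('rejected'); });
PromiseEnsurer.ensure(undefined).catch(function () { console.log('rejected'); });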
promise-ensurer.js | /**
* @author joel
* 25-11-15.
*/
/// <reference path="../typings/tsd.d.ts" />
/// <reference path="../typings/app.d.ts" />
'use strict';
var _ = require('lodash');
var Promise = require('bluebird');
var IsPromise = require('is-promise');
var PromiseEnsurer = (function () {
function PromiseEnsurer() {
}
PromiseEnsurer.isPromise = function (value) {
return IsPromise(value);
};
PromiseEnsurer.transformToPromise = function (value) { | return new Promise(function (resolve, reject) {
if (_.isUndefined(value)) {
reject(undefined);
}
else if (_.isBoolean(value)) {
value ? resolve(undefined) : reject(undefined);
}
else {
resolve(value);
}
});
};
PromiseEnsurer.ensure = function (value) {
return this.isPromise(value) ? value : this.transformToPromise(value);
};
return PromiseEnsurer;
})();
module.exports = PromiseEnsurer; | random_line_split |
|
promise-ensurer.js | /**
* @author joel
* 25-11-15.
*/
/// <reference path="../typings/tsd.d.ts" />
/// <reference path="../typings/app.d.ts" />
'use strict';
var _ = require('lodash');
var Promise = require('bluebird');
var IsPromise = require('is-promise');
var PromiseEnsurer = (function () {
function | () {
}
PromiseEnsurer.isPromise = function (value) {
return IsPromise(value);
};
PromiseEnsurer.transformToPromise = function (value) {
return new Promise(function (resolve, reject) {
if (_.isUndefined(value)) {
reject(undefined);
}
else if (_.isBoolean(value)) {
value ? resolve(undefined) : reject(undefined);
}
else {
resolve(value);
}
});
};
PromiseEnsurer.ensure = function (value) {
return this.isPromise(value) ? value : this.transformToPromise(value);
};
return PromiseEnsurer;
})();
module.exports = PromiseEnsurer;
| PromiseEnsurer | identifier_name |
sten_virtex.py | from uvscada.ngc import *
import math
'''
Need to account for endmill radius to make the die actually fit
w=0.805, h=0.789
em=0.0413
Actual used w/h: cos(45) * 0.0413 = 0.02920351
Total w/h: 0.0413
Wasted w/h: 0.0413 - 0.0292 = 0.0121
each corner
Increase by
'''
cnc = init(
#em=0.0413,
em=0.0625,
fr=2.0,
fr_z=1.0,
rpm=600,
verbose=False)
diew = 0.805
dieh = 0.789
theta = math.atan(diew / dieh)
# FIXME: not necessarily ~45
dw = math.cos(theta) * cnc.em/2
fullw = diew + 2 * dw
dh = math.sin(theta) * cnc.em/2
fullh = dieh + 2 * dh
main_r = 1.063
line('(theta: %0.3f rad, %0.1f deg)' % (theta, theta * 180 / math.pi), verbose=True)
line('(die: %0.3fw x %0.3fh)' % (diew, dieh), verbose=True)
line('(em: %0.3f)' % (cnc.em), verbose=True)
line('(died: %0.3fw x %0.3fh)' % (dw, dh), verbose=True)
line('(dief: %0.3fw x %0.3fh)' % (fullw, fullh), verbose=True)
# Original diagram
if 0:
rect_in_ul(x=-0.403, y=-0.864, w=diew, h=dieh, finishes=1)
rect_in_ul(x=-0.403, y=-0.075, w=diew, h=dieh, finishes=1) | y = fullh/2 + sep/2
# Find corner xy coordinate then calculate dist to center
# Does not account for edge rounding (ie slightly underestimates)
rx = fullw/2
ry = y + fullh/2
rd = (rx**2 + ry**2)**0.5
rsep = main_r - rd
line('(Edge sep: %0.3f)' % (rsep,), verbose=True)
line(' (rect: %0.3fx %0.3fy)' % (rx, ry), verbose=True)
if rsep < 0.05:
raise Exception("DRC fail")
# 1.063 - 0.962970924
rect_in_cent(x=0.0, y=-y, w=fullw, h=fullh, finishes=1)
rect_in_cent(x=0.0, y=y, w=fullw, h=fullh, finishes=1)
# Originally couldn't finish b/c would lift off
# Now waxing down
circ_cent_out(x=0.0, y=0.0, r=main_r, finishes=1)
end() | # Centering properly
if 1:
sep = 0.10
line('(Die sep: %0.3f)' % (sep,), verbose=True) | random_line_split |
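A worked numeric example of the compensation reasoned about in the docstring above (numbers only, no CNC calls): with a 0.0625" endmill approaching the corner at angle theta, each side of the pocket grows by the projected half-diameter, so the milled outline is slightly larger than the bare die.
# Standalone sketch of the dw/dh math used above.
import math

diew, dieh, em = 0.805, 0.789, 0.0625
theta = math.atan(diew / dieh)          # ~45.6 degrees
dw = math.cos(theta) * em / 2           # ~0.0219
dh = math.sin(theta) * em / 2           # ~0.0223
print('full pocket: %0.3f x %0.3f' % (diew + 2 * dw, dieh + 2 * dh))
# -> full pocket: 0.849 x 0.834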
sten_virtex.py | from uvscada.ngc import *
import math
'''
Need to account for endmill radius to make the die actually fit
w=0.805, h=0.789
em=0.0413
Actual used w/h: cos(45) * 0.0413 = 0.02920351
Total w/h: 0.0413
Wasted w/h: 0.0413 - 0.0292 = 0.0121
each corner
Increase by
'''
cnc = init(
#em=0.0413,
em=0.0625,
fr=2.0,
fr_z=1.0,
rpm=600,
verbose=False)
diew = 0.805
dieh = 0.789
theta = math.atan(diew / dieh)
# FIXME: not necessarily ~45
dw = math.cos(theta) * cnc.em/2
fullw = diew + 2 * dw
dh = math.sin(theta) * cnc.em/2
fullh = dieh + 2 * dh
main_r = 1.063
line('(theta: %0.3f rad, %0.1f deg)' % (theta, theta * 180 / math.pi), verbose=True)
line('(die: %0.3fw x %0.3fh)' % (diew, dieh), verbose=True)
line('(em: %0.3f)' % (cnc.em), verbose=True)
line('(died: %0.3fw x %0.3fh)' % (dw, dh), verbose=True)
line('(dief: %0.3fw x %0.3fh)' % (fullw, fullh), verbose=True)
# Original diagram
if 0:
|
# Centering properly
if 1:
sep = 0.10
line('(Die sep: %0.3f)' % (sep,), verbose=True)
y = fullh/2 + sep/2
# Find corner xy coordinate then calculate dist to center
# Does not account for edge rounding (ie slightly underestimates)
rx = fullw/2
ry = y + fullh/2
rd = (rx**2 + ry**2)**0.5
rsep = main_r - rd
line('(Edge sep: %0.3f)' % (rsep,), verbose=True)
line(' (rect: %0.3fx %0.3fy)' % (rx, ry), verbose=True)
if rsep < 0.05:
raise Exception("DRC fail")
# 1.063 - 0.962970924
rect_in_cent(x=0.0, y=-y, w=fullw, h=fullh, finishes=1)
rect_in_cent(x=0.0, y=y, w=fullw, h=fullh, finishes=1)
# Originally couldn't finish b/c would lift off
# Now waxing down
circ_cent_out(x=0.0, y=0.0, r=main_r, finishes=1)
end()
| rect_in_ul(x=-0.403, y=-0.864, w=diew, h=dieh, finishes=1)
rect_in_ul(x=-0.403, y=-0.075, w=diew, h=dieh, finishes=1) | conditional_block |
__init__.py | # coding: utf-8
from collections import namedtuple
from pandas.io.msgpack.exceptions import * # noqa
from pandas.io.msgpack._version import version # noqa
class | (namedtuple("ExtType", "code data")):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
raise ValueError("code must be 0~127")
return super().__new__(cls, code, data)
import os # noqa
from pandas.io.msgpack._packer import Packer # noqa
from pandas.io.msgpack._unpacker import unpack, unpackb, Unpacker # noqa
def pack(o, stream, **kwargs):
"""
Pack object `o` and write it to `stream`
See :class:`Packer` for options.
"""
packer = Packer(**kwargs)
stream.write(packer.pack(o))
def packb(o, **kwargs):
"""
Pack object `o` and return packed bytes
See :class:`Packer` for options.
"""
return Packer(**kwargs).pack(o)
# aliases for compatibility with simplejson/marshal/pickle.
load = unpack
loads = unpackb
dump = pack
dumps = packb
| ExtType | identifier_name |
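A round-trip sketch for the ExtType container defined above, using the module's own packb/unpackb exports. Note this pandas.io.msgpack module was removed in later pandas versions, so treat the import path as historical; the default ext hook returning ExtType is an assumption carried over from msgpack-python.
# Round-trip sketch.
from pandas.io.msgpack import ExtType, packb, unpackb

payload = packb(ExtType(5, b'raw-bytes'))
restored = unpackb(payload)
assert restored == ExtType(5, b'raw-bytes')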
__init__.py | # coding: utf-8
from collections import namedtuple
from pandas.io.msgpack.exceptions import * # noqa
from pandas.io.msgpack._version import version # noqa
class ExtType(namedtuple("ExtType", "code data")):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
|
return super().__new__(cls, code, data)
import os # noqa
from pandas.io.msgpack._packer import Packer # noqa
from pandas.io.msgpack._unpacker import unpack, unpackb, Unpacker # noqa
def pack(o, stream, **kwargs):
"""
Pack object `o` and write it to `stream`
See :class:`Packer` for options.
"""
packer = Packer(**kwargs)
stream.write(packer.pack(o))
def packb(o, **kwargs):
"""
Pack object `o` and return packed bytes
See :class:`Packer` for options.
"""
return Packer(**kwargs).pack(o)
# aliases for compatibility with simplejson/marshal/pickle.
load = unpack
loads = unpackb
dump = pack
dumps = packb
| raise ValueError("code must be 0~127") | conditional_block |
__init__.py | # coding: utf-8
| from pandas.io.msgpack._version import version # noqa
class ExtType(namedtuple("ExtType", "code data")):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
raise ValueError("code must be 0~127")
return super().__new__(cls, code, data)
import os # noqa
from pandas.io.msgpack._packer import Packer # noqa
from pandas.io.msgpack._unpacker import unpack, unpackb, Unpacker # noqa
def pack(o, stream, **kwargs):
"""
Pack object `o` and write it to `stream`
See :class:`Packer` for options.
"""
packer = Packer(**kwargs)
stream.write(packer.pack(o))
def packb(o, **kwargs):
"""
Pack object `o` and return packed bytes
See :class:`Packer` for options.
"""
return Packer(**kwargs).pack(o)
# aliases for compatibility with simplejson/marshal/pickle.
load = unpack
loads = unpackb
dump = pack
dumps = packb | from collections import namedtuple
from pandas.io.msgpack.exceptions import * # noqa | random_line_split |
__init__.py | # coding: utf-8
from collections import namedtuple
from pandas.io.msgpack.exceptions import * # noqa
from pandas.io.msgpack._version import version # noqa
class ExtType(namedtuple("ExtType", "code data")):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
raise ValueError("code must be 0~127")
return super().__new__(cls, code, data)
import os # noqa
from pandas.io.msgpack._packer import Packer # noqa
from pandas.io.msgpack._unpacker import unpack, unpackb, Unpacker # noqa
def pack(o, stream, **kwargs):
"""
Pack object `o` and write it to `stream`
See :class:`Packer` for options.
"""
packer = Packer(**kwargs)
stream.write(packer.pack(o))
def packb(o, **kwargs):
|
# aliases for compatibility with simplejson/marshal/pickle.
load = unpack
loads = unpackb
dump = pack
dumps = packb
| """
Pack object `o` and return packed bytes
See :class:`Packer` for options.
"""
return Packer(**kwargs).pack(o) | identifier_body |
amf.py | # Copyright (c) The AcidSWF Project.
# See LICENSE.txt for details.
"""
Support for creating a service which runs an AMF server.
@since: 1.0
"""
import logging
from twisted.python import usage
from twisted.application import service
from acidswf.service import createAMFService
| optParameters = [
['log-level', None, logging.INFO, 'Log level.'],
['amf-transport', None, 'http', 'Run the AMF server on HTTP or HTTPS transport.'],
['amf-host', None, 'localhost', 'The interface for the AMF gateway to listen on.'],
['service', None, 'acidswf', 'The remote service name.'],
['amf-port', None, 8000, 'The port number for the AMF gateway to listen on.'],
['crossdomain', None, 'crossdomain.xml', 'Path to a crossdomain.xml file.'],
]
class Options(usage.Options):
"""
Define the options accepted by the I{acidswf amf} plugin.
"""
synopsis = "[amf options]"
optParameters = optParameters
longdesc = """\
This starts an AMF server."""
def postOptions(self):
"""
Set up conditional defaults and check for dependencies.
If SSL is not available but an HTTPS server was configured, raise a
L{UsageError} indicating that this is not possible.
If no server port was supplied, select a default appropriate for the
other options supplied.
"""
pass
#if self['https']:
# try:
# from twisted.internet.ssl import DefaultOpenSSLContextFactory
# except ImportError:
# raise usage.UsageError("SSL support not installed")
def makeService(options):
top_service = service.MultiService()
createAMFService(top_service, options)
return top_service | random_line_split |
|
amf.py | # Copyright (c) The AcidSWF Project.
# See LICENSE.txt for details.
"""
Support for creating a service which runs an AMF server.
@since: 1.0
"""
import logging
from twisted.python import usage
from twisted.application import service
from acidswf.service import createAMFService
optParameters = [
['log-level', None, logging.INFO, 'Log level.'],
['amf-transport', None, 'http', 'Run the AMF server on HTTP or HTTPS transport.'],
['amf-host', None, 'localhost', 'The interface for the AMF gateway to listen on.'],
['service', None, 'acidswf', 'The remote service name.'],
['amf-port', None, 8000, 'The port number for the AMF gateway to listen on.'],
['crossdomain', None, 'crossdomain.xml', 'Path to a crossdomain.xml file.'],
]
class Options(usage.Options):
| """
pass
#if self['https']:
# try:
# from twisted.internet.ssl import DefaultOpenSSLContextFactory
# except ImportError:
# raise usage.UsageError("SSL support not installed")
def makeService(options):
top_service = service.MultiService()
createAMFService(top_service, options)
return top_service
| """
Define the options accepted by the I{acidswf amf} plugin.
"""
synopsis = "[amf options]"
optParameters = optParameters
longdesc = """\
This starts an AMF server."""
def postOptions(self):
"""
Set up conditional defaults and check for dependencies.
If SSL is not available but an HTTPS server was configured, raise a
L{UsageError} indicating that this is not possible.
If no server port was supplied, select a default appropriate for the
other options supplied. | identifier_body |
amf.py | # Copyright (c) The AcidSWF Project.
# See LICENSE.txt for details.
"""
Support for creating a service which runs an AMF server.
@since: 1.0
"""
import logging
from twisted.python import usage
from twisted.application import service
from acidswf.service import createAMFService
optParameters = [
['log-level', None, logging.INFO, 'Log level.'],
['amf-transport', None, 'http', 'Run the AMF server on HTTP or HTTPS transport.'],
['amf-host', None, 'localhost', 'The interface for the AMF gateway to listen on.'],
['service', None, 'acidswf', 'The remote service name.'],
['amf-port', None, 8000, 'The port number for the AMF gateway to listen on.'],
['crossdomain', None, 'crossdomain.xml', 'Path to a crossdomain.xml file.'],
]
class | (usage.Options):
"""
Define the options accepted by the I{acidswf amf} plugin.
"""
synopsis = "[amf options]"
optParameters = optParameters
longdesc = """\
This starts an AMF server."""
def postOptions(self):
"""
Set up conditional defaults and check for dependencies.
If SSL is not available but an HTTPS server was configured, raise a
L{UsageError} indicating that this is not possible.
If no server port was supplied, select a default appropriate for the
other options supplied.
"""
pass
#if self['https']:
# try:
# from twisted.internet.ssl import DefaultOpenSSLContextFactory
# except ImportError:
# raise usage.UsageError("SSL support not installed")
def makeService(options):
top_service = service.MultiService()
createAMFService(top_service, options)
return top_service
| Options | identifier_name |
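A hedged usage sketch of how this twistd-style plugin would be driven by hand instead of via the twistd command line. The flag names come from optParameters above; Options and makeService are the objects defined in the module, and the rest is standard Twisted.
# Driving the plugin manually (sketch).
from twisted.application import service

options = Options()
options.parseOptions(['--amf-port', '8000', '--amf-host', 'localhost'])
top = makeService(options)              # a MultiService wrapping the AMF gateway
assert isinstance(top, service.MultiService)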
bitops-nsieve-bits.js | var _sunSpiderStartDate = new Date();
// The Great Computer Language Shootout
// http://shootout.alioth.debian.org
//
// Contributed by Ian Osgood
function pad(n,width) {
var s = n.toString();
while (s.length < width) s = ' ' + s;
return s;
} | function primes(isPrime, n) {
var i, count = 0, m = 10000<<n, size = m+31>>5;
for (i=0; i<size; i++) isPrime[i] = 0xffffffff;
for (i=2; i<m; i++)
if (isPrime[i>>5] & 1<<(i&31)) {
for (var j=i+i; j<m; j+=i)
isPrime[j>>5] &= ~(1<<(j&31));
count++;
}
}
function sieve() {
for (var i = 4; i <= 4; i++) {
var isPrime = new Array((10000<<i)+31>>5);
primes(isPrime, i);
}
}
sieve();
var _sunSpiderInterval = new Date() - _sunSpiderStartDate;
dumpValue(_sunSpiderInterval); | random_line_split |
|
bitops-nsieve-bits.js |
var _sunSpiderStartDate = new Date();
// The Great Computer Language Shootout
// http://shootout.alioth.debian.org
//
// Contributed by Ian Osgood
function pad(n,width) {
var s = n.toString();
while (s.length < width) s = ' ' + s;
return s;
}
function primes(isPrime, n) {
var i, count = 0, m = 10000<<n, size = m+31>>5;
for (i=0; i<size; i++) isPrime[i] = 0xffffffff;
for (i=2; i<m; i++)
if (isPrime[i>>5] & 1<<(i&31)) {
for (var j=i+i; j<m; j+=i)
isPrime[j>>5] &= ~(1<<(j&31));
count++;
}
}
function sieve() |
sieve();
var _sunSpiderInterval = new Date() - _sunSpiderStartDate;
dumpValue(_sunSpiderInterval);
| {
for (var i = 4; i <= 4; i++) {
var isPrime = new Array((10000<<i)+31>>5);
primes(isPrime, i);
}
} | identifier_body |
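A short sketch of the bit-packing convention the sieve above relies on: one flag per bit in 32-bit words, so bit i lives at word i>>5 (i / 32) under mask 1<<(i&31) (i % 32). The helper names are illustrative.
// Bit-packed flag helpers matching the sieve's indexing.
function getBit(words, i) { return (words[i >> 5] >>> (i & 31)) & 1; }
function clearBit(words, i) { words[i >> 5] &= ~(1 << (i & 31)); }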
bitops-nsieve-bits.js |
var _sunSpiderStartDate = new Date();
// The Great Computer Language Shootout
// http://shootout.alioth.debian.org
//
// Contributed by Ian Osgood
function | (n,width) {
var s = n.toString();
while (s.length < width) s = ' ' + s;
return s;
}
function primes(isPrime, n) {
var i, count = 0, m = 10000<<n, size = m+31>>5;
for (i=0; i<size; i++) isPrime[i] = 0xffffffff;
for (i=2; i<m; i++)
if (isPrime[i>>5] & 1<<(i&31)) {
for (var j=i+i; j<m; j+=i)
isPrime[j>>5] &= ~(1<<(j&31));
count++;
}
}
function sieve() {
for (var i = 4; i <= 4; i++) {
var isPrime = new Array((10000<<i)+31>>5);
primes(isPrime, i);
}
}
sieve();
var _sunSpiderInterval = new Date() - _sunSpiderStartDate;
dumpValue(_sunSpiderInterval);
| pad | identifier_name |
bitops-nsieve-bits.js |
var _sunSpiderStartDate = new Date();
// The Great Computer Language Shootout
// http://shootout.alioth.debian.org
//
// Contributed by Ian Osgood
function pad(n,width) {
var s = n.toString();
while (s.length < width) s = ' ' + s;
return s;
}
function primes(isPrime, n) {
var i, count = 0, m = 10000<<n, size = m+31>>5;
for (i=0; i<size; i++) isPrime[i] = 0xffffffff;
for (i=2; i<m; i++)
if (isPrime[i>>5] & 1<<(i&31)) |
}
function sieve() {
for (var i = 4; i <= 4; i++) {
var isPrime = new Array((10000<<i)+31>>5);
primes(isPrime, i);
}
}
sieve();
var _sunSpiderInterval = new Date() - _sunSpiderStartDate;
dumpValue(_sunSpiderInterval);
| {
for (var j=i+i; j<m; j+=i)
isPrime[j>>5] &= ~(1<<(j&31));
count++;
} | conditional_block |
ColossusHero.tsx | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import React, { useState } from 'react';
import { styled } from '@csegames/linaria/react';
const Hero = styled.div`
position: fixed;
top: 0;
left: 0;
bottom: 0;
right: 0;
flex: 1 1 auto;
display: flex;
flex-direction: column;
align-content: stretch;
align-items: stretch;
justify-content: flex-start;
flex-wrap: nowrap;
user-select: none !important;
-webkit-user-select: none !important;
transition: opacity 2s ease;
`;
const Content = styled.div`
width: 100%;
height: 100%;
flex: 1 1 auto;
`;
const Video = styled.video` | object-fit: cover;
`;
export function ColossusHero(props: {}) {
const [isInitialVideo, setIsInitialVideo] = useState(true);
function onVideoEnded() {
setIsInitialVideo(false);
}
return (
<Hero>
<Content>
<Video src='videos/fsr-logo-4k-10q-loop.webm' poster='' onEnded={onVideoEnded} autoPlay={isInitialVideo} loop></Video>
{isInitialVideo && <Video src='videos/fsr-intro-4k-10q.webm' poster='images/cse/login-cse.jpg' onEnded={onVideoEnded} autoPlay></Video>}
</Content>
</Hero>
)
} | position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%; | random_line_split |
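A hedged alternative sketch of the same intro-then-loop handoff using a single element instead of two stacked videos; the component name and state shape are illustrative, not from the original file, and it reuses the Video styled component defined above.
// One-element variant: swap src when the intro finishes.
function IntroThenLoop() {
  const [src, setSrc] = React.useState('videos/fsr-intro-4k-10q.webm');
  const isLoop = src.endsWith('-loop.webm');
  return (
    <Video
      src={src}
      autoPlay
      loop={isLoop}
      onEnded={() => setSrc('videos/fsr-logo-4k-10q-loop.webm')}
    />
  );
}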
ColossusHero.tsx | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import React, { useState } from 'react';
import { styled } from '@csegames/linaria/react';
const Hero = styled.div`
position: fixed;
top: 0;
left: 0;
bottom: 0;
right: 0;
flex: 1 1 auto;
display: flex;
flex-direction: column;
align-content: stretch;
align-items: stretch;
justify-content: flex-start;
flex-wrap: nowrap;
user-select: none !important;
-webkit-user-select: none !important;
transition: opacity 2s ease;
`;
const Content = styled.div`
width: 100%;
height: 100%;
flex: 1 1 auto;
`;
const Video = styled.video`
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
object-fit: cover;
`;
export function ColossusHero(props: {}) {
const [isInitialVideo, setIsInitialVideo] = useState(true);
function | () {
setIsInitialVideo(false);
}
return (
<Hero>
<Content>
<Video src='videos/fsr-logo-4k-10q-loop.webm' poster='' onEnded={onVideoEnded} autoPlay={isInitialVideo} loop></Video>
{isInitialVideo && <Video src='videos/fsr-intro-4k-10q.webm' poster='images/cse/login-cse.jpg' onEnded={onVideoEnded} autoPlay></Video>}
</Content>
</Hero>
)
} | onVideoEnded | identifier_name |
ColossusHero.tsx | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import React, { useState } from 'react';
import { styled } from '@csegames/linaria/react';
const Hero = styled.div`
position: fixed;
top: 0;
left: 0;
bottom: 0;
right: 0;
flex: 1 1 auto;
display: flex;
flex-direction: column;
align-content: stretch;
align-items: stretch;
justify-content: flex-start;
flex-wrap: nowrap;
user-select: none !important;
-webkit-user-select: none !important;
transition: opacity 2s ease;
`;
const Content = styled.div`
width: 100%;
height: 100%;
flex: 1 1 auto;
`;
const Video = styled.video`
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
object-fit: cover;
`;
export function ColossusHero(props: {}) {
const [isInitialVideo, setIsInitialVideo] = useState(true);
function onVideoEnded() |
return (
<Hero>
<Content>
<Video src='videos/fsr-logo-4k-10q-loop.webm' poster='' onEnded={onVideoEnded} autoPlay={isInitialVideo} loop></Video>
{isInitialVideo && <Video src='videos/fsr-intro-4k-10q.webm' poster='images/cse/login-cse.jpg' onEnded={onVideoEnded} autoPlay></Video>}
</Content>
</Hero>
)
} | {
setIsInitialVideo(false);
} | identifier_body |
JustPremium.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Addon import Addon
class JustPremium(Addon):
| def links_added(self, links, pid):
hosterdict = self.pyload.pluginManager.hosterPlugins
linkdict = self.pyload.api.checkURLs(links)
premiumplugins = set(account.type for account in self.pyload.api.getAccounts(False) \
if account.valid and account.premium)
multihosters = set(hoster for hoster in self.pyload.pluginManager.hosterPlugins \
if 'new_name' in hosterdict[hoster] \
and hosterdict[hoster]['new_name'] in premiumplugins)
excluded = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('excluded').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
included = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('included').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
hosterlist = (premiumplugins | multihosters).union(excluded).difference(included)
#: Found at least one hoster with account or multihoster
if not any( True for pluginname in linkdict if pluginname in hosterlist ):
return
for pluginname in set(linkdict.keys()) - hosterlist:
self.log_info(_("Remove links of plugin: %s") % pluginname)
for link in linkdict[pluginname]:
self.log_debug("Remove link: %s" % link)
links.remove(link)
| __name__ = "JustPremium"
__type__ = "hook"
__version__ = "0.25"
__status__ = "testing"
__config__ = [("activated", "bool", "Activated" , False),
("excluded" , "str" , "Exclude hosters (comma separated)", "" ),
("included" , "str" , "Include hosters (comma separated)", "" )]
__description__ = """Remove non-premium links from added URLs"""
__license__ = "GPLv3"
__authors__ = [("mazleu" , "[email protected]"),
("Walter Purcaro", "[email protected]" ),
("immenz" , "[email protected]" )]
def init(self):
self.event_map = {'linksAdded': "links_added"}
| identifier_body |
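A sketch of the domain-to-plugin-name normalization used inside links_added() above: split on dots and digit runs, capitalize the word parts, and keep digit runs intact, which is how pyLoad derives hoster plugin names from configured domains. The helper name is illustrative.
# Standalone version of the re.split(...).capitalize() trick.
import re

def plugin_name(domain):
    return "".join(part.capitalize()
                   for part in re.split(r'(\.|\d+)', domain)
                   if part != '.')

assert plugin_name("uploaded.net") == "UploadedNet"
assert plugin_name("1fichier.com") == "1FichierCom"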
JustPremium.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Addon import Addon
class JustPremium(Addon):
__name__ = "JustPremium"
__type__ = "hook"
__version__ = "0.25"
__status__ = "testing"
__config__ = [("activated", "bool", "Activated" , False),
("excluded" , "str" , "Exclude hosters (comma separated)", "" ),
("included" , "str" , "Include hosters (comma separated)", "" )]
__description__ = """Remove non-premium links from added URLs"""
__license__ = "GPLv3"
__authors__ = [("mazleu" , "[email protected]"),
("Walter Purcaro", "[email protected]" ),
("immenz" , "[email protected]" )]
def init(self):
self.event_map = {'linksAdded': "links_added"}
def links_added(self, links, pid):
hosterdict = self.pyload.pluginManager.hosterPlugins
linkdict = self.pyload.api.checkURLs(links)
premiumplugins = set(account.type for account in self.pyload.api.getAccounts(False) \
if account.valid and account.premium)
multihosters = set(hoster for hoster in self.pyload.pluginManager.hosterPlugins \
if 'new_name' in hosterdict[hoster] \
and hosterdict[hoster]['new_name'] in premiumplugins)
excluded = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('excluded').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
included = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('included').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
hosterlist = (premiumplugins | multihosters).union(excluded).difference(included)
#: Found at least one hoster with account or multihoster
if not any( True for pluginname in linkdict if pluginname in hosterlist ):
return
for pluginname in set(linkdict.keys()) - hosterlist:
self.log_info(_("Remove links of plugin: %s") % pluginname)
for link in linkdict[pluginname]:
| self.log_debug("Remove link: %s" % link)
links.remove(link) | conditional_block |
|
JustPremium.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Addon import Addon
class | (Addon):
__name__ = "JustPremium"
__type__ = "hook"
__version__ = "0.25"
__status__ = "testing"
__config__ = [("activated", "bool", "Activated" , False),
("excluded" , "str" , "Exclude hosters (comma separated)", "" ),
("included" , "str" , "Include hosters (comma separated)", "" )]
__description__ = """Remove non-premium links from added URLs"""
__license__ = "GPLv3"
__authors__ = [("mazleu" , "[email protected]"),
("Walter Purcaro", "[email protected]" ),
("immenz" , "[email protected]" )]
def init(self):
self.event_map = {'linksAdded': "links_added"}
def links_added(self, links, pid):
hosterdict = self.pyload.pluginManager.hosterPlugins
linkdict = self.pyload.api.checkURLs(links)
premiumplugins = set(account.type for account in self.pyload.api.getAccounts(False) \
if account.valid and account.premium)
multihosters = set(hoster for hoster in self.pyload.pluginManager.hosterPlugins \
if 'new_name' in hosterdict[hoster] \
and hosterdict[hoster]['new_name'] in premiumplugins)
excluded = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('excluded').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
included = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('included').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
hosterlist = (premiumplugins | multihosters).union(excluded).difference(included)
#: Found at least one hoster with account or multihoster
if not any( True for pluginname in linkdict if pluginname in hosterlist ):
return
for pluginname in set(linkdict.keys()) - hosterlist:
self.log_info(_("Remove links of plugin: %s") % pluginname)
for link in linkdict[pluginname]:
self.log_debug("Remove link: %s" % link)
links.remove(link)
| JustPremium | identifier_name |
JustPremium.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Addon import Addon
class JustPremium(Addon):
__name__ = "JustPremium" | __config__ = [("activated", "bool", "Activated" , False),
("excluded" , "str" , "Exclude hosters (comma separated)", "" ),
("included" , "str" , "Include hosters (comma separated)", "" )]
__description__ = """Remove non-premium links from added URLs"""
__license__ = "GPLv3"
__authors__ = [("mazleu" , "[email protected]"),
("Walter Purcaro", "[email protected]" ),
("immenz" , "[email protected]" )]
def init(self):
self.event_map = {'linksAdded': "links_added"}
def links_added(self, links, pid):
hosterdict = self.pyload.pluginManager.hosterPlugins
linkdict = self.pyload.api.checkURLs(links)
premiumplugins = set(account.type for account in self.pyload.api.getAccounts(False) \
if account.valid and account.premium)
multihosters = set(hoster for hoster in self.pyload.pluginManager.hosterPlugins \
if 'new_name' in hosterdict[hoster] \
and hosterdict[hoster]['new_name'] in premiumplugins)
excluded = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('excluded').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
included = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
self.get_config('included').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
hosterlist = (premiumplugins | multihosters).union(excluded).difference(included)
#: Found at least one hoster with account or multihoster
if not any( True for pluginname in linkdict if pluginname in hosterlist ):
return
for pluginname in set(linkdict.keys()) - hosterlist:
self.log_info(_("Remove links of plugin: %s") % pluginname)
for link in linkdict[pluginname]:
self.log_debug("Remove link: %s" % link)
links.remove(link) | __type__ = "hook"
__version__ = "0.25"
__status__ = "testing"
| random_line_split |
coherence_copy_like_err_fundamental_struct_ref.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we are able to introduce a negative constraint that
// `MyType: !MyTrait` along with other "fundamental" wrappers.
// aux-build:coherence_copy_like_lib.rs
// compile-pass
// skip-codegen
#![allow(dead_code)]
extern crate coherence_copy_like_lib as lib;
struct MyType { x: i32 }
trait MyTrait { fn | () {} }
impl<T: lib::MyCopy> MyTrait for T { }
// `MyFundamentalStruct` is declared fundamental, so we can test that
//
// MyFundamentalStruct<&MyTrait>: !MyTrait
//
// Huzzah.
impl<'a> MyTrait for lib::MyFundamentalStruct<&'a MyType> { }
fn main() { }
| foo | identifier_name |
coherence_copy_like_err_fundamental_struct_ref.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we are able to introduce a negative constraint that
// `MyType: !MyTrait` along with other "fundamental" wrappers.
// aux-build:coherence_copy_like_lib.rs
// compile-pass
// skip-codegen
#![allow(dead_code)] |
extern crate coherence_copy_like_lib as lib;
struct MyType { x: i32 }
trait MyTrait { fn foo() {} }
impl<T: lib::MyCopy> MyTrait for T { }
// `MyFundamentalStruct` is declared fundamental, so we can test that
//
// MyFundamentalStruct<&MyTrait>: !MyTrait
//
// Huzzah.
impl<'a> MyTrait for lib::MyFundamentalStruct<&'a MyType> { }
fn main() { } | random_line_split |
|
coherence_copy_like_err_fundamental_struct_ref.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we are able to introduce a negative constraint that
// `MyType: !MyTrait` along with other "fundamental" wrappers.
// aux-build:coherence_copy_like_lib.rs
// compile-pass
// skip-codegen
#![allow(dead_code)]
extern crate coherence_copy_like_lib as lib;
struct MyType { x: i32 }
trait MyTrait { fn foo() {} }
impl<T: lib::MyCopy> MyTrait for T { }
// `MyFundamentalStruct` is declared fundamental, so we can test that
//
// MyFundamentalStruct<&MyTrait>: !MyTrait
//
// Huzzah.
impl<'a> MyTrait for lib::MyFundamentalStruct<&'a MyType> { }
fn main() | { } | identifier_body |
|
sigproc.py | # Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# for py2/py3 compatibility
from __future__ import print_function
import signal
from . import base
from testrunner.local import utils
class SignalProc(base.TestProcObserver):
def __init__(self):
super(SignalProc, self).__init__()
self.exit_code = utils.EXIT_CODE_PASS
def setup(self, *args, **kwargs):
super(SignalProc, self).setup(*args, **kwargs)
# It should be called after processors are chained together so that a
# caught signal is not lost.
signal.signal(signal.SIGINT, self._on_ctrlc)
signal.signal(signal.SIGTERM, self._on_sigterm)
def _on_ctrlc(self, _signum, _stack_frame):
print('>>> Ctrl-C detected, early abort...')
self.exit_code = utils.EXIT_CODE_INTERRUPTED
self.stop()
def | (self, _signum, _stack_frame):
print('>>> SIGTERM received, early abort...')
self.exit_code = utils.EXIT_CODE_TERMINATED
self.stop()
| _on_sigterm | identifier_name |
sigproc.py | # Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# for py2/py3 compatibility
from __future__ import print_function
import signal
from . import base
from testrunner.local import utils
class SignalProc(base.TestProcObserver):
def __init__(self): | super(SignalProc, self).__init__()
self.exit_code = utils.EXIT_CODE_PASS
def setup(self, *args, **kwargs):
super(SignalProc, self).setup(*args, **kwargs)
# It should be called after processors are chained together so that a
# caught signal is not lost.
signal.signal(signal.SIGINT, self._on_ctrlc)
signal.signal(signal.SIGTERM, self._on_sigterm)
def _on_ctrlc(self, _signum, _stack_frame):
print('>>> Ctrl-C detected, early abort...')
self.exit_code = utils.EXIT_CODE_INTERRUPTED
self.stop()
def _on_sigterm(self, _signum, _stack_frame):
print('>>> SIGTERM received, early abort...')
self.exit_code = utils.EXIT_CODE_TERMINATED
self.stop() | random_line_split |
|
sigproc.py | # Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# for py2/py3 compatibility
from __future__ import print_function
import signal
from . import base
from testrunner.local import utils
class SignalProc(base.TestProcObserver):
def __init__(self):
super(SignalProc, self).__init__()
self.exit_code = utils.EXIT_CODE_PASS
def setup(self, *args, **kwargs):
super(SignalProc, self).setup(*args, **kwargs)
# It should be called after processors are chained together so that a
# caught signal is not lost.
signal.signal(signal.SIGINT, self._on_ctrlc)
signal.signal(signal.SIGTERM, self._on_sigterm)
def _on_ctrlc(self, _signum, _stack_frame):
|
def _on_sigterm(self, _signum, _stack_frame):
print('>>> SIGTERM received, early abort...')
self.exit_code = utils.EXIT_CODE_TERMINATED
self.stop()
| print('>>> Ctrl-C detected, early abort...')
self.exit_code = utils.EXIT_CODE_INTERRUPTED
self.stop() | identifier_body |
time_stepping.py | (x) + implicit_terms(x)
`explicit_terms(x)` includes terms that should use explicit time-stepping and
`implicit_terms(x)` includes terms that should be modeled implicitly.
Typically the explicit terms are non-linear and the implicit terms are linear.
This simplifies solves but isn't strictly necessary.
"""
def explicit_terms(self, state: PyTreeState) -> PyTreeState:
"""Evaluates explicit terms in the ODE."""
raise NotImplementedError
def implicit_terms(self, state: PyTreeState) -> PyTreeState:
"""Evaluates implicit terms in the ODE."""
raise NotImplementedError
def implicit_solve(
self, state: PyTreeState, step_size: float,
) -> PyTreeState:
"""Solves `y - step_size * implicit_terms(y) = x` for y."""
raise NotImplementedError
def backward_forward_euler(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via forward and backward Euler methods.
This method is first order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + dt * F(u0)
u1 = G_inv(g, dt)
return u1
return step_fn
def cran | equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and 2nd order Runge-Kutta (Heun).
This method is second order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Chandler, G. J. & Kerswell, R. R. Invariant recurrent solutions embedded in
a turbulent two-dimensional Kolmogorov flow. J. Fluid Mech. 722, 554–595
(2013). https://doi.org/10.1017/jfm.2013.122 (Section 3)
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + 0.5 * dt * G(u0)
h1 = F(u0)
u1 = G_inv(g + dt * h1, 0.5 * dt)
h2 = 0.5 * (F(u1) + h1)
u2 = G_inv(g + dt * h2, 0.5 * dt)
return u2
return step_fn
def low_storage_runge_kutta_crank_nicolson(
alphas: Sequence[float],
betas: Sequence[float],
gammas: Sequence[float],
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping via "low-storage" Runge-Kutta and Crank-Nicolson steps.
These schemes are second order accurate for the implicit terms, but potentially
higher order accurate for the explicit terms. This seems to be a favorable
tradeoff when the explicit terms dominate, e.g., for modeling turbulent
fluids.
Per Canuto: "[these methods] have been widely used for the time-discretization
in applications of spectral methods."
Args:
alphas: alpha coefficients.
betas: beta coefficients.
gammas: gamma coefficients.
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Canuto, C., Yousuff Hussaini, M., Quarteroni, A. & Zang, T. A.
Spectral Methods: Evolution to Complex Geometries and Applications to
Fluid Dynamics. (Springer Berlin Heidelberg, 2007).
https://doi.org/10.1007/978-3-540-30728-0 (Appendix D.3)
"""
# pylint: disable=invalid-name,non-ascii-name
α = alphas
β = betas
γ = gammas
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
if len(alphas) - 1 != len(betas) != len(gammas):
raise ValueError("number of RK coefficients does not match")
@tree_math.wrap
def step_fn(u):
h = 0
for k in range(len(β)):
h = F(u) + β[k] * h
µ = 0.5 * dt * (α[k + 1] - α[k])
u = G_inv(u + γ[k] * dt * h + µ * G(u), µ)
return u
return step_fn
def crank_nicolson_rk3(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK3 ("Williamson")."""
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 1/3, 3/4, 1],
betas=[0, -5/9, -153/128],
gammas=[1/3, 15/16, 8/15],
equation=equation,
time_step=time_step,
)
def crank_nicolson_rk4(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK4 ("Carpenter-Kennedy")."""
# pylint: disable=line-too-long
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 0.1496590219993, 0.3704009573644, 0.6222557631345, 0.9582821306748, 1],
betas=[0, -0.4178904745, -1.192151694643, -1.697784692471, -1.514183444257],
gammas=[0.1496590219993, 0.3792103129999, 0.8229550293869, 0.6994504559488, 0.1530572479681],
equation=equation,
time_step=time_step,
)
@dataclasses.dataclass
class ImExButcherTableau:
"""Butcher Tableau for implicit-explicit Runge-Kutta methods."""
a_ex: Sequence[Sequence[float]]
a_im: Sequence[Sequence[float]]
b_ex: Sequence[float]
b_im: Sequence[float]
def __post_init__(self):
if len({len(self.a_ex) + 1,
len(self.a_im) + 1,
len(self.b_ex),
len(self.b_im)}) > 1:
raise ValueError("inconsistent Butcher tableau")
def imex_runge_kutta(
tableau: ImExButcherTableau,
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping with Implicit-Explicit Runge-Kutta."""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
a_ex = tableau.a_ex
a_im = tableau.a_im
b_ex = tableau.b_ex
b_im = tableau.b_im
num_steps = len(b_ex)
@tree_math.wrap
def step_fn(y0):
f = [None] * num_steps
g = [None] * num_steps
f[0] = F(y0)
g[0] = G(y0)
for i in range(1, num_steps):
ex_terms = dt * sum(a_ex[i-1][j] * f[j] for j in range(i) if a_ex[i-1][j])
im_terms = dt * sum(a_im[i-1][j] * g[j] for | k_nicolson_rk2(
| identifier_name |
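A hedged usage sketch for the module above: a scalar toy ODE with a stiff linear part handled implicitly and a nonlinear part handled explicitly, stepped with the backward/forward Euler splitting. ImplicitExplicitODE and backward_forward_euler are the definitions from the file; the toy equation itself and its closed-form solve are assumptions for illustration. Note implicit_solve solves y - eta * (-y) = u, i.e. y = u / (1 + eta).
# Toy IMEX stepping sketch.
import jax.numpy as jnp

class ToyODE(ImplicitExplicitODE):
  def explicit_terms(self, u):
    return jnp.cos(u)            # "nonlinear" part, treated explicitly
  def implicit_terms(self, u):
    return -u                    # stiff linear part, treated implicitly
  def implicit_solve(self, u, eta):
    return u / (1 + eta)         # closed-form solve of y - eta * (-y) = u

step = backward_forward_euler(ToyODE(), time_step=0.1)
u = jnp.float32(1.0)
for _ in range(10):
  u = step(u)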
time_stepping.py | raise NotImplementedError
def backward_forward_euler(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via forward and backward Euler methods.
This method is first order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + dt * F(u0)
u1 = G_inv(g, dt)
return u1
return step_fn
def crank_nicolson_rk2(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and 2nd order Runge-Kutta (Heun).
This method is second order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Chandler, G. J. & Kerswell, R. R. Invariant recurrent solutions embedded in
a turbulent two-dimensional Kolmogorov flow. J. Fluid Mech. 722, 554–595
(2013). https://doi.org/10.1017/jfm.2013.122 (Section 3)
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + 0.5 * dt * G(u0)
h1 = F(u0)
u1 = G_inv(g + dt * h1, 0.5 * dt)
h2 = 0.5 * (F(u1) + h1)
u2 = G_inv(g + dt * h2, 0.5 * dt)
return u2
return step_fn
def low_storage_runge_kutta_crank_nicolson(
alphas: Sequence[float],
betas: Sequence[float],
gammas: Sequence[float],
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping via "low-storage" Runge-Kutta and Crank-Nicolson steps.
These schemes are second order accurate for the implicit terms, but potentially
higher order accurate for the explicit terms. This seems to be a favorable
tradeoff when the explicit terms dominate, e.g., for modeling turbulent
fluids.
Per Canuto: "[these methods] have been widely used for the time-discretization
in applications of spectral methods."
Args:
alphas: alpha coefficients.
betas: beta coefficients.
gammas: gamma coefficients.
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Canuto, C., Yousuff Hussaini, M., Quarteroni, A. & Zang, T. A.
Spectral Methods: Evolution to Complex Geometries and Applications to
Fluid Dynamics. (Springer Berlin Heidelberg, 2007).
https://doi.org/10.1007/978-3-540-30728-0 (Appendix D.3)
"""
# pylint: disable=invalid-name,non-ascii-name
α = alphas
β = betas
γ = gammas
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
if len(alphas) - 1 != len(betas) != len(gammas):
raise ValueError("number of RK coefficients does not match")
@tree_math.wrap
def step_fn(u):
h = 0
for k in range(len(β)):
h = F(u) + β[k] * h
µ = 0.5 * dt * (α[k + 1] - α[k])
u = G_inv(u + γ[k] * dt * h + µ * G(u), µ)
return u
return step_fn
def crank_nicolson_rk3(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK3 ("Williamson")."""
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 1/3, 3/4, 1],
betas=[0, -5/9, -153/128],
gammas=[1/3, 15/16, 8/15],
equation=equation,
time_step=time_step,
)
def crank_nicolson_rk4(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK4 ("Carpenter-Kennedy")."""
# pylint: disable=line-too-long
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 0.1496590219993, 0.3704009573644, 0.6222557631345, 0.9582821306748, 1],
betas=[0, -0.4178904745, -1.192151694643, -1.697784692471, -1.514183444257],
gammas=[0.1496590219993, 0.3792103129999, 0.8229550293869, 0.6994504559488, 0.1530572479681],
equation=equation,
time_step=time_step,
)
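All of these factories return a plain state -> state function, so rolling out a trajectory composes naturally with jax; a sketch (trajectory and its arguments are illustrative, not from the source):
import jax

def trajectory(step_fn, u0, num_steps):
  def body(u, _):
    u_next = step_fn(u)
    return u_next, u_next
  _, us = jax.lax.scan(body, u0, xs=None, length=num_steps)
  return us  # stacked states u_1 .. u_num_steps

# step = crank_nicolson_rk4(DecayODE(), time_step=1e-3)
# us = trajectory(step, jnp.float32(0.1), num_steps=1000)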
@dataclasses.dataclass
class ImExButcherTableau:
"""Butcher Tableau for implicit-explicit Runge-Kutta methods."""
a_ex: Sequence[Sequence[float]]
a_im: Sequence[Sequence[float]]
b_ex: Sequence[float]
b_im: Sequence[float]
def __post_init__(self):
if len({len(self.a_ex) + 1,
len(self.a_im) + 1,
len(self.b_ex),
len(self.b_im)}) > 1:
raise ValueError("inconsistent Butcher tableau")
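As a concrete, illustrative instance of this convention — a_ex/a_im hold one row per intermediate stage, and a_im rows also carry the implicit diagonal entry that imex_runge_kutta below consumes as a_im[i-1][i] — the second-order implicit-explicit midpoint rule can be written as (the name imex_midpoint is an assumption):
imex_midpoint = ImExButcherTableau(
    a_ex=[[0.5]],
    a_im=[[0.0, 0.5]],
    b_ex=[0.0, 1.0],
    b_im=[0.0, 1.0],
)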
def imex_runge_kutta(
tableau: ImExButcherTableau,
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping with Implicit-Explicit Runge-Kutta."""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
a_ex = tableau.a_ex
a_im = tableau.a_im
b_ex = tableau.b_ex
b_im = tableau.b_im
num_steps = len(b_ex)
@tree_math.wrap
def step_fn(y0):
f = [None] * num_ | steps
g = [None] * num_steps
f[0] = F(y0)
g[0] = G(y0)
for i in range(1, num_steps):
ex_terms = dt * sum(a_ex[i-1][j] * f[j] for j in range(i) if a_ex[i-1][j])
im_terms = dt * sum(a_im[i-1][j] * g[j] for j in range(i) if a_im[i-1][j])
Y_star = y0 + ex_terms + im_terms
Y = G_inv(Y_star, dt * a_im[i-1][i])
if any(a_ex[j][i] for j in range(i, num_steps - 1)) or b_ex[i]:
f[i] = F(Y)
if any(a_im[j][i] for j in range(i, num_steps - 1)) or b_im[i]:
g[i] = G(Y)
ex_terms = dt * sum(b_ex[j] * f[j] for j in range(num_steps) if b_ex[j])
im_terms = dt * sum(b_im[j] * g[j] for j in range(num_steps) if b_im[j])
y_next = y0 + ex_terms + im_terms
| identifier_body |
|
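Sketch of assembling a stepper from such a tableau (illustrative, reusing the hypothetical imex_midpoint and DecayODE from above):
step = imex_runge_kutta(imex_midpoint, DecayODE(), time_step=1e-3)
# u_next = step(u)  # one IMEX-RK step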
time_stepping.py | (x) + implicit_terms(x)
`explicit_terms(x)` includes terms that should use explicit time-stepping and
`implicit_terms(x)` includes terms that should be modeled implicitly.
Typically the explicit terms are non-linear and the implicit terms are linear.
This simplifies solves but isn't strictly necessary.
"""
def explicit_terms(self, state: PyTreeState) -> PyTreeState:
"""Evaluates explicit terms in the ODE."""
raise NotImplementedError
def implicit_terms(self, state: PyTreeState) -> PyTreeState:
"""Evaluates implicit terms in the ODE."""
raise NotImplementedError
def implicit_solve(
self, state: PyTreeState, step_size: float,
) -> PyTreeState:
"""Solves `y - step_size * implicit_terms(y) = x` for y."""
raise NotImplementedError
def backward_forward_euler(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via forward and backward Euler methods.
This method is first order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + dt * F(u0)
u1 = G_inv(g, dt)
return u1
return step_fn
def crank_nicolson_rk2(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and 2nd order Runge-Kutta (Heun).
This method is second order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Chandler, G. J. & Kerswell, R. R. Invariant recurrent solutions embedded in
a turbulent two-dimensional Kolmogorov flow. J. Fluid Mech. 722, 554–595
(2013). https://doi.org/10.1017/jfm.2013.122 (Section 3)
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + 0.5 * dt * G(u0)
h1 = F(u0)
u1 = G_inv(g + dt * h1, 0.5 * dt)
h2 = 0.5 * (F(u1) + h1)
u2 = G_inv(g + dt * h2, 0.5 * dt)
return u2
return step_fn
def low_storage_runge_kutta_crank_nicolson(
alphas: Sequence[float],
betas: Sequence[float],
gammas: Sequence[float],
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping via "low-storage" Runge-Kutta and Crank-Nicolson steps.
These schemes are second order accurate for the implicit terms, but potentially
higher order accurate for the explicit terms. This seems to be a favorable
tradeoff when the explicit terms dominate, e.g., for modeling turbulent
fluids.
Per Canuto: "[these methods] have been widely used for the time-discretization
in applications of spectral methods."
Args:
alphas: alpha coefficients.
betas: beta coefficients.
gammas: gamma coefficients.
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Canuto, C., Yousuff Hussaini, M., Quarteroni, A. & Zang, T. A.
Spectral Methods: Evolution to Complex Geometries and Applications to
Fluid Dynamics. (Springer Berlin Heidelberg, 2007).
https://doi.org/10.1007/978-3-540-30728-0 (Appendix D.3)
"""
# pylint: disable=invalid-name,non-ascii-name
α = alphas
β = betas
γ = gammas
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
if len(alphas) - 1 != len(betas) or len(betas) != len(gammas):
raise ValueError("number of RK coefficients does not match")
@tree_math.wrap
def step_fn(u):
h = 0
for k in range(len(β)):
h = F(u) + | eturn step_fn
def crank_nicolson_rk3(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK3 ("Williamson")."""
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 1/3, 3/4, 1],
betas=[0, -5/9, -153/128],
gammas=[1/3, 15/16, 8/15],
equation=equation,
time_step=time_step,
)
def crank_nicolson_rk4(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK4 ("Carpenter-Kennedy")."""
# pylint: disable=line-too-long
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 0.1496590219993, 0.3704009573644, 0.6222557631345, 0.9582821306748, 1],
betas=[0, -0.4178904745, -1.192151694643, -1.697784692471, -1.514183444257],
gammas=[0.1496590219993, 0.3792103129999, 0.8229550293869, 0.6994504559488, 0.1530572479681],
equation=equation,
time_step=time_step,
)
@dataclasses.dataclass
class ImExButcherTableau:
"""Butcher Tableau for implicit-explicit Runge-Kutta methods."""
a_ex: Sequence[Sequence[float]]
a_im: Sequence[Sequence[float]]
b_ex: Sequence[float]
b_im: Sequence[float]
def __post_init__(self):
if len({len(self.a_ex) + 1,
len(self.a_im) + 1,
len(self.b_ex),
len(self.b_im)}) > 1:
raise ValueError("inconsistent Butcher tableau")
def imex_runge_kutta(
tableau: ImExButcherTableau,
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping with Implicit-Explicit Runge-Kutta."""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
a_ex = tableau.a_ex
a_im = tableau.a_im
b_ex = tableau.b_ex
b_im = tableau.b_im
num_steps = len(b_ex)
@tree_math.wrap
def step_fn(y0):
f = [None] * num_steps
g = [None] * num_steps
f[0] = F(y0)
g[0] = G(y0)
for i in range(1, num_steps):
ex_terms = dt * sum(a_ex[i-1][j] * f[j] for j in range(i) if a_ex[i-1][j])
im_terms = dt * sum(a_im[i-1][j] * g[j] for j | β[k] * h
µ = 0.5 * dt * (α[k + 1] - α[k])
u = G_inv(u + γ[k] * dt * h + µ * G(u), µ)
return u
r | conditional_block |
time_stepping.py | (x) + implicit_terms(x)
`explicit_terms(x)` includes terms that should use explicit time-stepping and
`implicit_terms(x)` includes terms that should be modeled implicitly.
Typically the explicit terms are non-linear and the implicit terms are linear.
This simplifies solves but isn't strictly necessary.
"""
def explicit_terms(self, state: PyTreeState) -> PyTreeState:
"""Evaluates explicit terms in the ODE."""
raise NotImplementedError
def implicit_terms(self, state: PyTreeState) -> PyTreeState:
"""Evaluates implicit terms in the ODE."""
raise NotImplementedError
def implicit_solve(
self, state: PyTreeState, step_size: float,
) -> PyTreeState:
"""Solves `y - step_size * implicit_terms(y) = x` for y."""
raise NotImplementedError
def backward_forward_euler(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn: |
This method is first order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + dt * F(u0)
u1 = G_inv(g, dt)
return u1
return step_fn
def crank_nicolson_rk2(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and 2nd order Runge-Kutta (Heun).
This method is second order accurate.
Args:
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Chandler, G. J. & Kerswell, R. R. Invariant recurrent solutions embedded in
a turbulent two-dimensional Kolmogorov flow. J. Fluid Mech. 722, 554–595
(2013). https://doi.org/10.1017/jfm.2013.122 (Section 3)
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
@tree_math.wrap
def step_fn(u0):
g = u0 + 0.5 * dt * G(u0)
h1 = F(u0)
u1 = G_inv(g + dt * h1, 0.5 * dt)
h2 = 0.5 * (F(u1) + h1)
u2 = G_inv(g + dt * h2, 0.5 * dt)
return u2
return step_fn
def low_storage_runge_kutta_crank_nicolson(
alphas: Sequence[float],
betas: Sequence[float],
gammas: Sequence[float],
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping via "low-storage" Runge-Kutta and Crank-Nicolson steps.
These schemes are second order accurate for the implicit terms, but potentially
higher order accurate for the explicit terms. This seems to be a favorable
tradeoff when the explicit terms dominate, e.g., for modeling turbulent
fluids.
Per Canuto: "[these methods] have been widely used for the time-discretization
in applications of spectral methods."
Args:
alphas: alpha coefficients.
betas: beta coefficients.
gammas: gamma coefficients.
equation: equation to solve.
time_step: time step.
Returns:
Function that performs a time step.
Reference:
Canuto, C., Yousuff Hussaini, M., Quarteroni, A. & Zang, T. A.
Spectral Methods: Evolution to Complex Geometries and Applications to
Fluid Dynamics. (Springer Berlin Heidelberg, 2007).
https://doi.org/10.1007/978-3-540-30728-0 (Appendix D.3)
"""
# pylint: disable=invalid-name,non-ascii-name
α = alphas
β = betas
γ = gammas
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
if len(alphas) - 1 != len(betas) or len(betas) != len(gammas):
raise ValueError("number of RK coefficients does not match")
@tree_math.wrap
def step_fn(u):
h = 0
for k in range(len(β)):
h = F(u) + β[k] * h
µ = 0.5 * dt * (α[k + 1] - α[k])
u = G_inv(u + γ[k] * dt * h + µ * G(u), µ)
return u
return step_fn
def crank_nicolson_rk3(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK3 ("Williamson")."""
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 1/3, 3/4, 1],
betas=[0, -5/9, -153/128],
gammas=[1/3, 15/16, 8/15],
equation=equation,
time_step=time_step,
)
def crank_nicolson_rk4(
equation: ImplicitExplicitODE, time_step: float,
) -> TimeStepFn:
"""Time stepping via Crank-Nicolson and RK4 ("Carpenter-Kennedy")."""
# pylint: disable=line-too-long
return low_storage_runge_kutta_crank_nicolson(
alphas=[0, 0.1496590219993, 0.3704009573644, 0.6222557631345, 0.9582821306748, 1],
betas=[0, -0.4178904745, -1.192151694643, -1.697784692471, -1.514183444257],
gammas=[0.1496590219993, 0.3792103129999, 0.8229550293869, 0.6994504559488, 0.1530572479681],
equation=equation,
time_step=time_step,
)
@dataclasses.dataclass
class ImExButcherTableau:
"""Butcher Tableau for implicit-explicit Runge-Kutta methods."""
a_ex: Sequence[Sequence[float]]
a_im: Sequence[Sequence[float]]
b_ex: Sequence[float]
b_im: Sequence[float]
def __post_init__(self):
if len({len(self.a_ex) + 1,
len(self.a_im) + 1,
len(self.b_ex),
len(self.b_im)}) > 1:
raise ValueError("inconsistent Butcher tableau")
def imex_runge_kutta(
tableau: ImExButcherTableau,
equation: ImplicitExplicitODE,
time_step: float,
) -> TimeStepFn:
"""Time stepping with Implicit-Explicit Runge-Kutta."""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.unwrap(equation.explicit_terms)
G = tree_math.unwrap(equation.implicit_terms)
G_inv = tree_math.unwrap(equation.implicit_solve, vector_argnums=0)
a_ex = tableau.a_ex
a_im = tableau.a_im
b_ex = tableau.b_ex
b_im = tableau.b_im
num_steps = len(b_ex)
@tree_math.wrap
def step_fn(y0):
f = [None] * num_steps
g = [None] * num_steps
f[0] = F(y0)
g[0] = G(y0)
for i in range(1, num_steps):
ex_terms = dt * sum(a_ex[i-1][j] * f[j] for j in range(i) if a_ex[i-1][j])
im_terms = dt * sum(a_im[i-1][j] * g[j] for j | """Time stepping via forward and backward Euler methods. | random_line_split |
ogr2ogrtabletopostgislist.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
ogr2ogrtabletopostgislist.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QSettings
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterTable
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName
class Ogr2OgrTableToPostGisList(GdalAlgorithm):
DATABASE = 'DATABASE'
INPUT_LAYER = 'INPUT_LAYER'
HOST = 'HOST'
PORT = 'PORT'
USER = 'USER'
DBNAME = 'DBNAME'
PASSWORD = 'PASSWORD'
SCHEMA = 'SCHEMA'
TABLE = 'TABLE'
PK = 'PK'
PRIMARY_KEY = 'PRIMARY_KEY'
WHERE = 'WHERE'
GT = 'GT'
OVERWRITE = 'OVERWRITE'
APPEND = 'APPEND'
ADDFIELDS = 'ADDFIELDS'
LAUNDER = 'LAUNDER'
SKIPFAILURES = 'SKIPFAILURES'
PRECISION = 'PRECISION'
OPTIONS = 'OPTIONS'
def dbConnectionNames(self):
|
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Import layer/table as geometryless table into PostgreSQL database')
self.group, self.i18n_group = self.trAlgorithm('[OGR] Miscellaneous')
self.DB_CONNECTIONS = self.dbConnectionNames()
self.addParameter(ParameterSelection(self.DATABASE,
self.tr('Database (connection name)'), self.DB_CONNECTIONS))
self.addParameter(ParameterTable(self.INPUT_LAYER,
self.tr('Input layer')))
self.addParameter(ParameterString(self.SCHEMA,
self.tr('Schema name'), 'public', optional=True))
self.addParameter(ParameterString(self.TABLE,
self.tr('Table name, leave blank to use input name'),
'', optional=True))
self.addParameter(ParameterString(self.PK,
self.tr('Primary key'), 'id', optional=True))
self.addParameter(ParameterTableField(self.PRIMARY_KEY,
self.tr('Primary key (existing field, used if the above option is left empty)'),
self.INPUT_LAYER, optional=True))
self.addParameter(ParameterString(self.WHERE,
self.tr('Select features using a SQL "WHERE" statement (Ex: column=\'value\')'),
'', optional=True))
self.addParameter(ParameterString(self.GT,
self.tr('Group N features per transaction (Default: 20000)'),
'', optional=True))
self.addParameter(ParameterBoolean(self.OVERWRITE,
self.tr('Overwrite existing table'), True))
self.addParameter(ParameterBoolean(self.APPEND,
self.tr('Append to existing table'), False))
self.addParameter(ParameterBoolean(self.ADDFIELDS,
self.tr('Append and add new fields to existing table'), False))
self.addParameter(ParameterBoolean(self.LAUNDER,
self.tr('Do not launder columns/table names'), False))
self.addParameter(ParameterBoolean(self.SKIPFAILURES,
self.tr('Continue after a failure, skipping the failed record'),
False))
self.addParameter(ParameterBoolean(self.PRECISION,
self.tr('Keep width and precision of input attributes'),
True))
self.addParameter(ParameterString(self.OPTIONS,
self.tr('Additional creation options'), '', optional=True))
def getConsoleCommands(self):
connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
settings = QSettings()
mySettings = '/PostgreSQL/connections/' + connection
dbname = settings.value(mySettings + '/database')
user = settings.value(mySettings + '/username')
host = settings.value(mySettings + '/host')
port = settings.value(mySettings + '/port')
password = settings.value(mySettings + '/password')
inLayer = self.getParameterValue(self.INPUT_LAYER)
ogrLayer = ogrConnectionString(inLayer)[1:-1]
schema = unicode(self.getParameterValue(self.SCHEMA))
table = unicode(self.getParameterValue(self.TABLE))
pk = unicode(self.getParameterValue(self.PK))
pkstring = "-lco FID=" + pk
primary_key = self.getParameterValue(self.PRIMARY_KEY)
where = unicode(self.getParameterValue(self.WHERE))
wherestring = '-where "' + where + '"'
gt = unicode(self.getParameterValue(self.GT))
overwrite = self.getParameterValue(self.OVERWRITE)
append = self.getParameterValue(self.APPEND)
addfields = self.getParameterValue(self.ADDFIELDS)
launder = self.getParameterValue(self.LAUNDER)
launderstring = "-lco LAUNDER=NO"
skipfailures = self.getParameterValue(self.SKIPFAILURES)
precision = self.getParameterValue(self.PRECISION)
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
arguments.append('-progress')
arguments.append('--config PG_USE_COPY YES')
arguments.append('-f')
arguments.append('PostgreSQL')
arguments.append('PG:"host=')
arguments.append(host)
arguments.append('port=')
arguments.append(port)
if len(dbname) > 0:
arguments.append('dbname=' + dbname)
if len(password) > 0:
arguments.append('password=' + password)
if len(schema) > 0:
arguments.append('active_schema=' + schema)
else:
arguments.append('active_schema=public')
arguments.append('user=' + user + '"')
arguments.append(ogrLayer)
arguments.append('-nlt NONE')
arguments.append(ogrLayerName(inLayer))
if launder:
arguments.append(launderstring)
if append:
arguments.append('-append')
if addfields:
arguments.append('-addfields')
if overwrite:
arguments.append('-overwrite')
if len(pk) > 0:
arguments.append(pkstring)
elif primary_key is not None:
arguments.append("-lco FID=" + primary_key)
if len(table) == 0:
table = ogrLayerName(inLayer).lower()
if schema:
table = '{}.{}'.format(schema, table)
arguments.append('-nln')
arguments.append(table)
if skipfailures:
arguments.append('-skipfailures')
if where:
arguments.append(wherestring)
if len(gt) > 0:
arguments.append('-gt')
arguments.append(gt)
if not precision:
arguments.append('-lco PRECISION=NO')
if len(options) > 0:
arguments.append(options)
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
return commands
def commandName(self):
return "ogr2ogr"
| settings = QSettings()
settings.beginGroup('/PostgreSQL/connections/')
return settings.childGroups() | identifier_body |
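For orientation, an illustrative example of what getConsoleCommands() above assembles — the connection values and paths here are made up, and exact quoting/spacing depends on GdalUtils.escapeAndJoin and the stored QSettings:
# With a connection storing host=localhost, port=5432, dbname=gis,
# user=me, and input layer /data/parcels.shp, the call is roughly:
#
#   ogr2ogr -progress --config PG_USE_COPY YES -f PostgreSQL
#     PG:"host=localhost port=5432 dbname=gis password=... active_schema=public user=me"
#     /data/parcels.shp -nlt NONE parcels -lco FID=id -overwrite
#     -nln public.parcels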
ogr2ogrtabletopostgislist.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
ogr2ogrtabletopostgislist.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QSettings
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterTable
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName
class Ogr2OgrTableToPostGisList(GdalAlgorithm):
DATABASE = 'DATABASE'
INPUT_LAYER = 'INPUT_LAYER'
HOST = 'HOST'
PORT = 'PORT'
USER = 'USER'
DBNAME = 'DBNAME'
PASSWORD = 'PASSWORD'
SCHEMA = 'SCHEMA'
TABLE = 'TABLE'
PK = 'PK'
PRIMARY_KEY = 'PRIMARY_KEY'
WHERE = 'WHERE'
GT = 'GT'
OVERWRITE = 'OVERWRITE'
APPEND = 'APPEND'
ADDFIELDS = 'ADDFIELDS'
LAUNDER = 'LAUNDER'
SKIPFAILURES = 'SKIPFAILURES'
PRECISION = 'PRECISION'
OPTIONS = 'OPTIONS'
def | (self):
settings = QSettings()
settings.beginGroup('/PostgreSQL/connections/')
return settings.childGroups()
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Import layer/table as geometryless table into PostgreSQL database')
self.group, self.i18n_group = self.trAlgorithm('[OGR] Miscellaneous')
self.DB_CONNECTIONS = self.dbConnectionNames()
self.addParameter(ParameterSelection(self.DATABASE,
self.tr('Database (connection name)'), self.DB_CONNECTIONS))
self.addParameter(ParameterTable(self.INPUT_LAYER,
self.tr('Input layer')))
self.addParameter(ParameterString(self.SCHEMA,
self.tr('Schema name'), 'public', optional=True))
self.addParameter(ParameterString(self.TABLE,
self.tr('Table name, leave blank to use input name'),
'', optional=True))
self.addParameter(ParameterString(self.PK,
self.tr('Primary key'), 'id', optional=True))
self.addParameter(ParameterTableField(self.PRIMARY_KEY,
self.tr('Primary key (existing field, used if the above option is left empty)'),
self.INPUT_LAYER, optional=True))
self.addParameter(ParameterString(self.WHERE,
self.tr('Select features using a SQL "WHERE" statement (Ex: column=\'value\')'),
'', optional=True))
self.addParameter(ParameterString(self.GT,
self.tr('Group N features per transaction (Default: 20000)'),
'', optional=True))
self.addParameter(ParameterBoolean(self.OVERWRITE,
self.tr('Overwrite existing table'), True))
self.addParameter(ParameterBoolean(self.APPEND,
self.tr('Append to existing table'), False))
self.addParameter(ParameterBoolean(self.ADDFIELDS,
self.tr('Append and add new fields to existing table'), False))
self.addParameter(ParameterBoolean(self.LAUNDER,
self.tr('Do not launder columns/table names'), False))
self.addParameter(ParameterBoolean(self.SKIPFAILURES,
self.tr('Continue after a failure, skipping the failed record'),
False))
self.addParameter(ParameterBoolean(self.PRECISION,
self.tr('Keep width and precision of input attributes'),
True))
self.addParameter(ParameterString(self.OPTIONS,
self.tr('Additional creation options'), '', optional=True))
def getConsoleCommands(self):
connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
settings = QSettings()
mySettings = '/PostgreSQL/connections/' + connection
dbname = settings.value(mySettings + '/database')
user = settings.value(mySettings + '/username')
host = settings.value(mySettings + '/host')
port = settings.value(mySettings + '/port')
password = settings.value(mySettings + '/password')
inLayer = self.getParameterValue(self.INPUT_LAYER)
ogrLayer = ogrConnectionString(inLayer)[1:-1]
schema = unicode(self.getParameterValue(self.SCHEMA))
table = unicode(self.getParameterValue(self.TABLE))
pk = unicode(self.getParameterValue(self.PK))
pkstring = "-lco FID=" + pk
primary_key = self.getParameterValue(self.PRIMARY_KEY)
where = unicode(self.getParameterValue(self.WHERE))
wherestring = '-where "' + where + '"'
gt = unicode(self.getParameterValue(self.GT))
overwrite = self.getParameterValue(self.OVERWRITE)
append = self.getParameterValue(self.APPEND)
addfields = self.getParameterValue(self.ADDFIELDS)
launder = self.getParameterValue(self.LAUNDER)
launderstring = "-lco LAUNDER=NO"
skipfailures = self.getParameterValue(self.SKIPFAILURES)
precision = self.getParameterValue(self.PRECISION)
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
arguments.append('-progress')
arguments.append('--config PG_USE_COPY YES')
arguments.append('-f')
arguments.append('PostgreSQL')
arguments.append('PG:"host=')
arguments.append(host)
arguments.append('port=')
arguments.append(port)
if len(dbname) > 0:
arguments.append('dbname=' + dbname)
if len(password) > 0:
arguments.append('password=' + password)
if len(schema) > 0:
arguments.append('active_schema=' + schema)
else:
arguments.append('active_schema=public')
arguments.append('user=' + user + '"')
arguments.append(ogrLayer)
arguments.append('-nlt NONE')
arguments.append(ogrLayerName(inLayer))
if launder:
arguments.append(launderstring)
if append:
arguments.append('-append')
if addfields:
arguments.append('-addfields')
if overwrite:
arguments.append('-overwrite')
if len(pk) > 0:
arguments.append(pkstring)
elif primary_key is not None:
arguments.append("-lco FID=" + primary_key)
if len(table) == 0:
table = ogrLayerName(inLayer).lower()
if schema:
table = '{}.{}'.format(schema, table)
arguments.append('-nln')
arguments.append(table)
if skipfailures:
arguments.append('-skipfailures')
if where:
arguments.append(wherestring)
if len(gt) > 0:
arguments.append('-gt')
arguments.append(gt)
if not precision:
arguments.append('-lco PRECISION=NO')
if len(options) > 0:
arguments.append(options)
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
return commands
def commandName(self):
return "ogr2ogr"
| dbConnectionNames | identifier_name |
ogr2ogrtabletopostgislist.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
ogr2ogrtabletopostgislist.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QSettings
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterTable
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName
class Ogr2OgrTableToPostGisList(GdalAlgorithm):
DATABASE = 'DATABASE'
INPUT_LAYER = 'INPUT_LAYER'
HOST = 'HOST'
PORT = 'PORT'
USER = 'USER'
DBNAME = 'DBNAME'
PASSWORD = 'PASSWORD'
SCHEMA = 'SCHEMA'
TABLE = 'TABLE'
PK = 'PK'
PRIMARY_KEY = 'PRIMARY_KEY'
WHERE = 'WHERE'
GT = 'GT'
OVERWRITE = 'OVERWRITE'
APPEND = 'APPEND'
ADDFIELDS = 'ADDFIELDS'
LAUNDER = 'LAUNDER'
SKIPFAILURES = 'SKIPFAILURES'
PRECISION = 'PRECISION'
OPTIONS = 'OPTIONS'
def dbConnectionNames(self):
settings = QSettings()
settings.beginGroup('/PostgreSQL/connections/')
return settings.childGroups()
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Import layer/table as geometryless table into PostgreSQL database')
self.group, self.i18n_group = self.trAlgorithm('[OGR] Miscellaneous')
self.DB_CONNECTIONS = self.dbConnectionNames()
self.addParameter(ParameterSelection(self.DATABASE,
self.tr('Database (connection name)'), self.DB_CONNECTIONS))
self.addParameter(ParameterTable(self.INPUT_LAYER,
self.tr('Input layer')))
self.addParameter(ParameterString(self.SCHEMA,
self.tr('Schema name'), 'public', optional=True))
self.addParameter(ParameterString(self.TABLE,
self.tr('Table name, leave blank to use input name'),
'', optional=True))
self.addParameter(ParameterString(self.PK,
self.tr('Primary key'), 'id', optional=True))
self.addParameter(ParameterTableField(self.PRIMARY_KEY,
self.tr('Primary key (existing field, used if the above option is left empty)'),
self.INPUT_LAYER, optional=True))
self.addParameter(ParameterString(self.WHERE,
self.tr('Select features using a SQL "WHERE" statement (Ex: column=\'value\')'),
'', optional=True))
self.addParameter(ParameterString(self.GT,
self.tr('Group N features per transaction (Default: 20000)'),
'', optional=True))
self.addParameter(ParameterBoolean(self.OVERWRITE,
self.tr('Overwrite existing table'), True))
self.addParameter(ParameterBoolean(self.APPEND,
self.tr('Append to existing table'), False))
self.addParameter(ParameterBoolean(self.ADDFIELDS,
self.tr('Append and add new fields to existing table'), False))
self.addParameter(ParameterBoolean(self.LAUNDER,
self.tr('Do not launder columns/table names'), False))
self.addParameter(ParameterBoolean(self.SKIPFAILURES,
self.tr('Continue after a failure, skipping the failed record'),
False))
self.addParameter(ParameterBoolean(self.PRECISION,
self.tr('Keep width and precision of input attributes'),
True))
self.addParameter(ParameterString(self.OPTIONS,
self.tr('Additional creation options'), '', optional=True))
def getConsoleCommands(self):
connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
settings = QSettings()
mySettings = '/PostgreSQL/connections/' + connection
dbname = settings.value(mySettings + '/database')
user = settings.value(mySettings + '/username')
host = settings.value(mySettings + '/host')
port = settings.value(mySettings + '/port')
password = settings.value(mySettings + '/password')
inLayer = self.getParameterValue(self.INPUT_LAYER)
ogrLayer = ogrConnectionString(inLayer)[1:-1]
schema = unicode(self.getParameterValue(self.SCHEMA))
table = unicode(self.getParameterValue(self.TABLE))
pk = unicode(self.getParameterValue(self.PK))
pkstring = "-lco FID=" + pk
primary_key = self.getParameterValue(self.PRIMARY_KEY)
where = unicode(self.getParameterValue(self.WHERE))
wherestring = '-where "' + where + '"'
gt = unicode(self.getParameterValue(self.GT))
overwrite = self.getParameterValue(self.OVERWRITE)
append = self.getParameterValue(self.APPEND)
addfields = self.getParameterValue(self.ADDFIELDS)
launder = self.getParameterValue(self.LAUNDER)
launderstring = "-lco LAUNDER=NO"
skipfailures = self.getParameterValue(self.SKIPFAILURES)
precision = self.getParameterValue(self.PRECISION)
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
arguments.append('-progress')
arguments.append('--config PG_USE_COPY YES')
arguments.append('-f')
arguments.append('PostgreSQL')
arguments.append('PG:"host=')
arguments.append(host)
arguments.append('port=')
arguments.append(port)
if len(dbname) > 0:
arguments.append('dbname=' + dbname)
if len(password) > 0:
arguments.append('password=' + password)
if len(schema) > 0:
arguments.append('active_schema=' + schema)
else:
arguments.append('active_schema=public')
arguments.append('user=' + user + '"')
arguments.append(ogrLayer)
arguments.append('-nlt NONE')
arguments.append(ogrLayerName(inLayer))
if launder:
arguments.append(launderstring)
if append:
arguments.append('-append')
if addfields:
arguments.append('-addfields')
if overwrite:
arguments.append('-overwrite')
if len(pk) > 0:
arguments.append(pkstring)
elif primary_key is not None:
arguments.append("-lco FID=" + primary_key)
if len(table) == 0:
table = ogrLayerName(inLayer).lower()
if schema:
table = '{}.{}'.format(schema, table)
arguments.append('-nln')
arguments.append(table)
if skipfailures:
arguments.append('-skipfailures')
if where:
|
if len(gt) > 0:
arguments.append('-gt')
arguments.append(gt)
if not precision:
arguments.append('-lco PRECISION=NO')
if len(options) > 0:
arguments.append(options)
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
return commands
def commandName(self):
return "ogr2ogr"
| arguments.append(wherestring) | conditional_block |
ogr2ogrtabletopostgislist.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
ogr2ogrtabletopostgislist.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QSettings
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterTable
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName
class Ogr2OgrTableToPostGisList(GdalAlgorithm):
DATABASE = 'DATABASE'
INPUT_LAYER = 'INPUT_LAYER'
HOST = 'HOST'
PORT = 'PORT'
USER = 'USER'
DBNAME = 'DBNAME'
PASSWORD = 'PASSWORD'
SCHEMA = 'SCHEMA'
TABLE = 'TABLE'
PK = 'PK'
PRIMARY_KEY = 'PRIMARY_KEY'
WHERE = 'WHERE'
GT = 'GT'
OVERWRITE = 'OVERWRITE'
APPEND = 'APPEND'
ADDFIELDS = 'ADDFIELDS'
LAUNDER = 'LAUNDER'
SKIPFAILURES = 'SKIPFAILURES'
PRECISION = 'PRECISION'
OPTIONS = 'OPTIONS'
def dbConnectionNames(self):
settings = QSettings()
settings.beginGroup('/PostgreSQL/connections/')
return settings.childGroups()
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Import layer/table as geometryless table into PostgreSQL database')
self.group, self.i18n_group = self.trAlgorithm('[OGR] Miscellaneous')
self.DB_CONNECTIONS = self.dbConnectionNames()
self.addParameter(ParameterSelection(self.DATABASE,
self.tr('Database (connection name)'), self.DB_CONNECTIONS))
self.addParameter(ParameterTable(self.INPUT_LAYER,
self.tr('Input layer')))
self.addParameter(ParameterString(self.SCHEMA,
self.tr('Schema name'), 'public', optional=True))
self.addParameter(ParameterString(self.TABLE,
self.tr('Table name, leave blank to use input name'),
'', optional=True))
self.addParameter(ParameterString(self.PK,
self.tr('Primary key'), 'id', optional=True))
self.addParameter(ParameterTableField(self.PRIMARY_KEY,
self.tr('Primary key (existing field, used if the above option is left empty)'),
self.INPUT_LAYER, optional=True)) | self.tr('Select features using a SQL "WHERE" statement (Ex: column=\'value\')'),
'', optional=True))
self.addParameter(ParameterString(self.GT,
self.tr('Group N features per transaction (Default: 20000)'),
'', optional=True))
self.addParameter(ParameterBoolean(self.OVERWRITE,
self.tr('Overwrite existing table'), True))
self.addParameter(ParameterBoolean(self.APPEND,
self.tr('Append to existing table'), False))
self.addParameter(ParameterBoolean(self.ADDFIELDS,
self.tr('Append and add new fields to existing table'), False))
self.addParameter(ParameterBoolean(self.LAUNDER,
self.tr('Do not launder columns/table names'), False))
self.addParameter(ParameterBoolean(self.SKIPFAILURES,
self.tr('Continue after a failure, skipping the failed record'),
False))
self.addParameter(ParameterBoolean(self.PRECISION,
self.tr('Keep width and precision of input attributes'),
True))
self.addParameter(ParameterString(self.OPTIONS,
self.tr('Additional creation options'), '', optional=True))
def getConsoleCommands(self):
connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
settings = QSettings()
mySettings = '/PostgreSQL/connections/' + connection
dbname = settings.value(mySettings + '/database')
user = settings.value(mySettings + '/username')
host = settings.value(mySettings + '/host')
port = settings.value(mySettings + '/port')
password = settings.value(mySettings + '/password')
inLayer = self.getParameterValue(self.INPUT_LAYER)
ogrLayer = ogrConnectionString(inLayer)[1:-1]
schema = unicode(self.getParameterValue(self.SCHEMA))
table = unicode(self.getParameterValue(self.TABLE))
pk = unicode(self.getParameterValue(self.PK))
pkstring = "-lco FID=" + pk
primary_key = self.getParameterValue(self.PRIMARY_KEY)
where = unicode(self.getParameterValue(self.WHERE))
wherestring = '-where "' + where + '"'
gt = unicode(self.getParameterValue(self.GT))
overwrite = self.getParameterValue(self.OVERWRITE)
append = self.getParameterValue(self.APPEND)
addfields = self.getParameterValue(self.ADDFIELDS)
launder = self.getParameterValue(self.LAUNDER)
launderstring = "-lco LAUNDER=NO"
skipfailures = self.getParameterValue(self.SKIPFAILURES)
precision = self.getParameterValue(self.PRECISION)
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
arguments.append('-progress')
arguments.append('--config PG_USE_COPY YES')
arguments.append('-f')
arguments.append('PostgreSQL')
arguments.append('PG:"host=')
arguments.append(host)
arguments.append('port=')
arguments.append(port)
if len(dbname) > 0:
arguments.append('dbname=' + dbname)
if len(password) > 0:
arguments.append('password=' + password)
if len(schema) > 0:
arguments.append('active_schema=' + schema)
else:
arguments.append('active_schema=public')
arguments.append('user=' + user + '"')
arguments.append(ogrLayer)
arguments.append('-nlt NONE')
arguments.append(ogrLayerName(inLayer))
if launder:
arguments.append(launderstring)
if append:
arguments.append('-append')
if addfields:
arguments.append('-addfields')
if overwrite:
arguments.append('-overwrite')
if len(pk) > 0:
arguments.append(pkstring)
elif primary_key is not None:
arguments.append("-lco FID=" + primary_key)
if len(table) == 0:
table = ogrLayerName(inLayer).lower()
if schema:
table = '{}.{}'.format(schema, table)
arguments.append('-nln')
arguments.append(table)
if skipfailures:
arguments.append('-skipfailures')
if where:
arguments.append(wherestring)
if len(gt) > 0:
arguments.append('-gt')
arguments.append(gt)
if not precision:
arguments.append('-lco PRECISION=NO')
if len(options) > 0:
arguments.append(options)
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
return commands
def commandName(self):
return "ogr2ogr" | self.addParameter(ParameterString(self.WHERE, | random_line_split |
Fx-catalog-collapsible-menu.js | define([
"jquery",
"fx-cat-br/widgets/Fx-widgets-commons",
'text!fx-cat-br/json/fx-catalog-collapsible-menu-config.json',
"lib/bootstrap"
], function ($, W_Commons, conf) {
var o = { },
defaultOptions = {
widget: {
lang: 'EN'
},
events: {
SELECT: 'fx.catalog.module.select'
}
};
var cache = {},
w_Commons, $collapse;
function Fx_Catalog_Collapsible_Menu() {
w_Commons = new W_Commons();
}
Fx_Catalog_Collapsible_Menu.prototype.init = function (options) {
//Merge options
$.extend(o, defaultOptions);
$.extend(o, options);
};
Fx_Catalog_Collapsible_Menu.prototype.render = function (options) {
$.extend(o, options);
cache.json = JSON.parse(conf);
this.initStructure();
this.renderMenu(cache.json);
};
Fx_Catalog_Collapsible_Menu.prototype.initStructure = function () {
o.collapseId = "fx-collapse-" + w_Commons.getFenixUniqueId();
$collapse = $('<div class="panel-group" id="accordion"></div>');
$collapse.attr("id", o.collapseId);
$(o.container).append($collapse);
};
Fx_Catalog_Collapsible_Menu.prototype.renderMenu = function (json) {
var self = this;
if (json.hasOwnProperty("panels")) {
var panels = json.panels;
for (var i = 0; i < panels.length; i++) {
$collapse.append(self.buildPanel(panels[i]))
}
$(o.container).append($collapse)
} else {
throw new Error("Fx_Catalog_Collapsible_Menu: no 'panels' attribute in config JSON.")
}
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanel = function (panel) {
var self = this,
id = "fx-collapse-panel-" + w_Commons.getFenixUniqueId();
var $p = $(document.createElement("DIV"));
$p.addClass("panel");
$p.addClass("panel-default");
$p.append(self.buildPanelHeader(panel, id));
$p.append(self.buildPanelBody(panel, id));
return $p;
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelHeader = function (panel, id) {
//Init header
var $header = $('<div class="panel-heading"></div>'),
$title = $('<h4 class="panel-title fx-menu-category-title"></h4>'),
$a = $('<a data-toggle="collapse"></a>'),
$info = $('<div class="fx-catalog-modular-menu-category-info"></div>'),
$plus = $('<div class="fx-catalog-modular-menu-category-plus"></div>');
$a.attr("data-parent", "#" + o.collapseId);
$a.attr("href", "#" + id);
if (panel.hasOwnProperty("title")) {
$a.html(panel["title"][o.widget.lang]);
}
return $header.append($title.append($a.append($plus)).append($info));
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelBody = function (panel, id) {
//Init panel body
var $bodyContainer = $("<div class='panel-collapse collapse'></div>");
$bodyContainer.attr("id", id);
var $body = $('<div class="panel-body"></div>');
if (panel.hasOwnProperty("modules")) {
var modules = panel["modules"];
for (var j = 0; j < modules.length; j++) {
var $module = $("<div></div>"),
$btn = $('<button type="button" class="btn btn-default btn-block"></button>');
$btn.on('click', {module: modules[j] }, function (e) {
var $btn = $(this);
if ($btn.is(':disabled') === false) {
$btn.attr("disabled", "disabled");
w_Commons.raiseCustomEvent(o.container, o.events.SELECT, e.data.module)
}
});
| if (modules[j].hasOwnProperty("id")) {
$btn.attr("id", modules[j].id);
}
if (modules[j].hasOwnProperty("module")) {
$btn.attr("data-module", modules[j].module);
}
//Keep it before the label to have the icon on its left side
if (modules[j].hasOwnProperty("icon")) {
$btn.append($('<span class="' + modules[j].icon + '"></span>'));
}
if (modules[j].hasOwnProperty("label")) {
$btn.append(modules[j].label[o.widget.lang]);
}
if (modules[j].hasOwnProperty("popover")) {
/* console.log(modules[j]["popover"])
var keys = Object.keys(modules[j]["popover"]);
for (var k = 0; k < keys.length; k++ ){
$btn.attr(keys[k], modules[j]["popover"][keys[k]])
}*/
}
$module.append($btn);
$body.append($module)
}
}
return $bodyContainer.append($body);
};
Fx_Catalog_Collapsible_Menu.prototype.disable = function (module) {
$(o.container).find("[data-module='" + module + "']").attr("disabled", "disabled");
};
Fx_Catalog_Collapsible_Menu.prototype.activate = function (module) {
$(o.container).find("[data-module='" + module + "']").removeAttr("disabled");
};
return Fx_Catalog_Collapsible_Menu;
}); | random_line_split |
|
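A sketch of wiring the widget up from RequireJS (illustrative; the module path, container selector, and event-payload handling are assumptions, not taken from the source):
require(["fx-cat-br/widgets/Fx-catalog-collapsible-menu"], function (Menu) {
    var menu = new Menu();
    menu.init({ container: "#fx-menu", widget: { lang: "EN" } });
    menu.render({});
    // The widget raises 'fx.catalog.module.select' on its container
    // when a module button is clicked.
    $("#fx-menu").on("fx.catalog.module.select", function (e) {
        console.log("module selected", e);
    });
});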
Fx-catalog-collapsible-menu.js | define([
"jquery",
"fx-cat-br/widgets/Fx-widgets-commons",
'text!fx-cat-br/json/fx-catalog-collapsible-menu-config.json',
"lib/bootstrap"
], function ($, W_Commons, conf) {
var o = { },
defaultOptions = {
widget: {
lang: 'EN'
},
events: {
SELECT: 'fx.catalog.module.select'
}
};
var cache = {},
w_Commons, $collapse;
function Fx_Catalog_Collapsible_Menu() {
w_Commons = new W_Commons();
}
Fx_Catalog_Collapsible_Menu.prototype.init = function (options) {
//Merge options
$.extend(o, defaultOptions);
$.extend(o, options);
};
Fx_Catalog_Collapsible_Menu.prototype.render = function (options) {
$.extend(o, options);
cache.json = JSON.parse(conf);
this.initStructure();
this.renderMenu(cache.json);
};
Fx_Catalog_Collapsible_Menu.prototype.initStructure = function () {
o.collapseId = "fx-collapse-" + w_Commons.getFenixUniqueId();
$collapse = $('<div class="panel-group" id="accordion"></div>');
$collapse.attr("id", o.collapseId);
$(o.container).append($collapse);
};
Fx_Catalog_Collapsible_Menu.prototype.renderMenu = function (json) {
var self = this;
if (json.hasOwnProperty("panels")) {
var panels = json.panels;
for (var i = 0; i < panels.length; i++) {
$collapse.append(self.buildPanel(panels[i]))
}
$(o.container).append($collapse)
} else {
throw new Error("Fx_Catalog_Collapsible_Menu: no 'panels' attribute in config JSON.")
}
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanel = function (panel) {
var self = this,
id = "fx-collapse-panel-" + w_Commons.getFenixUniqueId();
var $p = $(document.createElement("DIV"));
$p.addClass("panel");
$p.addClass("panel-default");
$p.append(self.buildPanelHeader(panel, id));
$p.append(self.buildPanelBody(panel, id));
return $p;
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelHeader = function (panel, id) {
//Init header
var $header = $('<div class="panel-heading"></div>'),
$title = $('<h4 class="panel-title fx-menu-category-title"></h4>'),
$a = $('<a data-toggle="collapse"></a>'),
$info = $('<div class="fx-catalog-modular-menu-category-info"></div>'),
$plus = $('<div class="fx-catalog-modular-menu-category-plus"></div>');
$a.attr("data-parent", "#" + o.collapseId);
$a.attr("href", "#" + id);
if (panel.hasOwnProperty("title")) {
$a.html(panel["title"][o.widget.lang]);
}
return $header.append($title.append($a.append($plus)).append($info));
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelBody = function (panel, id) {
//Init panel body
var $bodyContainer = $("<div class='panel-collapse collapse'></div>");
$bodyContainer.attr("id", id);
var $body = $('<div class="panel-body"></div>');
if (panel.hasOwnProperty("modules")) | }
if (modules[j].hasOwnProperty("module")) {
$btn.attr("data-module", modules[j].module);
}
//Keep it before the label to have the icon on its left side
if (modules[j].hasOwnProperty("icon")) {
$btn.append($('<span class="' + modules[j].icon + '"></span>'));
}
if (modules[j].hasOwnProperty("label")) {
$btn.append(modules[j].label[o.widget.lang]);
}
if (modules[j].hasOwnProperty("popover")) {
/* console.log(modules[j]["popover"])
var keys = Object.keys(modules[j]["popover"]);
for (var k = 0; k < keys.length; k++ ){
$btn.attr(keys[k], modules[j]["popover"][keys[k]])
}*/
}
$module.append($btn);
$body.append($module)
}
}
return $bodyContainer.append($body);
};
Fx_Catalog_Collapsible_Menu.prototype.disable = function (module) {
$(o.container).find("[data-module='" + module + "']").attr("disabled", "disabled");
};
Fx_Catalog_Collapsible_Menu.prototype.activate = function (module) {
$(o.container).find("[data-module='" + module + "']").removeAttr("disabled");
};
return Fx_Catalog_Collapsible_Menu;
}); | {
var modules = panel["modules"];
for (var j = 0; j < modules.length; j++) {
var $module = $("<div></div>"),
$btn = $('<button type="button" class="btn btn-default btn-block"></button>');
$btn.on('click', {module: modules[j] }, function (e) {
var $btn = $(this);
if ($btn.is(':disabled') === false) {
$btn.attr("disabled", "disabled");
w_Commons.raiseCustomEvent(o.container, o.events.SELECT, e.data.module)
}
});
if (modules[j].hasOwnProperty("id")) {
$btn.attr("id", modules[j].id); | conditional_block |
Fx-catalog-collapsible-menu.js | define([
"jquery",
"fx-cat-br/widgets/Fx-widgets-commons",
'text!fx-cat-br/json/fx-catalog-collapsible-menu-config.json',
"lib/bootstrap"
], function ($, W_Commons, conf) {
var o = { },
defaultOptions = {
widget: {
lang: 'EN'
},
events: {
SELECT: 'fx.catalog.module.select'
}
};
var cache = {},
w_Commons, $collapse;
function | () {
w_Commons = new W_Commons();
}
Fx_Catalog_Collapsible_Menu.prototype.init = function (options) {
//Merge options
$.extend(o, defaultOptions);
$.extend(o, options);
};
Fx_Catalog_Collapsible_Menu.prototype.render = function (options) {
$.extend(o, options);
cache.json = JSON.parse(conf);
this.initStructure();
this.renderMenu(cache.json);
};
Fx_Catalog_Collapsible_Menu.prototype.initStructure = function () {
o.collapseId = "fx-collapse-" + w_Commons.getFenixUniqueId();
$collapse = $('<div class="panel-group" id="accordion"></div>');
$collapse.attr("id", o.collapseId);
$(o.container).append($collapse);
};
Fx_Catalog_Collapsible_Menu.prototype.renderMenu = function (json) {
var self = this;
if (json.hasOwnProperty("panels")) {
var panels = json.panels;
for (var i = 0; i < panels.length; i++) {
$collapse.append(self.buildPanel(panels[i]))
}
$(o.container).append($collapse)
} else {
throw new Error("Fx_Catalog_Collapsible_Menu: no 'panels' attribute in config JSON.")
}
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanel = function (panel) {
var self = this,
id = "fx-collapse-panel-" + w_Commons.getFenixUniqueId();
var $p = $(document.createElement("DIV"));
$p.addClass("panel");
$p.addClass("panel-default");
$p.append(self.buildPanelHeader(panel, id));
$p.append(self.buildPanelBody(panel, id));
return $p;
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelHeader = function (panel, id) {
//Init header
var $header = $('<div class="panel-heading"></div>'),
$title = $('<h4 class="panel-title fx-menu-category-title"></h4>'),
$a = $('<a data-toggle="collapse"></a>'),
$info = $('<div class="fx-catalog-modular-menu-category-info"></div>'),
$plus = $('<div class="fx-catalog-modular-menu-category-plus"></div>');
$a.attr("data-parent", "#" + o.collapseId);
$a.attr("href", "#" + id);
if (panel.hasOwnProperty("title")) {
$a.html(panel["title"][o.widget.lang]);
}
return $header.append($title.append($a.append($plus)).append($info));
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelBody = function (panel, id) {
//Init panel body
var $bodyContainer = $("<div class='panel-collapse collapse'></div>");
$bodyContainer.attr("id", id);
var $body = $('<div class="panel-body"></div>');
if (panel.hasOwnProperty("modules")) {
var modules = panel["modules"];
for (var j = 0; j < modules.length; j++) {
var $module = $("<div></div>"),
$btn = $('<button type="button" class="btn btn-default btn-block"></button>');
$btn.on('click', {module: modules[j] }, function (e) {
var $btn = $(this);
if ($btn.is(':disabled') === false) {
$btn.attr("disabled", "disabled");
w_Commons.raiseCustomEvent(o.container, o.events.SELECT, e.data.module)
}
});
if (modules[j].hasOwnProperty("id")) {
$btn.attr("id", modules[j].id);
}
if (modules[j].hasOwnProperty("module")) {
$btn.attr("data-module", modules[j].module);
}
//Keep it before the label to have the icon on its left side
if (modules[j].hasOwnProperty("icon")) {
$btn.append($('<span class="' + modules[j].icon + '"></span>'));
}
if (modules[j].hasOwnProperty("label")) {
$btn.append(modules[j].label[o.widget.lang]);
}
if (modules[j].hasOwnProperty("popover")) {
/* console.log(modules[j]["popover"])
var keys = Object.keys(modules[j]["popover"]);
for (var k = 0; k < keys.length; k++ ){
$btn.attr(keys[k], modules[j]["popover"][keys[k]])
}*/
}
$module.append($btn);
$body.append($module)
}
}
return $bodyContainer.append($body);
};
Fx_Catalog_Collapsible_Menu.prototype.disable = function (module) {
$(o.container).find("[data-module='" + module + "']").attr("disabled", "disabled");
};
Fx_Catalog_Collapsible_Menu.prototype.activate = function (module) {
$(o.container).find("[data-module='" + module + "']").removeAttr("disabled");
};
return Fx_Catalog_Collapsible_Menu;
}); | Fx_Catalog_Collapsible_Menu | identifier_name |
Fx-catalog-collapsible-menu.js | define([
"jquery",
"fx-cat-br/widgets/Fx-widgets-commons",
'text!fx-cat-br/json/fx-catalog-collapsible-menu-config.json',
"lib/bootstrap"
], function ($, W_Commons, conf) {
var o = { },
defaultOptions = {
widget: {
lang: 'EN'
},
events: {
SELECT: 'fx.catalog.module.select'
}
};
var cache = {},
w_Commons, $collapse;
function Fx_Catalog_Collapsible_Menu() |
Fx_Catalog_Collapsible_Menu.prototype.init = function (options) {
//Merge options
$.extend(o, defaultOptions);
$.extend(o, options);
};
Fx_Catalog_Collapsible_Menu.prototype.render = function (options) {
$.extend(o, options);
cache.json = JSON.parse(conf);
this.initStructure();
this.renderMenu(cache.json);
};
Fx_Catalog_Collapsible_Menu.prototype.initStructure = function () {
o.collapseId = "fx-collapse-" + w_Commons.getFenixUniqueId();
$collapse = $('<div class="panel-group" id="accordion"></div>');
$collapse.attr("id", o.collapseId);
$(o.container).append($collapse);
};
Fx_Catalog_Collapsible_Menu.prototype.renderMenu = function (json) {
var self = this;
if (json.hasOwnProperty("panels")) {
var panels = json.panels;
for (var i = 0; i < panels.length; i++) {
$collapse.append(self.buildPanel(panels[i]))
}
$(o.container).append($collapse)
} else {
throw new Error("Fx_Catalog_Collapsible_Menu: no 'panels' attribute in config JSON.")
}
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanel = function (panel) {
var self = this,
id = "fx-collapse-panel-" + w_Commons.getFenixUniqueId();
var $p = $(document.createElement("DIV"));
$p.addClass("panel");
$p.addClass("panel-default");
$p.append(self.buildPanelHeader(panel, id));
$p.append(self.buildPanelBody(panel, id));
return $p;
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelHeader = function (panel, id) {
//Init header
var $header = $('<div class="panel-heading"></div>'),
$title = $('<h4 class="panel-title fx-menu-category-title"></h4>'),
$a = $('<a data-toggle="collapse"></a>'),
$info = $('<div class="fx-catalog-modular-menu-category-info"></div>'),
$plus = $('<div class="fx-catalog-modular-menu-category-plus"></div>');
$a.attr("data-parent", "#" + o.collapseId);
$a.attr("href", "#" + id);
if (panel.hasOwnProperty("title")) {
$a.html(panel["title"][o.widget.lang]);
}
return $header.append($title.append($a.append($plus)).append($info));
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelBody = function (panel, id) {
//Init panel body
var $bodyContainer = $("<div class='panel-collapse collapse'></div>");
$bodyContainer.attr("id", id);
var $body = $('<div class="panel-body"></div>');
if (panel.hasOwnProperty("modules")) {
var modules = panel["modules"];
for (var j = 0; j < modules.length; j++) {
var $module = $("<div></div>"),
$btn = $('<button type="button" class="btn btn-default btn-block"></button>');
$btn.on('click', {module: modules[j] }, function (e) {
var $btn = $(this);
if ($btn.is(':disabled') === false) {
$btn.attr("disabled", "disabled");
w_Commons.raiseCustomEvent(o.container, o.events.SELECT, e.data.module)
}
});
if (modules[j].hasOwnProperty("id")) {
$btn.attr("id", modules[j].id);
}
if (modules[j].hasOwnProperty("module")) {
$btn.attr("data-module", modules[j].module);
}
//Keep it before the label to have the icon on its left side
if (modules[j].hasOwnProperty("icon")) {
$btn.append($('<span class="' + modules[j].icon + '"></span>'));
}
if (modules[j].hasOwnProperty("label")) {
$btn.append(modules[j].label[o.widget.lang]);
}
if (modules[j].hasOwnProperty("popover")) {
/* console.log(modules[j]["popover"])
var keys = Object.keys(modules[j]["popover"]);
for (var k = 0; k < keys.length; k++ ){
$btn.attr(keys[k], modules[j]["popover"][keys[k]])
}*/
}
$module.append($btn);
$body.append($module)
}
}
return $bodyContainer.append($body);
};
Fx_Catalog_Collapsible_Menu.prototype.disable = function (module) {
$(o.container).find("[data-module='" + module + "']").attr("disabled", "disabled");
};
Fx_Catalog_Collapsible_Menu.prototype.activate = function (module) {
$(o.container).find("[data-module='" + module + "']").removeAttr("disabled");
};
return Fx_Catalog_Collapsible_Menu;
}); | {
w_Commons = new W_Commons();
} | identifier_body |
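// A minimal usage sketch for the widget above (assumptions: a RequireJS page
// that maps the "fx-cat-br" paths, and a host <div id="menu"> — both
// hypothetical, not part of the original sample):
require(["fx-cat-br/widgets/Fx-catalog-collapsible-menu"], function (Menu) {
    var menu = new Menu();
    menu.init({ container: document.querySelector("#menu") });
    menu.render();
    // Buttons disable themselves after a click raises fx.catalog.module.select;
    // re-enable one by its data-module value:
    menu.activate("some-module");
});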
find_dependencies.py | this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.util import bootstrap
from telemetry.util import cloud_storage
from telemetry.util import path
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(os.path.realpath(os.path.join(
path.GetChromiumSrcDir(), os.pardir, deps_path))
for deps_path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not path.IsSubpath(module_path, path.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser, None)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
path.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
def FindExcludedFiles(files, options):
# Define some filters for files.
def IsHidden(path_string):
|
def IsPyc(path_string):
return os.path.splitext(path_string)[1] == '.pyc'
def IsInCloudStorage(path_string):
return os.path.exists(path_string + '.sha1')
def MatchesExcludeOptions(path_string):
for pattern in options.exclude:
if (fnmatch.fnmatch(path_string, pattern) or
fnmatch.fnmatch(os.path.basename(path_string), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for file_path in files:
if any(condition(file_path) for condition in exclude_conditions):
yield file_path
def FindDependencies(target_paths, options):
# Verify arguments.
for target_path in target_paths:
if not os.path.exists(target_path):
raise ValueError('Path does not exist: %s' % target_path)
dependencies = path_set.PathSet()
# Including Telemetry's major entry points will (hopefully) include Telemetry
# and all its dependencies. If the user doesn't pass any arguments, we just
# have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(), 'telemetry', 'benchmark_runner.py')))
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(),
'telemetry', 'unittest_util', 'run_tests.py')))
dependencies |= FindBootstrapDependencies(path.GetTelemetryDir())
# Add dependencies.
for target_path in target_paths:
base_dir = os.path.dirname(os.path.realpath(target_path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(target_path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(target_paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(path.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for dependency_path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(dependency_path, base_dir))
zip_file.write(dependency_path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for target_path in target_paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(target_path)))
link_info.create_system = 3 # Unix attributes.
# 010 marks a regular file; 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(target_path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for dependency_path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(dependency_path, gsutil_base_dir))
zip_file.write(dependency_path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser, _):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args, _):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
target_paths = args.positional_args
dependencies = FindDependencies(target_paths, args)
if | for pathname_component in path_string.split(os.sep):
if pathname_component.startswith('.'):
return True
return False | identifier_body |
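# A standalone sketch of the predicate-list pattern used by FindExcludedFiles
# above: each exclusion rule is a plain function, and a file is excluded as
# soon as any rule matches (file names below are made up).
import fnmatch
import os

def is_pyc(path_string):
  return os.path.splitext(path_string)[1] == '.pyc'

def is_hidden(path_string):
  return any(part.startswith('.') for part in path_string.split(os.sep))

exclude_conditions = [is_pyc, is_hidden,
                      lambda p: fnmatch.fnmatch(p, '*.log')]
files = ['a.py', 'b.pyc', '.git/config', 'run.log']
excluded = [f for f in files if any(c(f) for c in exclude_conditions)]
assert excluded == ['b.pyc', '.git/config', 'run.log']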
find_dependencies.py | source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.util import bootstrap
from telemetry.util import cloud_storage
from telemetry.util import path
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(os.path.realpath(os.path.join(
path.GetChromiumSrcDir(), os.pardir, deps_path))
for deps_path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not path.IsSubpath(module_path, path.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser, None)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
path.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
def FindExcludedFiles(files, options):
# Define some filters for files.
def IsHidden(path_string):
for pathname_component in path_string.split(os.sep):
if pathname_component.startswith('.'):
return True
return False
def IsPyc(path_string):
return os.path.splitext(path_string)[1] == '.pyc'
def IsInCloudStorage(path_string):
return os.path.exists(path_string + '.sha1')
def MatchesExcludeOptions(path_string):
for pattern in options.exclude:
if (fnmatch.fnmatch(path_string, pattern) or
fnmatch.fnmatch(os.path.basename(path_string), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for file_path in files:
if any(condition(file_path) for condition in exclude_conditions):
yield file_path
def FindDependencies(target_paths, options):
# Verify arguments.
for target_path in target_paths:
if not os.path.exists(target_path):
|
dependencies = path_set.PathSet()
# Including Telemetry's major entry points will (hopefully) include Telemetry
# and all its dependencies. If the user doesn't pass any arguments, we just
# have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(), 'telemetry', 'benchmark_runner.py')))
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(),
'telemetry', 'unittest_util', 'run_tests.py')))
dependencies |= FindBootstrapDependencies(path.GetTelemetryDir())
# Add dependencies.
for target_path in target_paths:
base_dir = os.path.dirname(os.path.realpath(target_path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(target_path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(target_paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(path.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for dependency_path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(dependency_path, base_dir))
zip_file.write(dependency_path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for target_path in target_paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(target_path)))
link_info.create_system = 3 # Unix attributes.
# 010 marks a regular file; 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(target_path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for dependency_path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(dependency_path, gsutil_base_dir))
zip_file.write(dependency_path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser, _):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args, _):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
target_paths = args.positional_args
dependencies = FindDependencies(target_paths, args)
if | raise ValueError('Path does not exist: %s' % target_path) | conditional_block |
find_dependencies.py | source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.util import bootstrap
from telemetry.util import cloud_storage
from telemetry.util import path
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(os.path.realpath(os.path.join(
path.GetChromiumSrcDir(), os.pardir, deps_path))
for deps_path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not path.IsSubpath(module_path, path.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser, None)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
path.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
def FindExcludedFiles(files, options):
# Define some filters for files.
def IsHidden(path_string):
for pathname_component in path_string.split(os.sep):
if pathname_component.startswith('.'):
return True
return False
def IsPyc(path_string):
return os.path.splitext(path_string)[1] == '.pyc'
def IsInCloudStorage(path_string):
return os.path.exists(path_string + '.sha1')
def MatchesExcludeOptions(path_string):
for pattern in options.exclude:
if (fnmatch.fnmatch(path_string, pattern) or
fnmatch.fnmatch(os.path.basename(path_string), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for file_path in files:
if any(condition(file_path) for condition in exclude_conditions):
yield file_path
def | (target_paths, options):
# Verify arguments.
for target_path in target_paths:
if not os.path.exists(target_path):
raise ValueError('Path does not exist: %s' % target_path)
dependencies = path_set.PathSet()
# Including Telemetry's major entry points will (hopefully) include Telemetry
# and all its dependencies. If the user doesn't pass any arguments, we just
# have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(), 'telemetry', 'benchmark_runner.py')))
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(),
'telemetry', 'unittest_util', 'run_tests.py')))
dependencies |= FindBootstrapDependencies(path.GetTelemetryDir())
# Add dependencies.
for target_path in target_paths:
base_dir = os.path.dirname(os.path.realpath(target_path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(target_path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(target_paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(path.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for dependency_path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(dependency_path, base_dir))
zip_file.write(dependency_path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for target_path in target_paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(target_path)))
link_info.create_system = 3 # Unix attributes.
# 010 marks a regular file; 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(target_path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for dependency_path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(dependency_path, gsutil_base_dir))
zip_file.write(dependency_path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser, _):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args, _):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
target_paths = args.positional_args
dependencies = FindDependencies(target_paths, args)
if | FindDependencies | identifier_name |
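# Sketch of the executable-entry trick used in ZipDependencies above, in
# isolation (archive and member names are made up). The high 16 bits of
# external_attr carry the Unix mode, here S_IFREG (0100000) | 0777.
import zipfile

with zipfile.ZipFile('demo.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
  info = zipfile.ZipInfo('run_me')
  info.create_system = 3  # 3 = Unix, so the mode bits are honored on extract.
  info.external_attr = 0100777 << 16  # Octal (Python 2 literal).
  zf.writestr(info, '#!/bin/sh\necho hello\n')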
find_dependencies.py | this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.util import bootstrap
from telemetry.util import cloud_storage
from telemetry.util import path
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(os.path.realpath(os.path.join(
path.GetChromiumSrcDir(), os.pardir, deps_path))
for deps_path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not path.IsSubpath(module_path, path.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser, None)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
path.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
def FindExcludedFiles(files, options):
# Define some filters for files.
def IsHidden(path_string):
for pathname_component in path_string.split(os.sep):
if pathname_component.startswith('.'):
return True
return False
def IsPyc(path_string):
return os.path.splitext(path_string)[1] == '.pyc'
def IsInCloudStorage(path_string):
return os.path.exists(path_string + '.sha1')
def MatchesExcludeOptions(path_string):
for pattern in options.exclude:
if (fnmatch.fnmatch(path_string, pattern) or
fnmatch.fnmatch(os.path.basename(path_string), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for file_path in files:
if any(condition(file_path) for condition in exclude_conditions):
yield file_path
def FindDependencies(target_paths, options):
# Verify arguments.
for target_path in target_paths:
if not os.path.exists(target_path):
raise ValueError('Path does not exist: %s' % target_path)
dependencies = path_set.PathSet()
# Including Telemetry's major entry points will (hopefully) include Telemetry
# and all its dependencies. If the user doesn't pass any arguments, we just
# have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(), 'telemetry', 'benchmark_runner.py')))
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(path.GetTelemetryDir(),
'telemetry', 'unittest_util', 'run_tests.py')))
dependencies |= FindBootstrapDependencies(path.GetTelemetryDir())
# Add dependencies.
for target_path in target_paths:
base_dir = os.path.dirname(os.path.realpath(target_path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(target_path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(target_paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(path.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for dependency_path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(dependency_path, base_dir))
zip_file.write(dependency_path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for target_path in target_paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(target_path)))
link_info.create_system = 3 # Unix attributes.
# 010 marks a regular file; 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(target_path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same | gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for dependency_path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(dependency_path, gsutil_base_dir))
zip_file.write(dependency_path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser, _):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args, _):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
target_paths = args.positional_args
dependencies = FindDependencies(target_paths, args)
if args | # location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir) | random_line_split |
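# The core of FindPythonDependencies above is the stdlib modulefinder; a
# standalone sketch (the script name is hypothetical):
import modulefinder

finder = modulefinder.ModuleFinder()
finder.run_script('some_script.py')
for name, module in finder.modules.iteritems():
  # __file__ is None for built-in modules.
  print name, module.__file__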
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
// Imports for the fake backend.
import {InMemoryWebApiModule} from 'angular-in-memory-web-api';
import {InMemoryDataService} from './mocks/in-memory-data.service';
import {AppComponent} from './app.component';
import {BookmarkDetailComponent} from './bm-detail/bookmark-detail.component';
import {BookmarksComponent} from './bookmarks/bookmarks.component';
import {AboutComponent} from './about/about.component';
import { LoginFormComponent } from './login/login-form.component';
import {BookmarkService} from './services/bookmark.service';
import { UserService } from './services/user-service';
import {routing, appRoutingProviders} from './app.routing';
import { RegisterFormComponent } from './register/register-form.component';
import {BookmarkViewComponent} from './bm-view/bookmark-view.component';
import {BookmarkEditComponent} from './bm-edit/bookmark-edit.component';
/**
* App modules class.
* @author Dmitry Noranovich
*/
@NgModule({
declarations: [
AppComponent,
BookmarkDetailComponent,
BookmarksComponent,
AboutComponent,
LoginFormComponent,
RegisterFormComponent,
BookmarkEditComponent,
BookmarkViewComponent
],
imports: [
BrowserModule,
FormsModule,
HttpModule,
InMemoryWebApiModule.forRoot(InMemoryDataService),
routing
],
providers: [BookmarkService,
UserService,
appRoutingProviders],
bootstrap: [AppComponent]
})
export class | { }
| AppModule | identifier_name |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
// Imports for the fake backend.
import {InMemoryWebApiModule} from 'angular-in-memory-web-api';
import {InMemoryDataService} from './mocks/in-memory-data.service';
import {AppComponent} from './app.component';
import {BookmarkDetailComponent} from './bm-detail/bookmark-detail.component';
import {BookmarksComponent} from './bookmarks/bookmarks.component';
import {AboutComponent} from './about/about.component';
import { LoginFormComponent } from './login/login-form.component';
import {BookmarkService} from './services/bookmark.service';
import { UserService } from './services/user-service';
import {routing, appRoutingProviders} from './app.routing';
import { RegisterFormComponent } from './register/register-form.component'; | * App modules class.
* @author Dmitry Noranovich
*/
@NgModule({
declarations: [
AppComponent,
BookmarkDetailComponent,
BookmarksComponent,
AboutComponent,
LoginFormComponent,
RegisterFormComponent,
BookmarkEditComponent,
BookmarkViewComponent
],
imports: [
BrowserModule,
FormsModule,
HttpModule,
InMemoryWebApiModule.forRoot(InMemoryDataService),
routing
],
providers: [BookmarkService,
UserService,
appRoutingProviders],
bootstrap: [AppComponent]
})
export class AppModule { } | import {BookmarkViewComponent} from './bm-view/bookmark-view.component';
import {BookmarkEditComponent} from './bm-edit/bookmark-edit.component';
/** | random_line_split |
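// A typical entry point for bootstrapping a module like the one above
// (Angular 2/4-era API; this main.ts and its paths are assumed, not part of
// the original sample):
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
import { AppModule } from './app/app.module';

platformBrowserDynamic().bootstrapModule(AppModule)
  .catch(err => console.error(err));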
test_reset_password.py | from djangosanetesting.cases import HttpTestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from accounts.tests import testdata
class TestResetPassword(HttpTestCase):
def | (self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.host = 'localhost'
self.port = 8000
def setUp(self):
testdata.run()
def test_reset_password(self):
res = self.client.post(reverse('password_reset'),
{'register_number' : settings.TEST_USERNAME,
},
follow=True)
assert reverse('password_reset_done') in res.request['PATH_INFO']
assert len(mail.outbox) == 1
reset_url = [word for word in mail.outbox[0].body.split() if word.startswith('http')][0]
res = self.client.get(reset_url, follow=True)
assert res.status_code == 200
assert 'unsuccessful' not in res.content.lower()
assert 'change my password' in res.content.lower()
# I have to stop here, because the next step is to change the password at Google Apps.
# Can't mess up production database.
| __init__ | identifier_name |
test_reset_password.py | from djangosanetesting.cases import HttpTestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from accounts.tests import testdata
class TestResetPassword(HttpTestCase):
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.host = 'localhost'
self.port = 8000
def setUp(self):
testdata.run()
def test_reset_password(self):
res = self.client.post(reverse('password_reset'),
{'register_number' : settings.TEST_USERNAME,
}, |
assert reverse('password_reset_done') in res.request['PATH_INFO']
assert len(mail.outbox) == 1
reset_url = [word for word in mail.outbox[0].body.split() if word.startswith('http')][0]
res = self.client.get(reset_url, follow=True)
assert res.status_code == 200
assert 'unsuccessful' not in res.content.lower()
assert 'change my password' in res.content.lower()
# I have to stop here, because the next step is to change the password at Google Apps.
# Can't mess up production database. | follow=True) | random_line_split |
test_reset_password.py | from djangosanetesting.cases import HttpTestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from accounts.tests import testdata
class TestResetPassword(HttpTestCase):
def __init__(self, *args, **kwargs):
|
def setUp(self):
testdata.run()
def test_reset_password(self):
res = self.client.post(reverse('password_reset'),
{'register_number' : settings.TEST_USERNAME,
},
follow=True)
assert reverse('password_reset_done') in res.request['PATH_INFO']
assert len(mail.outbox) == 1
reset_url = [word for word in mail.outbox[0].body.split() if word.startswith('http')][0]
res = self.client.get(reset_url, follow=True)
assert res.status_code == 200
assert 'unsuccessful' not in res.content.lower()
assert 'change my password' in res.content.lower()
# I have to stop here, because the next step is to change the password at Google Apps.
# Can't mess up production database.
| super(self.__class__, self).__init__(*args, **kwargs)
self.host = 'localhost'
self.port = 8000 | identifier_body |
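# The URL-extraction idiom used in the tests above, in isolation (the email
# body is a made-up sample):
body = 'Hello,\nreset your password at http://example.com/reset/abc123 soon'
reset_url = [word for word in body.split() if word.startswith('http')][0]
assert reset_url == 'http://example.com/reset/abc123'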
manage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask_script import Manager, Shell, Server
from flask_migrate import MigrateCommand
from app import create_app, db
from app.models import User
from app.settings import DevConfig, ProdConfig
if os.environ.get("ENV") == 'prod':
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
manager = Manager(app)
def | ():
"""Return context dict for a shell session so you can access
app, db, and the User model by default.
"""
return {'app': app, 'db': db, 'User': User}
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run() | _make_context | identifier_name |
manage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask_script import Manager, Shell, Server | from app.settings import DevConfig, ProdConfig
if os.environ.get("ENV") == 'prod':
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
manager = Manager(app)
def _make_context():
"""Return context dict for a shell session so you can access
app, db, and the User model by default.
"""
return {'app': app, 'db': db, 'User': User}
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run() | from flask_migrate import MigrateCommand
from app import create_app, db
from app.models import User | random_line_split |
manage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask_script import Manager, Shell, Server
from flask_migrate import MigrateCommand
from app import create_app, db
from app.models import User
from app.settings import DevConfig, ProdConfig
if os.environ.get("ENV") == 'prod':
|
else:
app = create_app(DevConfig)
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
manager = Manager(app)
def _make_context():
"""Return context dict for a shell session so you can access
app, db, and the User model by default.
"""
return {'app': app, 'db': db, 'User': User}
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run() | app = create_app(ProdConfig) | conditional_block |
manage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask_script import Manager, Shell, Server
from flask_migrate import MigrateCommand
from app import create_app, db
from app.models import User
from app.settings import DevConfig, ProdConfig
if os.environ.get("ENV") == 'prod':
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
manager = Manager(app)
def _make_context():
|
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run() | """Return context dict for a shell session so you can access
app, db, and the User model by default.
"""
return {'app': app, 'db': db, 'User': User} | identifier_body |
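# Example invocations for the commands registered above (a sketch; assumes
# this file is saved as manage.py with the app's dependencies installed):
#
#   python manage.py server             # run the development server
#   python manage.py shell              # REPL preloaded with app, db and User
#   python manage.py db migrate         # Flask-Migrate database commands
#   ENV=prod python manage.py server    # start with the production config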
app.js | //Problem: Hints are shown even when form is valid
//Solution: Hide and show them at appropriate times
var $password = $("#password");
var $confirmPassword = $("#confirm_password");
//Hide hints
$("form span").hide();
function isPasswordValid() {
return $password.val().length > 8;
}
function arePasswordsMatching() {
return $password.val() === $confirmPassword.val();
}
function canSubmit() {
return isPasswordValid() && arePasswordsMatching();
}
function passwordEvent(){
//Find out if password is valid
if(isPasswordValid()) {
//Hide hint if valid
$password.next().hide();
} else {
//else show hint
$password.next().show();
}
}
function confirmPasswordEvent() |
function enableSubmitEvent() {
$("#submit").prop("disabled", !canSubmit());
}
//When event happens on password input
$password.focus(passwordEvent).keyup(passwordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
//When event happens on confirmation input
$confirmPassword.focus(confirmPasswordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
enableSubmitEvent(); | {
//find out if password and confirmation match
if(arePasswordsMatching()) {
//Hide hint if match
$confirmPassword.next().hide();
} else {
//else show hint
$confirmPassword.next().show();
}
} | identifier_body |
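// The selectors above imply markup roughly like this (a sketch, not taken
// from the original page): each password input is immediately followed by
// its hint <span>, which is why $password.next() reaches it.
//
//   <form>
//     <input type="password" id="password">
//     <span>Enter a password longer than 8 characters</span>
//     <input type="password" id="confirm_password">
//     <span>Please confirm your password</span>
//     <input type="submit" id="submit" value="Submit">
//   </form>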
app.js | //Problem: Hints are shown even when form is valid
//Solution: Hide and show them at appropriate times
var $password = $("#password");
var $confirmPassword = $("#confirm_password");
//Hide hints
$("form span").hide();
function isPasswordValid() {
return $password.val().length > 8;
}
function arePasswordsMatching() {
return $password.val() === $confirmPassword.val();
}
function canSubmit() {
return isPasswordValid() && arePasswordsMatching();
}
function passwordEvent(){
//Find out if password is valid
if(isPasswordValid()) {
//Hide hint if valid
$password.next().hide();
} else {
//else show hint
$password.next().show();
}
}
function confirmPasswordEvent() {
//find out if password and confirmation match
if(arePasswordsMatching()) {
//Hide hint if match
$confirmPassword.next().hide();
} else |
}
function enableSubmitEvent() {
$("#submit").prop("disabled", !canSubmit());
}
//When event happens on password input
$password.focus(passwordEvent).keyup(passwordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
//When event happens on confirmation input
$confirmPassword.focus(confirmPasswordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
enableSubmitEvent(); | {
//else show hint
$confirmPassword.next().show();
} | conditional_block |
app.js | //Problem: Hints are shown even when form is valid
//Solution: Hide and show them at appropriate times
var $password = $("#password");
var $confirmPassword = $("#confirm_password");
//Hide hints
$("form span").hide();
function isPasswordValid() {
return $password.val().length > 8;
}
function arePasswordsMatching() {
return $password.val() === $confirmPassword.val();
}
function canSubmit() {
return isPasswordValid() && arePasswordsMatching();
}
function passwordEvent(){
//Find out if password is valid
if(isPasswordValid()) {
//Hide hint if valid
$password.next().hide();
} else {
//else show hint
$password.next().show();
}
}
function confirmPasswordEvent() {
//find out if password and confirmation match
if(arePasswordsMatching()) {
//Hide hint if match
$confirmPassword.next().hide();
} else {
//else show hint
$confirmPassword.next().show();
}
}
function | () {
$("#submit").prop("disabled", !canSubmit());
}
//When event happens on password input
$password.focus(passwordEvent).keyup(passwordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
//When event happens on confirmation input
$confirmPassword.focus(confirmPasswordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
enableSubmitEvent(); | enableSubmitEvent | identifier_name |
app.js | //Problem: Hints are shown even when form is valid
//Solution: Hide and show them at appropriate times
var $password = $("#password");
var $confirmPassword = $("#confirm_password");
//Hide hints
$("form span").hide();
function isPasswordValid() {
return $password.val().length > 8;
}
function arePasswordsMatching() {
return $password.val() === $confirmPassword.val();
}
function canSubmit() {
return isPasswordValid() && arePasswordsMatching();
} | //Hide hint if valid
$password.next().hide();
} else {
//else show hint
$password.next().show();
}
}
function confirmPasswordEvent() {
//find out if password and confirmation match
if(arePasswordsMatching()) {
//Hide hint if match
$confirmPassword.next().hide();
} else {
//else show hint
$confirmPassword.next().show();
}
}
function enableSubmitEvent() {
$("#submit").prop("disabled", !canSubmit());
}
//When event happens on password input
$password.focus(passwordEvent).keyup(passwordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
//When event happens on confirmation input
$confirmPassword.focus(confirmPasswordEvent).keyup(confirmPasswordEvent).keyup(enableSubmitEvent);
enableSubmitEvent(); |
function passwordEvent(){
//Find out if password is valid
if(isPasswordValid()) { | random_line_split |
model.rs | _to_attrs(resource, included, &visited_relationships))
}
/// Create a single resource object or collection of resource
/// objects directly from
/// [`DocumentData`](../api/struct.DocumentData.html). This method
/// will parse the document (the `data` and `included` resources) in an
/// attempt to instantiate the calling struct.
fn from_jsonapi_document(doc: &DocumentData) -> Result<Self> {
match doc.data.as_ref() {
Some(primary_data) => {
match *primary_data {
PrimaryData::None => bail!("Document had no data"),
PrimaryData::Single(ref resource) => {
Self::from_jsonapi_resource(resource, &doc.included)
}
PrimaryData::Multiple(ref resources) => {
let visited_relationships: Vec<&str> = Vec::new();
let all: Vec<ResourceAttributes> = resources
.iter()
.map(|r| Self::resource_to_attrs(r, &doc.included, &visited_relationships))
.collect();
Self::from_serializable(all)
}
}
}
None => bail!("Document had no data"),
}
}
/// Converts the instance of the struct into a
/// [`Resource`](../api/struct.Resource.html)
fn to_jsonapi_resource(&self) -> (Resource, Option<Resources>) {
if let Value::Object(mut attrs) = to_value(self).unwrap() {
let _ = attrs.remove("id");
let resource = Resource {
_type: self.jsonapi_type(),
id: self.jsonapi_id(),
relationships: self.build_relationships(),
attributes: Self::extract_attributes(&attrs),
..Default::default()
};
(resource, self.build_included())
} else {
panic!("{} is not a Value::Object", self.jsonapi_type())
}
}
/// Converts the struct into a complete
/// [`JsonApiDocument`](../api/struct.JsonApiDocument.html)
fn to_jsonapi_document(&self) -> JsonApiDocument {
let (resource, included) = self.to_jsonapi_resource();
JsonApiDocument::Data (
DocumentData {
data: Some(PrimaryData::Single(Box::new(resource))),
included,
..Default::default()
}
)
}
#[doc(hidden)]
fn build_has_one<M: JsonApiModel>(model: &M) -> Relationship {
Relationship {
data: Some(IdentifierData::Single(model.as_resource_identifier())),
links: None
}
}
#[doc(hidden)]
fn build_has_many<M: JsonApiModel>(models: &[M]) -> Relationship {
Relationship {
data: Some(IdentifierData::Multiple(
models.iter().map(|m| m.as_resource_identifier()).collect()
)),
links: None
}
}
#[doc(hidden)]
fn as_resource_identifier(&self) -> ResourceIdentifier {
ResourceIdentifier {
_type: self.jsonapi_type(),
id: self.jsonapi_id(),
}
}
/* Attribute corresponding to the model is removed from the Map
* before calling this, so there's no need to ignore it like we do
* with the attributes that correspond with relationships.
* */
#[doc(hidden)]
fn extract_attributes(attrs: &Map<String, Value>) -> ResourceAttributes {
attrs
.iter()
.filter(|&(key, _)| {
if let Some(fields) = Self::relationship_fields() {
if fields.contains(&key.as_str()) {
return false;
}
}
true
})
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
}
#[doc(hidden)]
fn to_resources(&self) -> Resources {
let (me, maybe_others) = self.to_jsonapi_resource();
let mut flattened = vec![me];
if let Some(mut others) = maybe_others {
flattened.append(&mut others);
}
flattened
}
/// When passed a `ResourceIdentifier` (which contains a `type` and `id`)
/// this will iterate through the provided collection `haystack` in an
/// attempt to find and return the `Resource` whose `type` and `id`
/// attributes match
#[doc(hidden)]
fn lookup<'a>(needle: &ResourceIdentifier, haystack: &'a [Resource])
-> Option<&'a Resource>
{
for resource in haystack {
if resource._type == needle._type && resource.id == needle.id {
return Some(resource);
}
}
None
}
/// Return a [`ResourceAttributes`](../api/struct.ResourceAttributes.html)
/// object that contains the attributes in this `resource`. This will be
/// called recursively for each `relationship` on the resource in an attempt
/// to satisfy the properties for the calling struct.
///
/// The last parameter in this function call is `visited_relationships`, which is threaded through
/// the recursive calls. This `Vec` contains the JSON:API `relationships` that have already been
/// visited on the path to the current call. When operating on the root node of the document
/// this simply starts as an empty `Vec`.
///
/// Tracking these "visited" relationships is necessary to prevent infinite recursion and stack
/// overflows. This situation can arise when the "included" resource object includes the parent
/// resource object - it will simply ping-pong back and forth, unable to achieve a finite
/// resolution.
///
/// The JSON:API specification doesn't communicate the direction of a relationship.
/// Furthermore the current implementation of this crate does not establish an object graph
/// that could be used to traverse these relationships effectively.
#[doc(hidden)]
fn resource_to_attrs(resource: &Resource, included: &Option<Resources>, visited_relationships: &Vec<&str>)
-> ResourceAttributes
{
let mut new_attrs = HashMap::new();
new_attrs.clone_from(&resource.attributes);
new_attrs.insert("id".into(), resource.id.clone().into());
// Copy the contents of `visited_relationships` so that we can mutate within the lexical
// scope of this function call. This is also important so each edge that we follow (the
// relationship) is not polluted by data from traversing sibling relationships
let mut this_visited: Vec<&str> = Vec::new();
for rel in visited_relationships.iter() {
this_visited.push(rel);
}
if let Some(relations) = resource.relationships.as_ref() {
if let Some(inc) = included.as_ref() {
for (name, relation) in relations {
// If we have already visited this resource object, exit early and do not
// recurse through the relations
if this_visited.contains(&name.as_str()) {
return new_attrs;
}
// Track that we have visited this relationship to avoid infinite recursion
this_visited.push(name);
let value = match relation.data { | Some(IdentifierData::None) => Value::Null,
Some(IdentifierData::Single(ref identifier)) => {
let found = Self::lookup(identifier, inc)
.map(|r| Self::resource_to_attrs(r, included, &this_visited) );
to_value(found)
.expect("Casting Single relation to value")
},
Some(IdentifierData::Multiple(ref identifiers)) => {
let found: Vec<Option<ResourceAttributes>> =
identifiers.iter().map(|identifier|{
Self::lookup(identifier, inc).map(|r|{
Self::resource_to_attrs(r, included, &this_visited)
})
}).collect();
to_value(found)
.expect("Casting Multiple relation to value")
},
None => Value::Null,
};
new_attrs.insert(name.to_string(), value);
}
}
}
new_attrs
}
#[doc(hidden)]
fn from_serializable<S: Serialize>(s: S) -> Result<Self> {
from_value(to_value(s)?).map_err(Error::from)
}
}
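// A round-trip sketch for the trait above (hypothetical `Dog` model; assumes
// the crate's `jsonapi_model!` macro and the serde derives are in scope, as
// in the doc example further down):
#[cfg(test)]
mod round_trip_sketch {
    use super::*;

    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Dog {
        id: String,
        name: String,
    }
    jsonapi_model!(Dog; "dog");

    #[test]
    fn dog_survives_a_document_round_trip() {
        let fido = Dog { id: "1".into(), name: "fido".into() };
        match fido.to_jsonapi_document() {
            JsonApiDocument::Data(ref data) => {
                let back = Dog::from_jsonapi_document(data).unwrap();
                assert_eq!(back, fido);
            }
            _ => panic!("expected a data document"),
        }
    }
}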
/// Converts a `vec!` of structs into
/// [`Resources`](../api/type.Resources.html)
///
pub fn vec_to_jsonapi_resources<T: JsonApiModel>(
objects: Vec<T>,
) -> (Resources, Option<Resources>) {
let mut included = vec![];
let resources = objects
.iter()
.map(|obj| {
let (res, mut opt_incl) = obj.to_jsonapi_resource();
if let Some(ref mut incl) = opt_incl {
included.append(incl);
}
res
})
.collect::<Vec<_>>();
let opt_included = if included.is_empty() {
None
} else {
Some(included)
};
(resources, opt_included)
}
/// Converts a `vec!` of structs into a
/// [`JsonApiDocument`](../api/struct.JsonApiDocument.html)
///
/// ```rust
/// #[macro_use] extern crate serde_derive;
/// #[macro_use] extern crate jsonapi;
/// use jsonapi::api::*;
/// use jsonapi::model::*;
///
/// #[derive(Debug, PartialEq, Serialize, Deserialize)]
/// struct Flea {
/// id: String,
/// name: String,
/// }
///
/// jsonapi_model!(Flea; "flea");
///
/// let fleas = vec
fn to_jsonapi_resource(&self) -> (Resource, Option<Resources>) {
if let Value::Object(mut attrs) = to_value(self).unwrap() {
let _ = attrs.remove("id");
let resource = Resource {
_type: self.jsonapi_type(),
id: self.jsonapi_id(),
relationships: self.build_relationships(),
attributes: Self::extract_attributes(&attrs),
..Default::default()
};
(resource, self.build_included())
} else {
panic!(format!("{} is not a Value::Object", self.jsonapi_type()))
}
}
/// Converts the struct into a complete
/// [`JsonApiDocument`](../api/struct.JsonApiDocument.html)
fn to_jsonapi_document(&self) -> JsonApiDocument {
let (resource, included) = self.to_jsonapi_resource();
JsonApiDocument::Data (
DocumentData {
data: Some(PrimaryData::Single(Box::new(resource))),
included,
..Default::default()
}
)
}
#[doc(hidden)]
fn build_has_one<M: JsonApiModel>(model: &M) -> Relationship {
Relationship {
data: Some(IdentifierData::Single(model.as_resource_identifier())),
links: None
}
}
#[doc(hidden)]
fn build_has_many<M: JsonApiModel>(models: &[M]) -> Relationship {
Relationship {
data: Some(IdentifierData::Multiple(
models.iter().map(|m| m.as_resource_identifier()).collect()
)),
links: None
}
}
#[doc(hidden)]
fn as_resource_identifier(&self) -> ResourceIdentifier {
ResourceIdentifier {
_type: self.jsonapi_type(),
id: self.jsonapi_id(),
}
}
/* Attribute corresponding to the model is removed from the Map
* before calling this, so there's no need to ignore it like we do
* with the attributes that correspond with relationships.
*/
#[doc(hidden)]
fn extract_attributes(attrs: &Map<String, Value>) -> ResourceAttributes {
attrs
.iter()
.filter(|&(key, _)| {
if let Some(fields) = Self::relationship_fields() {
if fields.contains(&key.as_str()) {
return false;
}
}
true
})
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
}
#[doc(hidden)]
fn to_resources(&self) -> Resources {
let (me, maybe_others) = self.to_jsonapi_resource();
let mut flattened = vec![me];
if let Some(mut others) = maybe_others {
flattened.append(&mut others);
}
flattened
}
/// When passed a `ResourceIdentifier` (which contains a `type` and `id`)
/// this will iterate through the provided collection `haystack` in an
/// attempt to find and return the `Resource` whose `type` and `id`
/// attributes match
#[doc(hidden)]
fn lookup<'a>(needle: &ResourceIdentifier, haystack: &'a [Resource])
-> Option<&'a Resource>
{
for resource in haystack {
if resource._type == needle._type && resource.id == needle.id {
return Some(resource);
}
}
None
}
/// Return a [`ResourceAttributes`](../api/struct.ResourceAttributes.html)
/// object that contains the attributes in this `resource`. This will be
/// called recursively for each `relationship` on the resource in an attempt
/// to satisfy the properties for the calling struct.
///
/// The last parameter in this function call is `visited_relationships` which is used as this
/// function is called recursively. This `Vec` contains the JSON:API `relationships` that were
/// visited when this function was called last. When operating on the root node of the document
/// this is simply started with an empty `Vec`.
///
/// Tracking these "visited" relationships is necessary to prevent infinite recursion and stack
/// overflows. This situation can arise when the "included" resource object includes the parent
/// resource object - it will simply ping-pong back and forth, unable to achieve a finite
/// resolution.
///
/// The JSON:API specification doesn't communicate the direction of a relationship.
/// Furthermore the current implementation of this crate does not establish an object graph
/// that could be used to traverse these relationships effectively.
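///
/// As a sketch of the failure mode this guards against (the `author` and
/// `book` resource types are hypothetical, not part of this crate):
///
/// ```text
/// book.relationships.author  --> included author
/// author.relationships.books --> included book  (back to the start)
/// ```
///
/// Without the guard, resolving either resource would bounce between the
/// two included objects indefinitely.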
#[doc(hidden)]
fn resource_to_attrs(resource: &Resource, included: &Option<Resources>, visited_relationships: &Vec<&str>)
-> ResourceAttributes
{
let mut new_attrs = HashMap::new();
new_attrs.clone_from(&resource.attributes);
new_attrs.insert("id".into(), resource.id.clone().into());
// Copy the contents of `visited_relationships` so that we can mutate within the lexical
// scope of this function call. This is also important so each edge that we follow (the
// relationship) is not polluted by data from traversing sibling relationships
let mut this_visited: Vec<&str> = Vec::new();
for rel in visited_relationships.iter() {
this_visited.push(rel);
}
if let Some(relations) = resource.relationships.as_ref() {
if let Some(inc) = included.as_ref() {
for (name, relation) in relations {
// If we have already visited this resource object, exit early and do not
// recurse through the relations
if this_visited.contains(&name.as_str()) {
return new_attrs;
}
// Track that we have visited this relationship to avoid infinite recursion
this_visited.push(name);
let value = match relation.data {
Some(IdentifierData::None) => Value::Null,
Some(IdentifierData::Single(ref identifier)) => {
let found = Self::lookup(identifier, inc)
.map(|r| Self::resource_to_attrs(r, included, &this_visited) );
to_value(found)
.expect("Casting Single relation to value")
},
Some(IdentifierData::Multiple(ref identifiers)) => {
let found: Vec<Option<ResourceAttributes>> =
identifiers.iter().map(|identifier|{
Self::lookup(identifier, inc).map(|r|{
Self::resource_to_attrs(r, included, &this_visited)
})
}).collect();
to_value(found)
.expect("Casting Multiple relation to value")
},
None => Value::Null,
};
new_attrs.insert(name.to_string(), value);
}
}
}
new_attrs
}
#[doc(hidden)]
fn from_serializable<S: Serialize>(s: S) -> Result<Self> {
from_value(to_value(s)?).map_err(Error::from)
}
}
/// Converts a `vec!` of structs into
/// [`Resources`](../api/type.Resources.html)
///
pub fn vec_to_jsonapi_resources<T: JsonApiModel>(
objects: Vec<T>,
) -> (Resources, Option<Resources>) {
let mut included = vec![];
let resources = objects
.iter()
.map(|obj| {
let (res, mut opt_incl) = obj.to_jsonapi_resource();
if let Some(ref mut incl) = opt_incl {
included.append(incl);
}
res
})
.collect::<Vec<_>>();
let opt_included = if included.is_empty() {
None
} else {
Some(included)
};
(resources, opt_included)
}
/// Converts a `vec!` of structs into a
/// [`JsonApiDocument`](../api/struct.JsonApiDocument.html)
///
/// ```rust
/// #[macro_use] extern crate serde_derive;
/// #[macro_use] extern crate jsonapi;
/// use jsonapi::api::*;
/// use jsonapi::model::*;
///
/// #[derive(Debug, PartialEq, Serialize, Deserialize)]
/// struct Flea {
/// id: String,
/// name: String,
/// }
///
/// jsonapi_model!(Flea; "flea");
///
/// let fleas = vec![
/// Flea {
/// id: "2".into(),
/// name: "rick".into(),
/// },
/// Flea {
/// id: "3".into(),
/// name: "morty".into(),
/// },
/// ];
/// let doc = vec_to_jsonapi_document(fleas);
/// assert!(doc.is_valid());
/// ```
pub fn vec_to_jsonapi_document<T: JsonApiModel>(objects: Vec<T>) -> JsonApiDocument {
let (resources, included) = vec_to_jsonapi_resources(objects);
JsonApiDocument::Data (
DocumentData {
data: Some(PrimaryData::Multiple(resources)),
included,
..Default::default()
}
)
}
impl<M: JsonApiModel> JsonApiModel for Box<M> {
fn jsonapi_type(&self) -> String {
self.as_ref().jsonapi_type()
}
fn jsonapi_id(&self) -> String | {
self.as_ref().jsonapi_id()
} | identifier_body |
|
model.rs | _to_attrs(resource, included, &visited_relationships))
}
/// Create a single resource object or collection of resource
/// objects directly from
/// [`DocumentData`](../api/struct.DocumentData.html). This method
/// will parse the document (the `data` and `included` resources) in an
/// attempt to instantiate the calling struct.
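///
/// A minimal round-trip sketch (`Flea` is the hypothetical model from the
/// `vec_to_jsonapi_document` doctest, not a type exported by this crate):
///
/// ```ignore
/// let doc = flea.to_jsonapi_document();
/// if let JsonApiDocument::Data(ref data) = doc {
///     let restored = Flea::from_jsonapi_document(data)?;
///     assert_eq!(restored, flea);
/// }
/// ```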
fn from_jsonapi_document(doc: &DocumentData) -> Result<Self> {
match doc.data.as_ref() {
Some(primary_data) => {
match *primary_data {
PrimaryData::None => bail!("Document had no data"),
PrimaryData::Single(ref resource) => {
Self::from_jsonapi_resource(resource, &doc.included)
}
PrimaryData::Multiple(ref resources) => {
let visited_relationships: Vec<&str> = Vec::new();
let all: Vec<ResourceAttributes> = resources
.iter()
.map(|r| Self::resource_to_attrs(r, &doc.included, &visited_relationships))
.collect();
Self::from_serializable(all)
}
}
}
None => bail!("Document had no data"),
}
}
/// Converts the instance of the struct into a
/// [`Resource`](../api/struct.Resource.html)
fn to_jsonapi_resource(&self) -> (Resource, Option<Resources>) {
if let Value::Object(mut attrs) = to_value(self).unwrap() {
let _ = attrs.remove("id");
let resource = Resource {
_type: self.jsonapi_type(),
id: self.jsonapi_id(),
relationships: self.build_relationships(),
attributes: Self::extract_attributes(&attrs),
..Default::default()
};
(resource, self.build_included())
} else {
panic!("{} is not a Value::Object", self.jsonapi_type())
}
}
/// Converts the struct into a complete
/// [`JsonApiDocument`](../api/struct.JsonApiDocument.html)
fn to_jsonapi_document(&self) -> JsonApiDocument {
let (resource, included) = self.to_jsonapi_resource();
JsonApiDocument::Data (
DocumentData {
data: Some(PrimaryData::Single(Box::new(resource))),
included,
..Default::default()
}
)
}
#[doc(hidden)]
fn build_has_one<M: JsonApiModel>(model: &M) -> Relationship {
Relationship {
data: Some(IdentifierData::Single(model.as_resource_identifier())),
links: None
}
}
#[doc(hidden)]
fn build_has_many<M: JsonApiModel>(models: &[M]) -> Relationship {
Relationship {
data: Some(IdentifierData::Multiple(
models.iter().map(|m| m.as_resource_identifier()).collect()
)),
links: None
}
}
#[doc(hidden)]
fn as_resource_identifier(&self) -> ResourceIdentifier {
ResourceIdentifier {
_type: self.jsonapi_type(),
id: self.jsonapi_id(),
}
}
/* Attribute corresponding to the model is removed from the Map
* before calling this, so there's no need to ignore it like we do
* with the attributes that correspond with relationships.
*/
#[doc(hidden)]
fn extract_attributes(attrs: &Map<String, Value>) -> ResourceAttributes {
attrs
.iter()
.filter(|&(key, _)| {
if let Some(fields) = Self::relationship_fields() {
if fields.contains(&key.as_str()) {
return false;
}
}
true
})
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
}
#[doc(hidden)]
fn | (&self) -> Resources {
let (me, maybe_others) = self.to_jsonapi_resource();
let mut flattened = vec![me];
if let Some(mut others) = maybe_others {
flattened.append(&mut others);
}
flattened
}
/// When passed a `ResourceIdentifier` (which contains a `type` and `id`)
/// this will iterate through the provided collection `haystack` in an
/// attempt to find and return the `Resource` whose `type` and `id`
/// attributes match
#[doc(hidden)]
fn lookup<'a>(needle: &ResourceIdentifier, haystack: &'a [Resource])
-> Option<&'a Resource>
{
for resource in haystack {
if resource._type == needle._type && resource.id == needle.id {
return Some(resource);
}
}
None
}
/// Return a [`ResourceAttributes`](../api/struct.ResourceAttributes.html)
/// object that contains the attributes in this `resource`. This will be
/// called recursively for each `relationship` on the resource in an attempt
/// to satisfy the properties for the calling struct.
///
/// The last parameter in this function call is `visited_relationships` which is used as this
/// function is called recursively. This `Vec` contains the JSON:API `relationships` that were
/// visited when this function was called last. When operating on the root node of the document
/// this is simply started with an empty `Vec`.
///
/// Tracking these "visited" relationships is necessary to prevent infinite recursion and stack
/// overflows. This situation can arise when the "included" resource object includes the parent
/// resource object - it will simply ping-pong back and forth, unable to achieve a finite
/// resolution.
///
/// The JSON:API specification doesn't communicate the direction of a relationship.
/// Furthermore the current implementation of this crate does not establish an object graph
/// that could be used to traverse these relationships effectively.
#[doc(hidden)]
fn resource_to_attrs(resource: &Resource, included: &Option<Resources>, visited_relationships: &Vec<&str>)
-> ResourceAttributes
{
let mut new_attrs = HashMap::new();
new_attrs.clone_from(&resource.attributes);
new_attrs.insert("id".into(), resource.id.clone().into());
// Copy the contents of `visited_relationships` so that we can mutate within the lexical
// scope of this function call. This is also important so each edge that we follow (the
// relationship) is not polluted by data from traversing sibling relationships
let mut this_visited: Vec<&str> = Vec::new();
for rel in visited_relationships.iter() {
this_visited.push(rel);
}
if let Some(relations) = resource.relationships.as_ref() {
if let Some(inc) = included.as_ref() {
for (name, relation) in relations {
// If we have already visited this resource object, exit early and do not
// recurse through the relations
if this_visited.contains(&name.as_str()) {
return new_attrs;
}
// Track that we have visited this relationship to avoid infinite recursion
this_visited.push(name);
let value = match relation.data {
Some(IdentifierData::None) => Value::Null,
Some(IdentifierData::Single(ref identifier)) => {
let found = Self::lookup(identifier, inc)
.map(|r| Self::resource_to_attrs(r, included, &this_visited) );
to_value(found)
.expect("Casting Single relation to value")
},
Some(IdentifierData::Multiple(ref identifiers)) => {
let found: Vec<Option<ResourceAttributes>> =
identifiers.iter().map(|identifier|{
Self::lookup(identifier, inc).map(|r|{
Self::resource_to_attrs(r, included, &this_visited)
})
}).collect();
to_value(found)
.expect("Casting Multiple relation to value")
},
None => Value::Null,
};
new_attrs.insert(name.to_string(), value);
}
}
}
new_attrs
}
#[doc(hidden)]
fn from_serializable<S: Serialize>(s: S) -> Result<Self> {
from_value(to_value(s)?).map_err(Error::from)
}
}
/// Converts a `vec!` of structs into
/// [`Resources`](../api/type.Resources.html)
///
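/// A usage sketch (reusing the hypothetical `Flea` model from the
/// `vec_to_jsonapi_document` doctest below):
///
/// ```ignore
/// let (resources, included) = vec_to_jsonapi_resources(fleas);
/// assert_eq!(resources.len(), 2);
/// // `Flea` declares no relationships, so nothing is included
/// assert!(included.is_none());
/// ```
///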
pub fn vec_to_jsonapi_resources<T: JsonApiModel>(
objects: Vec<T>,
) -> (Resources, Option<Resources>) {
let mut included = vec![];
let resources = objects
.iter()
.map(|obj| {
let (res, mut opt_incl) = obj.to_jsonapi_resource();
if let Some(ref mut incl) = opt_incl {
included.append(incl);
}
res
})
.collect::<Vec<_>>();
let opt_included = if included.is_empty() {
None
} else {
Some(included)
};
(resources, opt_included)
}
/// Converts a `vec!` of structs into a
/// [`JsonApiDocument`](../api/struct.JsonApiDocument.html)
///
/// ```rust
/// #[macro_use] extern crate serde_derive;
/// #[macro_use] extern crate jsonapi;
/// use jsonapi::api::*;
/// use jsonapi::model::*;
///
/// #[derive(Debug, PartialEq, Serialize, Deserialize)]
/// struct Flea {
/// id: String,
/// name: String,
/// }
///
/// jsonapi_model!(Flea; "flea");
///
/// let fleas = vec![
/// Flea {
/// id: "2".into(),
/// name: "rick".into(),
/// },
/// Flea {
/// id: "3".into(),
/// name: | to_resources | identifier_name |
UserController.ts | import e = require('express');
import BaseController from "./BaseController";
import { router } from "../decorators/Web";
import { Uc, UcGroup,User,UcGroupModel, UserModel,UserHelper} from '../models/index';
class | extends BaseController {
@router({
method: 'post',
path: '/api/user/setting'
})
async create(req: e.Request, res: e.Response) {
let result = await User.insert(req.body);
res.send(super.wrapperRes(result));
}
@router({
method: 'patch',
path: '/api/user/setting'
})
async update(req: e.Request, res: e.Response) {
let user: UserModel = super.getUser(req);
let userModel: UserModel = UserHelper.buildModel(req.body);
userModel._id=user._id;
userModel.setModifiedInfo(user);
let result = await User.update(req.body);
res.send(super.wrapperRes(result));
}
}
export default UserController
| UserController | identifier_name |
UserController.ts | import e = require('express');
import BaseController from "./BaseController";
import { router } from "../decorators/Web";
import { Uc, UcGroup,User,UcGroupModel, UserModel,UserHelper} from '../models/index';
class UserController extends BaseController {
@router({
method: 'post',
path: '/api/user/setting'
})
async create(req: e.Request, res: e.Response) {
let result = await User.insert(req.body);
res.send(super.wrapperRes(result));
} |
@router({
method: 'patch',
path: '/api/user/setting'
})
async update(req: e.Request, res: e.Response) {
let user: UserModel = super.getUser(req);
let userModel: UserModel = UserHelper.buildModel(req.body);
userModel._id=user._id;
userModel.setModifiedInfo(user);
let result = await User.update(req.body);
res.send(super.wrapperRes(result));
}
}
export default UserController | random_line_split |
|
index.ts | import styled, { css as styledCss, keyframes } from 'styled-components'
import type { TTestable } from '@/spec'
import Img from '@/Img'
import { theme } from '@/utils/themes'
import css from '@/utils/css'
const DURATION = '2.5s'
const load = keyframes`
0% {
top: 24px;
}
70% {
top: 10px;
}
90% {
top: 0;
}
95% {
top: 0;
}
100% {
top: 24px;
}
`
const liquid1 = keyframes`
0% {
height: 0;
opacity: 0;
top: -5px;
}
22% {
height: 2.8125px;
top: 3.75px;
opacity: 1;
}
25% {
top: -2.5px;
}
35% {
height: 11.25px;
top: -5px;
}
55% {
height: 3px;
top: -1.25px;
}
60% {
height: 6px;
opacity: 1;
top: -3px;
}
96% {
height: 8.4375px;
opacity: 0;
top: 5px;
}
100% {
height: 0;
opacity: 0;
}
`
const liquid2 = keyframes`
0% {
height: 0;
opacity: 0;
top: -0.5rem;
}
17.5% {
height: 3px;
top: 2px;
opacity: 1;
}
20% {
top: -2.5px;
}
25% {
height: 15px;
top: -6px;
}
45% {
height: 3px;
top: -1px;
}
60% {
opacity: 1;
height: 15px;
top: -5px;
}
96% {
opacity: 0;
height: 8px;
top: 5px;
}
100% {
height: 0;
opacity: 0;
}
`
const loadRule = styledCss`
${load} ${DURATION} infinite;
`
const liquid1Rule = styledCss`
${liquid1} ${DURATION} infinite;
`
const liquid2Rule = styledCss`
${liquid2} ${DURATION} infinite;
`
export const Wrapper = styled.div.attrs(({ testid }: TTestable) => ({
'data-test-id': testid,
}))<TTestable>`
text-align: center;
position: relative;
height: 28px;
margin-bottom: 6px;
cursor: pointer;
`
export const Battery = styled.div`
display: inline-block;
position: relative;
width: 16px;
height: 26px;
box-shadow: 0 0 0 2px #155e76;
border-radius: 2px;
&:before {
content: '';
position: absolute;
left: 5px;
top: -4px;
height: 3px;
width: 6px;
background: #155e76;
border-radius: 2px;
}
${Wrapper}:hover & {
&:after { | right: 0;
border-right: 16px solid transparent;
border-bottom: 22px solid rgba(255, 255, 255, 0.25);
}
}
`
export const Liquid = styled.div`
position: absolute;
top: 23px;
bottom: 0;
left: 0;
right: 0;
width: 16px;
background: ${theme('baseColor.green')};
${Wrapper}:hover & {
top: 0;
animation: ${loadRule};
&:before {
left: 0;
animation: ${liquid2Rule};
content: '';
position: absolute;
top: -5px;
height: 11.25px;
width: 14.625px;
background: ${theme('baseColor.green')};
border-radius: 50%;
opacity: 0;
}
&:after {
right: 0;
animation: ${liquid1Rule};
content: '';
position: absolute;
top: -5px;
height: 11.25px;
width: 14.625px;
background: ${theme('baseColor.green')};
border-radius: 50%;
opacity: 0;
}
}
`
export const MoneySign = styled(Img)`
position: absolute;
top: 6px;
left: 3px;
${css.size(10)};
fill: #327faf;
transition: opacity 0.25s;
${Wrapper}:hover & {
fill: #ecbcb3;
top: 8px;
left: 2px;
${css.size(12)};
}
transition: all 0.2s;
` | content: '';
position: absolute;
top: 0;
bottom: 0;
left: 0; | random_line_split |
update.test.ts | import * as chai from 'chai';
import * as mongodb from 'mongodb';
import { Tyr } from 'tyranid';
const { ObjectId } = mongodb;
const O = (ObjectId as any) as (id: string) => mongodb.ObjectId;
const { expect } = chai;
export function add() {
describe('update.js', () => {
const { intersection, matches, merge } = Tyr.query,
i1 = '111111111111111111111111',
i2 = '222222222222222222222222',
i3 = '333333333333333333333333',
i4 = '444444444444444444444444';
describe('fromClientUpdate', () => {
let Book: Tyr.BookCollection, User: Tyr.UserCollection;
before(() => {
Book = Tyr.byName.book;
User = Tyr.byName.user;
});
it('should handle variation 1', () => {
const title = 'Browsers';
const clientUpdate = { | };
const serverUpdate = Book.fromClientUpdate(clientUpdate);
expect(serverUpdate.$set.title).to.be.eql(title);
expect(serverUpdate.$set.isbn).to.be.an.instanceof(O);
});
it('should support path strings', () => {
const clientUpdate = {
'name.first': 'An',
};
const serverUpdate = User.fromClientQuery(clientUpdate);
expect(serverUpdate['name.first']).to.be.eql('An');
});
it('should fail on invalid path strings', () => {
const clientUpdate = {
$set: {
'name.foo': 'An',
},
};
expect(() => {
User.fromClientUpdate(clientUpdate);
}).to.throw(/cannot find/i);
});
});
});
} | $set: {
title,
isbn: '5614c2f00000000000000000',
}, | random_line_split |
update.test.ts | import * as chai from 'chai';
import * as mongodb from 'mongodb';
import { Tyr } from 'tyranid';
const { ObjectId } = mongodb;
const O = (ObjectId as any) as (id: string) => mongodb.ObjectId;
const { expect } = chai;
export function | () {
describe('update.js', () => {
const { intersection, matches, merge } = Tyr.query,
i1 = '111111111111111111111111',
i2 = '222222222222222222222222',
i3 = '333333333333333333333333',
i4 = '444444444444444444444444';
describe('fromClientUpdate', () => {
let Book: Tyr.BookCollection, User: Tyr.UserCollection;
before(() => {
Book = Tyr.byName.book;
User = Tyr.byName.user;
});
it('should handle variation 1', () => {
const title = 'Browsers';
const clientUpdate = {
$set: {
title,
isbn: '5614c2f00000000000000000',
},
};
const serverUpdate = Book.fromClientUpdate(clientUpdate);
expect(serverUpdate.$set.title).to.be.eql(title);
expect(serverUpdate.$set.isbn).to.be.an.instanceof(O);
});
it('should support path strings', () => {
const clientUpdate = {
'name.first': 'An',
};
const serverUpdate = User.fromClientQuery(clientUpdate);
expect(serverUpdate['name.first']).to.be.eql('An');
});
it('should fail on invalid path strings', () => {
const clientUpdate = {
$set: {
'name.foo': 'An',
},
};
expect(() => {
User.fromClientUpdate(clientUpdate);
}).to.throw(/cannot find/i);
});
});
});
}
| add | identifier_name |
update.test.ts | import * as chai from 'chai';
import * as mongodb from 'mongodb';
import { Tyr } from 'tyranid';
const { ObjectId } = mongodb;
const O = (ObjectId as any) as (id: string) => mongodb.ObjectId;
const { expect } = chai;
export function add() | title,
isbn: '5614c2f00000000000000000',
},
};
const serverUpdate = Book.fromClientUpdate(clientUpdate);
expect(serverUpdate.$set.title).to.be.eql(title);
expect(serverUpdate.$set.isbn).to.be.an.instanceof(O);
});
it('should support path strings', () => {
const clientUpdate = {
'name.first': 'An',
};
const serverUpdate = User.fromClientQuery(clientUpdate);
expect(serverUpdate['name.first']).to.be.eql('An');
});
it('should fail on invalid path strings', () => {
const clientUpdate = {
$set: {
'name.foo': 'An',
},
};
expect(() => {
User.fromClientUpdate(clientUpdate);
}).to.throw(/cannot find/i);
});
});
});
}
| {
describe('update.js', () => {
const { intersection, matches, merge } = Tyr.query,
i1 = '111111111111111111111111',
i2 = '222222222222222222222222',
i3 = '333333333333333333333333',
i4 = '444444444444444444444444';
describe('fromClientUpdate', () => {
let Book: Tyr.BookCollection, User: Tyr.UserCollection;
before(() => {
Book = Tyr.byName.book;
User = Tyr.byName.user;
});
it('should handle variation 1', () => {
const title = 'Browsers';
const clientUpdate = {
$set: { | identifier_body |
0003_convert_recomended_articles.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import ArticlePage, ArticlePageRecommendedSections
from wagtail.wagtailcore.blocks import StreamValue
def create_recomended_articles(main_article, article_list):
'''
Creates recommended article objects from article_list
and _prepends_ to existing recommended articles.
'''
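# For example (hypothetical ordering): if main_article already recommended
# [a, b] and article_list is [c, a], the result is [c, a, b]: hyperlinked
# articles are prepended, then the surviving originals follow.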
existing_recommended_articles = [
ra.recommended_article.specific
for ra in main_article.recommended_articles.all()]
ArticlePageRecommendedSections.objects.filter(page=main_article).delete()
for hyperlinked_article in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=hyperlinked_article).save()
# re-create existing recommended articles
for article in existing_recommended_articles:
if article not in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=article).save()
def convert_articles(apps, schema_editor):
'''
Derived from https://github.com/wagtail/wagtail/issues/2110
'''
articles = ArticlePage.objects.all().exact_type(ArticlePage)
for article in articles:
stream_data = []
linked_articles = []
for block in article.body.stream_data:
if block['type'] == 'page':
if ArticlePage.objects.filter(id=block['value']):
linked_articles.append(ArticlePage.objects.get(
id=block['value'])) | stream_data.append(block)
if linked_articles:
create_recomended_articles(article, linked_articles)
stream_block = article.body.stream_block
article.body = StreamValue(stream_block, stream_data, is_lazy=True)
article.save()
section = article.get_parent().specific
section.enable_recommended_section = True
section.enable_next_section = True
section.save()
class Migration(migrations.Migration):
dependencies = [
('iogt', '0002_create_importers_group'),
]
operations = [
migrations.RunPython(convert_articles),
] | else:
# add block to new stream_data | random_line_split |
0003_convert_recomended_articles.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import ArticlePage, ArticlePageRecommendedSections
from wagtail.wagtailcore.blocks import StreamValue
def create_recomended_articles(main_article, article_list):
'''
Creates recommended article objects from article_list
and _prepends_ to existing recommended articles.
'''
existing_recommended_articles = [
ra.recommended_article.specific
for ra in main_article.recommended_articles.all()]
ArticlePageRecommendedSections.objects.filter(page=main_article).delete()
for hyperlinked_article in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=hyperlinked_article).save()
# re-create existing recommended articles
for article in existing_recommended_articles:
if article not in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=article).save()
def | (apps, schema_editor):
'''
Derived from https://github.com/wagtail/wagtail/issues/2110
'''
articles = ArticlePage.objects.all().exact_type(ArticlePage)
for article in articles:
stream_data = []
linked_articles = []
for block in article.body.stream_data:
if block['type'] == 'page':
if ArticlePage.objects.filter(id=block['value']):
linked_articles.append(ArticlePage.objects.get(
id=block['value']))
else:
# add block to new stream_data
stream_data.append(block)
if linked_articles:
create_recomended_articles(article, linked_articles)
stream_block = article.body.stream_block
article.body = StreamValue(stream_block, stream_data, is_lazy=True)
article.save()
section = article.get_parent().specific
section.enable_recommended_section = True
section.enable_next_section = True
section.save()
class Migration(migrations.Migration):
dependencies = [
('iogt', '0002_create_importers_group'),
]
operations = [
migrations.RunPython(convert_articles),
]
| convert_articles | identifier_name |
0003_convert_recomended_articles.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import ArticlePage, ArticlePageRecommendedSections
from wagtail.wagtailcore.blocks import StreamValue
def create_recomended_articles(main_article, article_list):
'''
Creates recommended article objects from article_list
and _prepends_ to existing recommended articles.
'''
existing_recommended_articles = [
ra.recommended_article.specific
for ra in main_article.recommended_articles.all()]
ArticlePageRecommendedSections.objects.filter(page=main_article).delete()
for hyperlinked_article in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=hyperlinked_article).save()
# re-create existing recommended articles
for article in existing_recommended_articles:
if article not in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=article).save()
def convert_articles(apps, schema_editor):
'''
Derived from https://github.com/wagtail/wagtail/issues/2110
'''
articles = ArticlePage.objects.all().exact_type(ArticlePage)
for article in articles:
stream_data = []
linked_articles = []
for block in article.body.stream_data:
if block['type'] == 'page':
if ArticlePage.objects.filter(id=block['value']):
linked_articles.append(ArticlePage.objects.get(
id=block['value']))
else:
# add block to new stream_data
stream_data.append(block)
if linked_articles:
|
stream_block = article.body.stream_block
article.body = StreamValue(stream_block, stream_data, is_lazy=True)
article.save()
section = article.get_parent().specific
section.enable_recommended_section = True
section.enable_next_section = True
section.save()
class Migration(migrations.Migration):
dependencies = [
('iogt', '0002_create_importers_group'),
]
operations = [
migrations.RunPython(convert_articles),
]
| create_recomended_articles(article, linked_articles) | conditional_block |
0003_convert_recomended_articles.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import ArticlePage, ArticlePageRecommendedSections
from wagtail.wagtailcore.blocks import StreamValue
def create_recomended_articles(main_article, article_list):
'''
Creates recommended article objects from article_list
and _prepends_ to existing recommended articles.
'''
existing_recommended_articles = [
ra.recommended_article.specific
for ra in main_article.recommended_articles.all()]
ArticlePageRecommendedSections.objects.filter(page=main_article).delete()
for hyperlinked_article in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=hyperlinked_article).save()
# re-create existing recommended articles
for article in existing_recommended_articles:
if article not in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=article).save()
def convert_articles(apps, schema_editor):
'''
Derived from https://github.com/wagtail/wagtail/issues/2110
'''
articles = ArticlePage.objects.all().exact_type(ArticlePage)
for article in articles:
stream_data = []
linked_articles = []
for block in article.body.stream_data:
if block['type'] == 'page':
if ArticlePage.objects.filter(id=block['value']):
linked_articles.append(ArticlePage.objects.get(
id=block['value']))
else:
# add block to new stream_data
stream_data.append(block)
if linked_articles:
create_recomended_articles(article, linked_articles)
stream_block = article.body.stream_block
article.body = StreamValue(stream_block, stream_data, is_lazy=True)
article.save()
section = article.get_parent().specific
section.enable_recommended_section = True
section.enable_next_section = True
section.save()
class Migration(migrations.Migration):
| dependencies = [
('iogt', '0002_create_importers_group'),
]
operations = [
migrations.RunPython(convert_articles),
] | identifier_body |
|
BucketsPage.ts | import { SelectionModel } from '@angular/cdk/collections';
import { AfterViewInit, ChangeDetectionStrategy, Component, ViewChild } from '@angular/core';
import { FormControl, FormGroup } from '@angular/forms';
import { MatDialog } from '@angular/material/dialog';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { Title } from '@angular/platform-browser';
import { ActivatedRoute, Router } from '@angular/router';
import { BehaviorSubject } from 'rxjs';
import { Bucket, StorageClient } from '../../client';
import { YamcsService } from '../../core/services/YamcsService';
import { Option } from '../../shared/forms/Select';
import { CreateBucketDialog } from './CreateBucketDialog';
@Component({
templateUrl: './BucketsPage.html',
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class BucketsPage implements AfterViewInit {
@ViewChild(MatSort, { static: true })
sort: MatSort;
instance = '_global';
displayedColumns = [
'select',
'name',
'size',
'numObjects',
'actions',
];
filterForm = new FormGroup({
instance: new FormControl('_global'),
});
instanceOptions$ = new BehaviorSubject<Option[]>([
{ id: '_global', label: '_global' },
]);
dataSource = new MatTableDataSource<Bucket>();
selection = new SelectionModel<Bucket>(true, []);
private storageClient: StorageClient;
constructor(
private yamcs: YamcsService,
private dialog: MatDialog,
private router: Router,
private route: ActivatedRoute,
title: Title,
) {
title.setTitle('Buckets');
this.storageClient = this.yamcs.createStorageClient();
yamcs.yamcsClient.getInstances({
filter: 'state=RUNNING',
}).then(instances => {
for (const instance of instances) {
this.instanceOptions$.next([
...this.instanceOptions$.value,
{
id: instance.name,
label: instance.name,
}
]);
}
});
this.initializeOptions();
this.refreshDataSources();
this.filterForm.get('instance')!.valueChanges.forEach(instance => {
this.instance = instance;
this.refreshDataSources();
});
}
private initializeOptions() {
const queryParams = this.route.snapshot.queryParamMap;
if (queryParams.has('instance')) {
this.instance = queryParams.get('instance')!;
this.filterForm.get('instance')!.setValue(this.instance);
}
}
ngAfterViewInit() {
this.dataSource.sort = this.sort;
}
isAllSelected() {
const numSelected = this.selection.selected.length;
const numRows = this.dataSource.data.length;
return numSelected === numRows;
}
masterToggle() {
this.isAllSelected() ?
this.selection.clear() :
this.dataSource.data.forEach(row => this.selection.select(row));
}
toggleOne(row: Bucket) {
if (!this.selection.isSelected(row) || this.selection.selected.length > 1) |
this.selection.toggle(row);
}
createBucket() {
const dialogRef = this.dialog.open(CreateBucketDialog, {
width: '400px',
data: {
bucketInstance: this.instance,
},
});
dialogRef.afterClosed().subscribe(result => {
if (result) {
this.refreshDataSources();
}
});
}
deleteSelectedBuckets() {
if (confirm('Are you sure you want to delete the selected buckets?')) {
const deletePromises = [];
for (const bucket of this.selection.selected) {
const promise = this.storageClient.deleteBucket(this.instance, bucket.name);
deletePromises.push(promise);
}
Promise.all(deletePromises).then(() => {
this.selection.clear();
this.refreshDataSources();
});
}
}
private refreshDataSources() {
this.updateURL();
this.storageClient.getBuckets(this.instance).then(buckets => {
this.dataSource.data = buckets;
});
}
private updateURL() {
this.router.navigate([], {
replaceUrl: true,
relativeTo: this.route,
queryParams: {
instance: this.instance || null,
},
queryParamsHandling: 'merge',
});
}
}
| {
this.selection.clear();
} | conditional_block |
BucketsPage.ts | import { SelectionModel } from '@angular/cdk/collections';
import { AfterViewInit, ChangeDetectionStrategy, Component, ViewChild } from '@angular/core';
import { FormControl, FormGroup } from '@angular/forms';
import { MatDialog } from '@angular/material/dialog';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { Title } from '@angular/platform-browser';
import { ActivatedRoute, Router } from '@angular/router';
import { BehaviorSubject } from 'rxjs';
import { Bucket, StorageClient } from '../../client';
import { YamcsService } from '../../core/services/YamcsService';
import { Option } from '../../shared/forms/Select';
import { CreateBucketDialog } from './CreateBucketDialog';
@Component({
templateUrl: './BucketsPage.html',
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class BucketsPage implements AfterViewInit {
@ViewChild(MatSort, { static: true })
sort: MatSort;
instance = '_global';
displayedColumns = [
'select',
'name',
'size',
'numObjects',
'actions',
];
filterForm = new FormGroup({
instance: new FormControl('_global'),
});
instanceOptions$ = new BehaviorSubject<Option[]>([
{ id: '_global', label: '_global' },
]);
dataSource = new MatTableDataSource<Bucket>();
selection = new SelectionModel<Bucket>(true, []);
private storageClient: StorageClient;
constructor(
private yamcs: YamcsService,
private dialog: MatDialog,
private router: Router,
private route: ActivatedRoute,
title: Title,
) {
title.setTitle('Buckets');
this.storageClient = this.yamcs.createStorageClient();
yamcs.yamcsClient.getInstances({
filter: 'state=RUNNING',
}).then(instances => {
for (const instance of instances) {
this.instanceOptions$.next([
...this.instanceOptions$.value,
{
id: instance.name,
label: instance.name,
}
]);
}
});
this.initializeOptions();
this.refreshDataSources();
this.filterForm.get('instance')!.valueChanges.forEach(instance => {
this.instance = instance;
this.refreshDataSources();
});
}
private initializeOptions() {
const queryParams = this.route.snapshot.queryParamMap;
if (queryParams.has('instance')) {
this.instance = queryParams.get('instance')!;
this.filterForm.get('instance')!.setValue(this.instance);
}
}
ngAfterViewInit() {
this.dataSource.sort = this.sort;
}
isAllSelected() {
const numSelected = this.selection.selected.length;
const numRows = this.dataSource.data.length;
return numSelected === numRows;
}
masterToggle() {
this.isAllSelected() ?
this.selection.clear() : | if (!this.selection.isSelected(row) || this.selection.selected.length > 1) {
this.selection.clear();
}
this.selection.toggle(row);
}
createBucket() {
const dialogRef = this.dialog.open(CreateBucketDialog, {
width: '400px',
data: {
bucketInstance: this.instance,
},
});
dialogRef.afterClosed().subscribe(result => {
if (result) {
this.refreshDataSources();
}
});
}
deleteSelectedBuckets() {
if (confirm('Are you sure you want to delete the selected buckets?')) {
const deletePromises = [];
for (const bucket of this.selection.selected) {
const promise = this.storageClient.deleteBucket(this.instance, bucket.name);
deletePromises.push(promise);
}
Promise.all(deletePromises).then(() => {
this.selection.clear();
this.refreshDataSources();
});
}
}
private refreshDataSources() {
this.updateURL();
this.storageClient.getBuckets(this.instance).then(buckets => {
this.dataSource.data = buckets;
});
}
private updateURL() {
this.router.navigate([], {
replaceUrl: true,
relativeTo: this.route,
queryParams: {
instance: this.instance || null,
},
queryParamsHandling: 'merge',
});
}
} | this.dataSource.data.forEach(row => this.selection.select(row));
}
toggleOne(row: Bucket) { | random_line_split |
BucketsPage.ts | import { SelectionModel } from '@angular/cdk/collections';
import { AfterViewInit, ChangeDetectionStrategy, Component, ViewChild } from '@angular/core';
import { FormControl, FormGroup } from '@angular/forms';
import { MatDialog } from '@angular/material/dialog';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { Title } from '@angular/platform-browser';
import { ActivatedRoute, Router } from '@angular/router';
import { BehaviorSubject } from 'rxjs';
import { Bucket, StorageClient } from '../../client';
import { YamcsService } from '../../core/services/YamcsService';
import { Option } from '../../shared/forms/Select';
import { CreateBucketDialog } from './CreateBucketDialog';
@Component({
templateUrl: './BucketsPage.html',
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class BucketsPage implements AfterViewInit {
@ViewChild(MatSort, { static: true })
sort: MatSort;
instance = '_global';
displayedColumns = [
'select',
'name',
'size',
'numObjects',
'actions',
];
filterForm = new FormGroup({
instance: new FormControl('_global'),
});
instanceOptions$ = new BehaviorSubject<Option[]>([
{ id: '_global', label: '_global' },
]);
dataSource = new MatTableDataSource<Bucket>();
selection = new SelectionModel<Bucket>(true, []);
private storageClient: StorageClient;
constructor(
private yamcs: YamcsService,
private dialog: MatDialog,
private router: Router,
private route: ActivatedRoute,
title: Title,
) {
title.setTitle('Buckets');
this.storageClient = this.yamcs.createStorageClient();
yamcs.yamcsClient.getInstances({
filter: 'state=RUNNING',
}).then(instances => {
for (const instance of instances) {
this.instanceOptions$.next([
...this.instanceOptions$.value,
{
id: instance.name,
label: instance.name,
}
]);
}
});
this.initializeOptions();
this.refreshDataSources();
this.filterForm.get('instance')!.valueChanges.forEach(instance => {
this.instance = instance;
this.refreshDataSources();
});
}
private | () {
const queryParams = this.route.snapshot.queryParamMap;
if (queryParams.has('instance')) {
this.instance = queryParams.get('instance')!;
this.filterForm.get('instance')!.setValue(this.instance);
}
}
ngAfterViewInit() {
this.dataSource.sort = this.sort;
}
isAllSelected() {
const numSelected = this.selection.selected.length;
const numRows = this.dataSource.data.length;
return numSelected === numRows;
}
masterToggle() {
this.isAllSelected() ?
this.selection.clear() :
this.dataSource.data.forEach(row => this.selection.select(row));
}
toggleOne(row: Bucket) {
if (!this.selection.isSelected(row) || this.selection.selected.length > 1) {
this.selection.clear();
}
this.selection.toggle(row);
}
createBucket() {
const dialogRef = this.dialog.open(CreateBucketDialog, {
width: '400px',
data: {
bucketInstance: this.instance,
},
});
dialogRef.afterClosed().subscribe(result => {
if (result) {
this.refreshDataSources();
}
});
}
deleteSelectedBuckets() {
if (confirm('Are you sure you want to delete the selected buckets?')) {
const deletePromises = [];
for (const bucket of this.selection.selected) {
const promise = this.storageClient.deleteBucket(this.instance, bucket.name);
deletePromises.push(promise);
}
Promise.all(deletePromises).then(() => {
this.selection.clear();
this.refreshDataSources();
});
}
}
private refreshDataSources() {
this.updateURL();
this.storageClient.getBuckets(this.instance).then(buckets => {
this.dataSource.data = buckets;
});
}
private updateURL() {
this.router.navigate([], {
replaceUrl: true,
relativeTo: this.route,
queryParams: {
instance: this.instance || null,
},
queryParamsHandling: 'merge',
});
}
}
| initializeOptions | identifier_name |
BucketsPage.ts | import { SelectionModel } from '@angular/cdk/collections';
import { AfterViewInit, ChangeDetectionStrategy, Component, ViewChild } from '@angular/core';
import { FormControl, FormGroup } from '@angular/forms';
import { MatDialog } from '@angular/material/dialog';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { Title } from '@angular/platform-browser';
import { ActivatedRoute, Router } from '@angular/router';
import { BehaviorSubject } from 'rxjs';
import { Bucket, StorageClient } from '../../client';
import { YamcsService } from '../../core/services/YamcsService';
import { Option } from '../../shared/forms/Select';
import { CreateBucketDialog } from './CreateBucketDialog';
@Component({
templateUrl: './BucketsPage.html',
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class BucketsPage implements AfterViewInit {
@ViewChild(MatSort, { static: true })
sort: MatSort;
instance = '_global';
displayedColumns = [
'select',
'name',
'size',
'numObjects',
'actions',
];
filterForm = new FormGroup({
instance: new FormControl('_global'),
});
instanceOptions$ = new BehaviorSubject<Option[]>([
{ id: '_global', label: '_global' },
]);
dataSource = new MatTableDataSource<Bucket>();
selection = new SelectionModel<Bucket>(true, []);
private storageClient: StorageClient;
constructor(
private yamcs: YamcsService,
private dialog: MatDialog,
private router: Router,
private route: ActivatedRoute,
title: Title,
) {
title.setTitle('Buckets');
this.storageClient = this.yamcs.createStorageClient();
yamcs.yamcsClient.getInstances({
filter: 'state=RUNNING',
}).then(instances => {
for (const instance of instances) {
this.instanceOptions$.next([
...this.instanceOptions$.value,
{
id: instance.name,
label: instance.name,
}
]);
}
});
this.initializeOptions();
this.refreshDataSources();
this.filterForm.get('instance')!.valueChanges.forEach(instance => {
this.instance = instance;
this.refreshDataSources();
});
}
private initializeOptions() {
const queryParams = this.route.snapshot.queryParamMap;
if (queryParams.has('instance')) {
this.instance = queryParams.get('instance')!;
this.filterForm.get('instance')!.setValue(this.instance);
}
}
ngAfterViewInit() {
this.dataSource.sort = this.sort;
}
isAllSelected() {
const numSelected = this.selection.selected.length;
const numRows = this.dataSource.data.length;
return numSelected === numRows;
}
masterToggle() {
this.isAllSelected() ?
this.selection.clear() :
this.dataSource.data.forEach(row => this.selection.select(row));
}
toggleOne(row: Bucket) {
if (!this.selection.isSelected(row) || this.selection.selected.length > 1) {
this.selection.clear();
}
this.selection.toggle(row);
}
createBucket() {
const dialogRef = this.dialog.open(CreateBucketDialog, {
width: '400px',
data: {
bucketInstance: this.instance,
},
});
dialogRef.afterClosed().subscribe(result => {
if (result) {
this.refreshDataSources();
}
});
}
deleteSelectedBuckets() {
if (confirm('Are you sure you want to delete the selected buckets?')) {
const deletePromises = [];
for (const bucket of this.selection.selected) {
const promise = this.storageClient.deleteBucket(this.instance, bucket.name);
deletePromises.push(promise);
}
Promise.all(deletePromises).then(() => {
this.selection.clear();
this.refreshDataSources();
});
}
}
private refreshDataSources() {
this.updateURL();
this.storageClient.getBuckets(this.instance).then(buckets => {
this.dataSource.data = buckets;
});
}
private updateURL() |
}
| {
this.router.navigate([], {
replaceUrl: true,
relativeTo: this.route,
queryParams: {
instance: this.instance || null,
},
queryParamsHandling: 'merge',
});
} | identifier_body |
trackTimeOnSite.js | import { bind, unbind } from '@segmentstream/utils/eventListener'
import listenEvents from './listenEvents'
import Storage from '../Storage'
const timeout = 10 // 10 seconds | let hasActive = false
let events = []
const storagePrefix = 'timeOnSite:'
const storage = new Storage({ prefix: storagePrefix })
// Load from storage
let activeTime = storage.get('activeTime') || 0
let time = storage.get('time') || 0
const firedEventsJSON = storage.get('firedEvents')
let firedEvents = firedEventsJSON ? JSON.parse(firedEventsJSON) : []
const addEventsListener = () => {
listenEvents.forEach((eventName) => {
bind(window.document, eventName, setActive, false)
})
}
const removeEventsListener = () => {
listenEvents.forEach((eventName) => {
unbind(window.document, eventName, setActive, false)
})
}
const incActiveTime = () => {
activeTime += timeout
storage.set('activeTime', activeTime)
}
const incTime = () => {
time += timeout
storage.set('time', time)
}
const fireEvent = (eventName) => {
firedEvents.push(eventName)
storage.set('firedEvents', JSON.stringify(firedEvents))
}
const processEvents = () => {
if (hasActive) {
incActiveTime()
hasActive = false
}
incTime()
events.forEach((event) => {
const timeForEvent = event.isActiveTime ? activeTime : time
if (!firedEvents.includes(`${event.name}:${event.seconds}`) && event.seconds <= timeForEvent) {
event.handler(timeForEvent)
fireEvent(`${event.name}:${event.seconds}`)
}
})
}
const setActive = () => { hasActive = true }
const addEvent = (seconds, handler, eventName, isActiveTime) => {
events.push({ seconds, handler, isActiveTime, name: eventName })
}
const startTracking = () => {
interval = setInterval(processEvents, timeout * 1000)
addEventsListener()
}
const stopTracking = () => {
clearInterval(interval)
removeEventsListener()
}
startTracking()
export const reset = () => {
if (interval) {
stopTracking()
}
activeTime = 0
time = 0
hasActive = false
firedEvents = []
storage.remove('activeTime')
storage.remove('time')
storage.remove('firedEvents')
startTracking()
}
export default (seconds, handler, eventName, isActiveTime = false) => {
if (!seconds) return
if (typeof handler !== 'function') {
throw new TypeError('Must pass function handler to `ddManager.trackTimeOnSite`.')
}
String(seconds)
.replace(/\s+/mg, '')
.split(',')
.forEach((secondsStr) => {
const second = parseInt(secondsStr)
if (second > 0) {
addEvent(second, handler, eventName, isActiveTime)
}
})
} | let interval = null | random_line_split |
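// Usage sketch (hypothetical call sites -- the handlers and event names
// below are illustrative, not part of this module):
//
//   import trackTimeOnSite from './trackTimeOnSite'
//   // fire once at 30s and once at 60s of total time on site
//   trackTimeOnSite('30,60', seconds => console.log('on site:', seconds), 'Time on Site')
//   // fire once at 120s of *active* time (recent listened DOM events observed)
//   trackTimeOnSite(120, seconds => console.log('active:', seconds), 'Active Time', true)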