column      dtype           stats
file_name   large_string    lengths 4 to 140
prefix      large_string    lengths 0 to 12.1k
suffix      large_string    lengths 0 to 12k
middle      large_string    lengths 0 to 7.51k
fim_type    large_string    4 classes
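The columns describe fill-in-the-middle (FIM) style records: the text before the masked span (prefix), the text after it (suffix), the masked span itself (middle), the source file name, and a fim_type label for how the span was chosen (the four classes seen in the records below are conditional_block, identifier_name, identifier_body, and random_line_split). A minimal sketch of loading and inspecting a dataset with this schema using the Hugging Face datasets library follows; the dataset path "user/fim-corpus" and the split name are placeholders, not identifiers taken from this page.

# Minimal sketch: load a FIM-style dataset with the schema above and inspect it.
# "user/fim-corpus" is a placeholder path, not the real dataset name.
from collections import Counter
from datasets import load_dataset

ds = load_dataset("user/fim-corpus", split="train")

# Columns expected from the schema: file_name, prefix, suffix, middle, fim_type.
print(ds.column_names)

# fim_type has four classes; count how often each appears.
print(Counter(ds["fim_type"]))

# Reassembling prefix + middle + suffix recovers the original source text.
row = ds[0]
restored = row["prefix"] + row["middle"] + row["suffix"]
print(row["file_name"], len(restored), "characters")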
test_collection_times.py
from concurrency.get_websites import get_number_of_links import time # Run get_number_of_links and compare it to a serial version # stub out load_url with a sleep function so the time is always the same # Show that the concurrent version takes less time than the serial import unittest from unittest.mock import patch, MagicMock from bs4 import BeautifulSoup from concurrency.get_websites import get_number_of_links, get_number_of_links_serial class TestConcurrency(unittest.TestCase): def setUp(self): self.loadtime = 1 self.fake_urls = ['url1','url2', 'url3'] @patch('concurrency.get_websites.BeautifulSoup') @patch('concurrency.get_websites.load_url') def test_concurrent_slower_than_serial(self, mock_load_url, bs_mock): """ Time the collection of data from websites """ bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>") bs_mock.return_value = bs_data mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime) concurrent_start = time.time() list(get_number_of_links(self.fake_urls)) concurrent_total = time.time() - concurrent_start serial_start = time.time() get_number_of_links_serial(self.fake_urls) serial_total = time.time() - serial_start print("Concurrent collection: {}".format(concurrent_total)) print("Serial collection: {}".format(serial_total)) self.assertLess(concurrent_total, serial_total) if __name__ == "__main__":
unittest.main()
conditional_block
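The first record above shows the layout: the prefix ends at the if __name__ == "__main__": guard, the middle is the single statement unittest.main(), the suffix is empty, and fim_type labels the masked span as a conditional_block. A common way to turn such records into training text is the PSM (prefix-suffix-middle) ordering with sentinel tokens; the sketch below assumes StarCoder-style sentinels (<fim_prefix>, <fim_suffix>, <fim_middle>), which are an illustrative choice and not something this dataset specifies.

# Hedged sketch: format one FIM record as a PSM training string.
# The sentinel strings are illustrative (StarCoder-style), not from this dataset.
def to_fim_prompt(row: dict) -> str:
    return (
        "<fim_prefix>" + row["prefix"]
        + "<fim_suffix>" + row["suffix"]
        + "<fim_middle>" + row["middle"]
    )

example = {
    "file_name": "test_collection_times.py",
    "prefix": 'if __name__ == "__main__":\n    ',  # tail of the record's prefix
    "suffix": "",
    "middle": "unittest.main()",
    "fim_type": "conditional_block",
}
print(to_fim_prompt(example))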
test_collection_times.py
from concurrency.get_websites import get_number_of_links import time # Run get_number_of_links and compare it to a serial version # stub out load_url with a sleep function so the time is always the same # Show that the concurrent version takes less time than the serial import unittest from unittest.mock import patch, MagicMock from bs4 import BeautifulSoup from concurrency.get_websites import get_number_of_links, get_number_of_links_serial class
(unittest.TestCase): def setUp(self): self.loadtime = 1 self.fake_urls = ['url1','url2', 'url3'] @patch('concurrency.get_websites.BeautifulSoup') @patch('concurrency.get_websites.load_url') def test_concurrent_slower_than_serial(self, mock_load_url, bs_mock): """ Time the collection of data from websites """ bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>") bs_mock.return_value = bs_data mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime) concurrent_start = time.time() list(get_number_of_links(self.fake_urls)) concurrent_total = time.time() - concurrent_start serial_start = time.time() get_number_of_links_serial(self.fake_urls) serial_total = time.time() - serial_start print("Concurrent collection: {}".format(concurrent_total)) print("Serial collection: {}".format(serial_total)) self.assertLess(concurrent_total, serial_total) if __name__ == "__main__": unittest.main()
TestConcurrency
identifier_name
test_collection_times.py
from concurrency.get_websites import get_number_of_links import time # Run get_number_of_links and compare it to a serial version # stub out load_url with a sleep function so the time is always the same # Show that the concurrent version takes less time than the serial import unittest from unittest.mock import patch, MagicMock from bs4 import BeautifulSoup from concurrency.get_websites import get_number_of_links, get_number_of_links_serial class TestConcurrency(unittest.TestCase): def setUp(self):
self.loadtime = 1 self.fake_urls = ['url1','url2', 'url3'] @patch('concurrency.get_websites.BeautifulSoup') @patch('concurrency.get_websites.load_url') def test_concurrent_slower_than_serial(self, mock_load_url, bs_mock): """ Time the collection of data from websites """ bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>") bs_mock.return_value = bs_data mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime) concurrent_start = time.time() list(get_number_of_links(self.fake_urls)) concurrent_total = time.time() - concurrent_start serial_start = time.time() get_number_of_links_serial(self.fake_urls) serial_total = time.time() - serial_start print("Concurrent collection: {}".format(concurrent_total)) print("Serial collection: {}".format(serial_total)) self.assertLess(concurrent_total, serial_total) if __name__ == "__main__": unittest.main()
random_line_split
test_collection_times.py
from concurrency.get_websites import get_number_of_links import time # Run get_number_of_links and compare it to a serial version # stub out load_url with a sleep function so the time is always the same # Show that the concurrent version takes less time than the serial import unittest from unittest.mock import patch, MagicMock from bs4 import BeautifulSoup from concurrency.get_websites import get_number_of_links, get_number_of_links_serial class TestConcurrency(unittest.TestCase):
self.assertLess(concurrent_total, serial_total) if __name__ == "__main__": unittest.main()
def setUp(self): self.loadtime = 1 self.fake_urls = ['url1','url2', 'url3'] @patch('concurrency.get_websites.BeautifulSoup') @patch('concurrency.get_websites.load_url') def test_concurrent_slower_than_serial(self, mock_load_url, bs_mock): """ Time the collection of data from websites """ bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>") bs_mock.return_value = bs_data mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime) concurrent_start = time.time() list(get_number_of_links(self.fake_urls)) concurrent_total = time.time() - concurrent_start serial_start = time.time() get_number_of_links_serial(self.fake_urls) serial_total = time.time() - serial_start print("Concurrent collection: {}".format(concurrent_total)) print("Serial collection: {}".format(serial_total))
identifier_body
util.py
import subprocess import os import errno def
(url, local_fname=None, force_write=False): # requests is not default installed import requests if local_fname is None: local_fname = url.split('/')[-1] if not force_write and os.path.exists(local_fname): return local_fname dir_name = os.path.dirname(local_fname) if dir_name != "": if not os.path.exists(dir_name): try: # try to create the directory if it doesn't exists os.makedirs(dir_name) except OSError as exc: if exc.errno != errno.EEXIST: raise r = requests.get(url, stream=True) assert r.status_code == 200, "failed to open %s" % url with open(local_fname, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) return local_fname def get_gpus(): """ return a list of GPUs """ try: re = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True) except OSError: return [] return range(len([i for i in re.split('\n') if 'GPU' in i]))
download_file
identifier_name
util.py
import subprocess import os import errno def download_file(url, local_fname=None, force_write=False): # requests is not default installed import requests if local_fname is None: local_fname = url.split('/')[-1] if not force_write and os.path.exists(local_fname): return local_fname dir_name = os.path.dirname(local_fname) if dir_name != "": if not os.path.exists(dir_name): try: # try to create the directory if it doesn't exists os.makedirs(dir_name) except OSError as exc: if exc.errno != errno.EEXIST: raise r = requests.get(url, stream=True) assert r.status_code == 200, "failed to open %s" % url with open(local_fname, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) return local_fname def get_gpus():
""" return a list of GPUs """ try: re = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True) except OSError: return [] return range(len([i for i in re.split('\n') if 'GPU' in i]))
identifier_body
util.py
import subprocess import os import errno def download_file(url, local_fname=None, force_write=False): # requests is not default installed import requests if local_fname is None: local_fname = url.split('/')[-1] if not force_write and os.path.exists(local_fname): return local_fname dir_name = os.path.dirname(local_fname) if dir_name != "": if not os.path.exists(dir_name): try: # try to create the directory if it doesn't exists os.makedirs(dir_name) except OSError as exc: if exc.errno != errno.EEXIST:
r = requests.get(url, stream=True) assert r.status_code == 200, "failed to open %s" % url with open(local_fname, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) return local_fname def get_gpus(): """ return a list of GPUs """ try: re = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True) except OSError: return [] return range(len([i for i in re.split('\n') if 'GPU' in i]))
raise
conditional_block
util.py
import subprocess import os import errno def download_file(url, local_fname=None, force_write=False): # requests is not default installed import requests if local_fname is None: local_fname = url.split('/')[-1] if not force_write and os.path.exists(local_fname): return local_fname dir_name = os.path.dirname(local_fname) if dir_name != "":
if not os.path.exists(dir_name): try: # try to create the directory if it doesn't exists os.makedirs(dir_name) except OSError as exc: if exc.errno != errno.EEXIST: raise r = requests.get(url, stream=True) assert r.status_code == 200, "failed to open %s" % url with open(local_fname, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) return local_fname def get_gpus(): """ return a list of GPUs """ try: re = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True) except OSError: return [] return range(len([i for i in re.split('\n') if 'GPU' in i]))
random_line_split
pi.py
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main(): k, a, b, a1, b1 = 2L, 4L, 1L, 12L, 4L while 1: # Next approximation p, q, k = k*k, 2L*k+1L, k+1L a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a//b, a1//b1 while d == d1:
def output(d): # Use write() to avoid spaces between the digits # Use str() to avoid the 'L' sys.stdout.write(str(d)) # Flush so the output is seen immediately sys.stdout.flush() if __name__ == "__main__": main()
output(d) a, a1 = 10L*(a%b), 10L*(a1%b1) d, d1 = a//b, a1//b1
conditional_block
pi.py
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys
def main(): k, a, b, a1, b1 = 2L, 4L, 1L, 12L, 4L while 1: # Next approximation p, q, k = k*k, 2L*k+1L, k+1L a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a//b, a1//b1 while d == d1: output(d) a, a1 = 10L*(a%b), 10L*(a1%b1) d, d1 = a//b, a1//b1 def output(d): # Use write() to avoid spaces between the digits # Use str() to avoid the 'L' sys.stdout.write(str(d)) # Flush so the output is seen immediately sys.stdout.flush() if __name__ == "__main__": main()
random_line_split
pi.py
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main():
def output(d): # Use write() to avoid spaces between the digits # Use str() to avoid the 'L' sys.stdout.write(str(d)) # Flush so the output is seen immediately sys.stdout.flush() if __name__ == "__main__": main()
k, a, b, a1, b1 = 2L, 4L, 1L, 12L, 4L while 1: # Next approximation p, q, k = k*k, 2L*k+1L, k+1L a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a//b, a1//b1 while d == d1: output(d) a, a1 = 10L*(a%b), 10L*(a1%b1) d, d1 = a//b, a1//b1
identifier_body
pi.py
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def
(): k, a, b, a1, b1 = 2L, 4L, 1L, 12L, 4L while 1: # Next approximation p, q, k = k*k, 2L*k+1L, k+1L a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a//b, a1//b1 while d == d1: output(d) a, a1 = 10L*(a%b), 10L*(a1%b1) d, d1 = a//b, a1//b1 def output(d): # Use write() to avoid spaces between the digits # Use str() to avoid the 'L' sys.stdout.write(str(d)) # Flush so the output is seen immediately sys.stdout.flush() if __name__ == "__main__": main()
main
identifier_name
common.rs
This module contains various infrastructure that is common across all assembler backends use proc_macro2::{Span, TokenTree}; use quote::ToTokens; use quote::quote; use syn::spanned::Spanned; use syn::parse; use syn::Token; use crate::parse_helpers::{ParseOpt, eat_pseudo_keyword}; use crate::serialize; /// Enum representing the result size of a value/expression/register/etc in bytes. /// Uses the NASM syntax for sizes (a word is 16 bits) #[derive(Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone, Copy)] pub enum Size { BYTE = 1, WORD = 2, DWORD = 4, FWORD = 6, QWORD = 8, PWORD = 10, OWORD = 16, HWORD = 32, } impl Size { pub fn in_bytes(self) -> u8 { self as u8 } pub fn as_literal(self) -> syn::Ident { syn::Ident::new(match self { Size::BYTE => "i8", Size::WORD => "i16", Size::DWORD => "i32", Size::FWORD => "i48", Size::QWORD => "i64", Size::PWORD => "i80", Size::OWORD => "i128", Size::HWORD => "i256" }, Span::mixed_site()) } } /** * Jump types */ #[derive(Debug, Clone)] pub struct Jump { pub kind: JumpKind, pub offset: Option<syn::Expr> } #[derive(Debug, Clone)] pub enum JumpKind { // note: these symbol choices try to avoid stuff that is a valid starting symbol for parse_expr // in order to allow the full range of expressions to be used. the only currently existing ambiguity is // with the symbol <, as this symbol is also the starting symbol for the universal calling syntax <Type as Trait>.method(args) Global(syn::Ident), // -> label (["+" "-"] offset)? Backward(syn::Ident), // > label (["+" "-"] offset)? Forward(syn::Ident), // < label (["+" "-"] offset)? Dynamic(syn::Expr), // =>expr | => (expr) (["+" "-"] offset)? Bare(syn::Expr) // jump to this address } impl ParseOpt for Jump { fn parse(input: parse::ParseStream) -> parse::Result<Option<Jump>> { // extern label if eat_pseudo_keyword(input, "extern") { let expr: syn::Expr = input.parse()?; return Ok(Some(Jump { kind: JumpKind::Bare(expr), offset: None })); } // -> global_label let kind = if input.peek(Token![->]) { let _: Token![->] = input.parse()?; let name: syn::Ident = input.parse()?; JumpKind::Global(name) // > forward_label } else if input.peek(Token![>]) { let _: Token![>] = input.parse()?; let name: syn::Ident = input.parse()?; JumpKind::Forward(name) // < backwards_label } else if input.peek(Token![<]) { let _: Token![<] = input.parse()?; let name: syn::Ident = input.parse()?; JumpKind::Backward(name) // => dynamic_label } else if input.peek(Token![=>]) { let _: Token![=>] = input.parse()?; let expr: syn::Expr = if input.peek(syn::token::Paren) { let inner; let _ = syn::parenthesized!(inner in input); let inner = &inner; inner.parse()? } else { input.parse()? }; JumpKind::Dynamic(expr) // nothing } else { return Ok(None); }; // parse optional offset let offset = if input.peek(Token![-]) || input.peek(Token![+]) { if input.peek(Token![+]) { let _: Token![+] = input.parse()?; } let expr: syn::Expr = input.parse()?; Some(expr) } else { None }; Ok(Some(Jump::new(kind, offset))) } } impl Jump { pub fn new(kind: JumpKind, offset: Option<syn::Expr>) -> Jump { Jump { kind, offset } } /// Takes a jump and encodes it as a relocation starting `start_offset` bytes ago, relative to `ref_offset`. /// Any data detailing the type of relocation emitted should be contained in `data`, which is emitted as a tuple of u8's. 
pub fn encode(self, field_offset: u8, ref_offset: u8, data: &[u8]) -> Stmt { let span = self.span(); let target_offset = delimited(if let Some(offset) = self.offset { quote!(#offset) } else { quote!(0isize) }); // Create a relocation descriptor, containing all information about the actual jump except for the target itself. let relocation = Relocation { target_offset, field_offset, ref_offset, kind: serialize::expr_tuple_of_u8s(span, data) }; match self.kind { JumpKind::Global(ident) => Stmt::GlobalJumpTarget(ident, relocation), JumpKind::Backward(ident) => Stmt::BackwardJumpTarget(ident, relocation), JumpKind::Forward(ident) => Stmt::ForwardJumpTarget(ident, relocation), JumpKind::Dynamic(expr) => Stmt::DynamicJumpTarget(delimited(expr), relocation), JumpKind::Bare(expr) => Stmt::BareJumpTarget(delimited(expr), relocation), } } pub fn span(&self) -> Span { match &self.kind { JumpKind::Global(ident) => ident.span(), JumpKind::Backward(ident) => ident.span(), JumpKind::Forward(ident) => ident.span(), JumpKind::Dynamic(expr) => expr.span(), JumpKind::Bare(expr) => expr.span(), } } } /// A relocation entry description #[derive(Debug, Clone)] pub struct Relocation { pub target_offset: TokenTree, pub field_offset: u8, pub ref_offset: u8, pub kind: TokenTree, } /// An abstract representation of a dynasm runtime statement to be emitted #[derive(Debug, Clone)] pub enum Stmt { // simply push data into the instruction stream. unsigned Const(u64, Size), // push data that is stored inside of an expression. unsigned ExprUnsigned(TokenTree, Size), // push signed data into the instruction stream. signed ExprSigned(TokenTree, Size), // extend the instruction stream with unsigned bytes Extend(Vec<u8>), // extend the instruction stream with unsigned bytes ExprExtend(TokenTree), // align the instruction stream to some alignment Align(TokenTree, TokenTree), // label declarations GlobalLabel(syn::Ident), LocalLabel(syn::Ident), DynamicLabel(TokenTree), // and their respective relocations (as expressions as they differ per assembler). GlobalJumpTarget(syn::Ident, Relocation), ForwardJumpTarget(syn::Ident, Relocation), BackwardJumpTarget(syn::Ident, Relocation), DynamicJumpTarget(TokenTree, Relocation), BareJumpTarget(TokenTree, Relocation), // a statement that provides some information for the next statement, // and should therefore not be reordered with it PrefixStmt(TokenTree), // a random statement that has to be inserted between assembly hunks Stmt(TokenTree) } // convenience methods impl Stmt { #![allow(dead_code)] pub fn u8(value: u8) -> Stmt { Stmt::Const(u64::from(value), Size::BYTE) } pub fn
(value: u16) -> Stmt { Stmt::Const(u64::from(value), Size::WORD) } pub fn u32(value: u32) -> Stmt { Stmt::Const(u64::from(value), Size::DWORD) } pub fn u64(value: u64) -> Stmt { Stmt::Const(value, Size::QWORD) } } // Makes a None-delimited TokenTree item out of anything that can be converted to tokens. // This is a useful shortcut to escape issues around not-properly delimited tokenstreams // because it is guaranteed to be parsed back properly to its source ast at type-level. pub fn delimited<T: ToTokens>(expr: T) -> TokenTree { let span = expr.span(); let mut group = proc_macro2::Group::new( proc_macro2::Delimiter::None, expr.into_token_stream() ); group.set_span(span); proc_macro2::TokenTree::Group(group) } /// Create a bitmask with `scale` bits set pub fn bitmask(scale: u8) -> u32 { 1u32.checked_shl(u32::from(scale)).unwrap_or(0).wrapping_sub(1) } /// Create a bitmask with `scale` bits set pub fn bitmask64(scale: u8) -> u64 { 1u64.checked_shl(u32::from(scale)).unwrap_or(0).wrapping_sub(
u16
identifier_name
common.rs
//! This module contains various infrastructure that is common across all assembler backends use proc_macro2::{Span, TokenTree}; use quote::ToTokens; use quote::quote; use syn::spanned::Spanned; use syn::parse; use syn::Token; use crate::parse_helpers::{ParseOpt, eat_pseudo_keyword}; use crate::serialize; /// Enum representing the result size of a value/expression/register/etc in bytes. /// Uses the NASM syntax for sizes (a word is 16 bits) #[derive(Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone, Copy)] pub enum Size { BYTE = 1, WORD = 2, DWORD = 4, FWORD = 6, QWORD = 8, PWORD = 10, OWORD = 16, HWORD = 32, } impl Size { pub fn in_bytes(self) -> u8 { self as u8 } pub fn as_literal(self) -> syn::Ident { syn::Ident::new(match self { Size::BYTE => "i8", Size::WORD => "i16", Size::DWORD => "i32", Size::FWORD => "i48",
} } /** * Jump types */ #[derive(Debug, Clone)] pub struct Jump { pub kind: JumpKind, pub offset: Option<syn::Expr> } #[derive(Debug, Clone)] pub enum JumpKind { // note: these symbol choices try to avoid stuff that is a valid starting symbol for parse_expr // in order to allow the full range of expressions to be used. the only currently existing ambiguity is // with the symbol <, as this symbol is also the starting symbol for the universal calling syntax <Type as Trait>.method(args) Global(syn::Ident), // -> label (["+" "-"] offset)? Backward(syn::Ident), // > label (["+" "-"] offset)? Forward(syn::Ident), // < label (["+" "-"] offset)? Dynamic(syn::Expr), // =>expr | => (expr) (["+" "-"] offset)? Bare(syn::Expr) // jump to this address } impl ParseOpt for Jump { fn parse(input: parse::ParseStream) -> parse::Result<Option<Jump>> { // extern label if eat_pseudo_keyword(input, "extern") { let expr: syn::Expr = input.parse()?; return Ok(Some(Jump { kind: JumpKind::Bare(expr), offset: None })); } // -> global_label let kind = if input.peek(Token![->]) { let _: Token![->] = input.parse()?; let name: syn::Ident = input.parse()?; JumpKind::Global(name) // > forward_label } else if input.peek(Token![>]) { let _: Token![>] = input.parse()?; let name: syn::Ident = input.parse()?; JumpKind::Forward(name) // < backwards_label } else if input.peek(Token![<]) { let _: Token![<] = input.parse()?; let name: syn::Ident = input.parse()?; JumpKind::Backward(name) // => dynamic_label } else if input.peek(Token![=>]) { let _: Token![=>] = input.parse()?; let expr: syn::Expr = if input.peek(syn::token::Paren) { let inner; let _ = syn::parenthesized!(inner in input); let inner = &inner; inner.parse()? } else { input.parse()? }; JumpKind::Dynamic(expr) // nothing } else { return Ok(None); }; // parse optional offset let offset = if input.peek(Token![-]) || input.peek(Token![+]) { if input.peek(Token![+]) { let _: Token![+] = input.parse()?; } let expr: syn::Expr = input.parse()?; Some(expr) } else { None }; Ok(Some(Jump::new(kind, offset))) } } impl Jump { pub fn new(kind: JumpKind, offset: Option<syn::Expr>) -> Jump { Jump { kind, offset } } /// Takes a jump and encodes it as a relocation starting `start_offset` bytes ago, relative to `ref_offset`. /// Any data detailing the type of relocation emitted should be contained in `data`, which is emitted as a tuple of u8's. pub fn encode(self, field_offset: u8, ref_offset: u8, data: &[u8]) -> Stmt { let span = self.span(); let target_offset = delimited(if let Some(offset) = self.offset { quote!(#offset) } else { quote!(0isize) }); // Create a relocation descriptor, containing all information about the actual jump except for the target itself. 
let relocation = Relocation { target_offset, field_offset, ref_offset, kind: serialize::expr_tuple_of_u8s(span, data) }; match self.kind { JumpKind::Global(ident) => Stmt::GlobalJumpTarget(ident, relocation), JumpKind::Backward(ident) => Stmt::BackwardJumpTarget(ident, relocation), JumpKind::Forward(ident) => Stmt::ForwardJumpTarget(ident, relocation), JumpKind::Dynamic(expr) => Stmt::DynamicJumpTarget(delimited(expr), relocation), JumpKind::Bare(expr) => Stmt::BareJumpTarget(delimited(expr), relocation), } } pub fn span(&self) -> Span { match &self.kind { JumpKind::Global(ident) => ident.span(), JumpKind::Backward(ident) => ident.span(), JumpKind::Forward(ident) => ident.span(), JumpKind::Dynamic(expr) => expr.span(), JumpKind::Bare(expr) => expr.span(), } } } /// A relocation entry description #[derive(Debug, Clone)] pub struct Relocation { pub target_offset: TokenTree, pub field_offset: u8, pub ref_offset: u8, pub kind: TokenTree, } /// An abstract representation of a dynasm runtime statement to be emitted #[derive(Debug, Clone)] pub enum Stmt { // simply push data into the instruction stream. unsigned Const(u64, Size), // push data that is stored inside of an expression. unsigned ExprUnsigned(TokenTree, Size), // push signed data into the instruction stream. signed ExprSigned(TokenTree, Size), // extend the instruction stream with unsigned bytes Extend(Vec<u8>), // extend the instruction stream with unsigned bytes ExprExtend(TokenTree), // align the instruction stream to some alignment Align(TokenTree, TokenTree), // label declarations GlobalLabel(syn::Ident), LocalLabel(syn::Ident), DynamicLabel(TokenTree), // and their respective relocations (as expressions as they differ per assembler). GlobalJumpTarget(syn::Ident, Relocation), ForwardJumpTarget(syn::Ident, Relocation), BackwardJumpTarget(syn::Ident, Relocation), DynamicJumpTarget(TokenTree, Relocation), BareJumpTarget(TokenTree, Relocation), // a statement that provides some information for the next statement, // and should therefore not be reordered with it PrefixStmt(TokenTree), // a random statement that has to be inserted between assembly hunks Stmt(TokenTree) } // convenience methods impl Stmt { #![allow(dead_code)] pub fn u8(value: u8) -> Stmt { Stmt::Const(u64::from(value), Size::BYTE) } pub fn u16(value: u16) -> Stmt { Stmt::Const(u64::from(value), Size::WORD) } pub fn u32(value: u32) -> Stmt { Stmt::Const(u64::from(value), Size::DWORD) } pub fn u64(value: u64) -> Stmt { Stmt::Const(value, Size::QWORD) } } // Makes a None-delimited TokenTree item out of anything that can be converted to tokens. // This is a useful shortcut to escape issues around not-properly delimited tokenstreams // because it is guaranteed to be parsed back properly to its source ast at type-level. pub fn delimited<T: ToTokens>(expr: T) -> TokenTree { let span = expr.span(); let mut group = proc_macro2::Group::new( proc_macro2::Delimiter::None, expr.into_token_stream() ); group.set_span(span); proc_macro2::TokenTree::Group(group) } /// Create a bitmask with `scale` bits set pub fn bitmask(scale: u8) -> u32 { 1u32.checked_shl(u32::from(scale)).unwrap_or(0).wrapping_sub(1) } /// Create a bitmask with `scale` bits set pub fn bitmask64(scale: u8) -> u64 { 1u64.checked_shl(u32::from(scale)).unwrap_or(0).wrapping_sub(
Size::QWORD => "i64", Size::PWORD => "i80", Size::OWORD => "i128", Size::HWORD => "i256" }, Span::mixed_site())
random_line_split
sha.ts
"./custom_types"; import jsSHA1 from "./sha1"; import jsSHA256 from "./sha256"; import jsSHA512 from "./sha512"; import jsSHA3 from "./sha3"; type FixedLengthVariantType = | "SHA-1" | "SHA-224" | "SHA-256" | "SHA-384" | "SHA-512" | "SHA3-224" | "SHA3-256" | "SHA3-384" | "SHA3-512"; export default class jsSHA { private readonly shaObj: jsSHA1 | jsSHA256 | jsSHA512 | jsSHA3; /** * @param variant The desired SHA variant (SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA3-224, SHA3-256, SHA3-256, * SHA3-384, SHA3-512, SHAKE128, SHAKE256, CSHAKE128, CSHAKE256, KMAC128, or KMAC256) as a string. * @param inputFormat The input format to be used in future `update` calls (TEXT, HEX, B64, BYTES, ARRAYBUFFER, * or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE"; numRounds?: number }. * `encoding` is for only TEXT input (defaults to UTF8) and `numRounds` defaults to 1. * `numRounds` is not valid for any of the MAC or CSHAKE variants. * * If the variant supports HMAC, `options` may have an additional `hmacKey` key which must be in the form of * {value: <INPUT>, format: <FORMAT>, encoding?: "UTF8" | "UTF16BE" | "UTF16LE"} where <FORMAT> takes the same * values as `inputFormat` and <INPUT> can be a `string | ArrayBuffer | Uint8Array` depending on <FORMAT>. * Supplying this key switches to HMAC calculation and replaces the now deprecated call to `setHMACKey`. * * If the variant is CSHAKE128 or CSHAKE256, `options` may have two additional keys, `customization` and `funcName`, * which are the NIST customization and function-name strings. Both must be in the same form as `hmacKey`. * * If the variant is KMAC128 or KMAC256, `options` can include the `customization` key from CSHAKE variants and * *must* have a `kmacKey` key that takes the same form as the `customization` key. */ constructor(variant: FixedLengthVariantType, inputFormat: "TEXT", options?: FixedLengthOptionsEncodingType); constructor( variant: FixedLengthVariantType, inputFormat: FormatNoTextType, options?: FixedLengthOptionsNoEncodingType ); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: "TEXT", options?: SHAKEOptionsEncodingType); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: FormatNoTextType, options?: SHAKEOptionsNoEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: "TEXT", options?: CSHAKEOptionsEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: FormatNoTextType, options?: CSHAKEOptionsNoEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: "TEXT", options: KMACOptionsEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: FormatNoTextType, options: KMACOptionsNoEncodingType); // eslint-disable-next-line @typescript-eslint/no-explicit-any constructor(variant: any, inputFormat: any, options?: any) { if ("SHA-1" == variant) { this.shaObj = new jsSHA1(variant, inputFormat, options); } else if ("SHA-224" == variant || "SHA-256" == variant) { this.shaObj = new jsSHA256(variant, inputFormat, options); } else if ("SHA-384" == variant || "SHA-512" == variant) { this.shaObj = new jsSHA512(variant, inputFormat, options); } else if ( "SHA3-224" == variant || "SHA3-256" == variant || "SHA3-384" == variant || "SHA3-512" == variant || "SHAKE128" == variant || "SHAKE256" == variant || "CSHAKE128" == variant || "CSHAKE256" == variant || "KMAC128" == variant || "KMAC256" == variant )
else { throw new Error(sha_variant_error); } } /** * Takes `input` and hashes as many blocks as possible. Stores the rest for either a future `update` or `getHash` call. * * @param input The input to be hashed */ update(input: string | ArrayBuffer | Uint8Array): void { this.shaObj.update(input); } /** * Returns the desired SHA or MAC (if a HMAC/KMAC key was specified) hash of the input fed in via `update` calls. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { outputUpper?: boolean; b64Pad?: string; outputLen?: number; }. * `outputLen` is required for variable length output variants (this option was previously called `shakeLen` which * is now deprecated). * `outputUpper` is only for HEX output (defaults to false) and b64pad is only for B64 output (defaults to "="). * @returns The hash in the format specified. */ getHash(format: "HEX", options?: { outputUpper?: boolean; outputLen?: number; shakeLen?: number }): string; getHash(format: "B64", options?: { b64Pad?: string; outputLen?: number; shakeLen?: number }): string; getHash(format: "BYTES", options?: { outputLen?: number; shakeLen?: number }): string; getHash(format: "UINT8ARRAY", options?: { outputLen?: number; shakeLen?: number }): Uint8Array; getHash(format: "ARRAYBUFFER", options?: { outputLen?: number; shakeLen?: number }): ArrayBuffer; // eslint-disable-next-line @typescript-eslint/no-explicit-any getHash(format: any, options?: any): any { return this.shaObj.getHash(format, options); } /** * Sets the HMAC key for an eventual `getHMAC` call. Must be called immediately after jsSHA object instantiation. * Now deprecated in favor of setting the `hmacKey` at object instantiation. * * @param key The key used to calculate the HMAC * @param inputFormat The format of key (HEX, TEXT, B64, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE }. `encoding` is only for TEXT * and defaults to UTF8. */ setHMACKey(key: string, inputFormat: "TEXT", options?: { encoding?: EncodingType }): void; setHMACKey(key: string, inputFormat: "B64" | "HEX" | "BYTES"): void; setHMACKey(key: ArrayBuffer, inputFormat: "ARRAYBUFFER"): void; setHMACKey(key: Uint8Array, inputFormat: "UINT8ARRAY"): void; // eslint-disable-next-line @typescript-eslint/no-explicit-any setHMACKey(key: any, inputFormat: any, options?: any): void { this.shaObj.setHMACKey(key, inputFormat, options); } /** * Returns the the HMAC in the specified format using the key given by a previous `setHMACKey` call. Now deprecated * in favor of just calling `getHash`. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY
{ this.shaObj = new jsSHA3(variant, inputFormat, options); }
conditional_block
sha.ts
The desired SHA variant (SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA3-224, SHA3-256, SHA3-256, * SHA3-384, SHA3-512, SHAKE128, SHAKE256, CSHAKE128, CSHAKE256, KMAC128, or KMAC256) as a string. * @param inputFormat The input format to be used in future `update` calls (TEXT, HEX, B64, BYTES, ARRAYBUFFER, * or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE"; numRounds?: number }. * `encoding` is for only TEXT input (defaults to UTF8) and `numRounds` defaults to 1. * `numRounds` is not valid for any of the MAC or CSHAKE variants. * * If the variant supports HMAC, `options` may have an additional `hmacKey` key which must be in the form of * {value: <INPUT>, format: <FORMAT>, encoding?: "UTF8" | "UTF16BE" | "UTF16LE"} where <FORMAT> takes the same * values as `inputFormat` and <INPUT> can be a `string | ArrayBuffer | Uint8Array` depending on <FORMAT>. * Supplying this key switches to HMAC calculation and replaces the now deprecated call to `setHMACKey`. * * If the variant is CSHAKE128 or CSHAKE256, `options` may have two additional keys, `customization` and `funcName`, * which are the NIST customization and function-name strings. Both must be in the same form as `hmacKey`. * * If the variant is KMAC128 or KMAC256, `options` can include the `customization` key from CSHAKE variants and * *must* have a `kmacKey` key that takes the same form as the `customization` key. */ constructor(variant: FixedLengthVariantType, inputFormat: "TEXT", options?: FixedLengthOptionsEncodingType); constructor( variant: FixedLengthVariantType, inputFormat: FormatNoTextType, options?: FixedLengthOptionsNoEncodingType ); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: "TEXT", options?: SHAKEOptionsEncodingType); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: FormatNoTextType, options?: SHAKEOptionsNoEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: "TEXT", options?: CSHAKEOptionsEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: FormatNoTextType, options?: CSHAKEOptionsNoEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: "TEXT", options: KMACOptionsEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: FormatNoTextType, options: KMACOptionsNoEncodingType); // eslint-disable-next-line @typescript-eslint/no-explicit-any constructor(variant: any, inputFormat: any, options?: any) { if ("SHA-1" == variant) { this.shaObj = new jsSHA1(variant, inputFormat, options); } else if ("SHA-224" == variant || "SHA-256" == variant) { this.shaObj = new jsSHA256(variant, inputFormat, options); } else if ("SHA-384" == variant || "SHA-512" == variant) { this.shaObj = new jsSHA512(variant, inputFormat, options); } else if ( "SHA3-224" == variant || "SHA3-256" == variant || "SHA3-384" == variant || "SHA3-512" == variant || "SHAKE128" == variant || "SHAKE256" == variant || "CSHAKE128" == variant || "CSHAKE256" == variant || "KMAC128" == variant || "KMAC256" == variant ) { this.shaObj = new jsSHA3(variant, inputFormat, options); } else { throw new Error(sha_variant_error); } } /** * Takes `input` and hashes as many blocks as possible. Stores the rest for either a future `update` or `getHash` call. * * @param input The input to be hashed */ update(input: string | ArrayBuffer | Uint8Array): void { this.shaObj.update(input); } /** * Returns the desired SHA or MAC (if a HMAC/KMAC key was specified) hash of the input fed in via `update` calls. 
* * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { outputUpper?: boolean; b64Pad?: string; outputLen?: number; }. * `outputLen` is required for variable length output variants (this option was previously called `shakeLen` which * is now deprecated). * `outputUpper` is only for HEX output (defaults to false) and b64pad is only for B64 output (defaults to "="). * @returns The hash in the format specified. */ getHash(format: "HEX", options?: { outputUpper?: boolean; outputLen?: number; shakeLen?: number }): string; getHash(format: "B64", options?: { b64Pad?: string; outputLen?: number; shakeLen?: number }): string; getHash(format: "BYTES", options?: { outputLen?: number; shakeLen?: number }): string; getHash(format: "UINT8ARRAY", options?: { outputLen?: number; shakeLen?: number }): Uint8Array; getHash(format: "ARRAYBUFFER", options?: { outputLen?: number; shakeLen?: number }): ArrayBuffer; // eslint-disable-next-line @typescript-eslint/no-explicit-any getHash(format: any, options?: any): any { return this.shaObj.getHash(format, options); } /** * Sets the HMAC key for an eventual `getHMAC` call. Must be called immediately after jsSHA object instantiation. * Now deprecated in favor of setting the `hmacKey` at object instantiation. * * @param key The key used to calculate the HMAC * @param inputFormat The format of key (HEX, TEXT, B64, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE }. `encoding` is only for TEXT * and defaults to UTF8. */ setHMACKey(key: string, inputFormat: "TEXT", options?: { encoding?: EncodingType }): void; setHMACKey(key: string, inputFormat: "B64" | "HEX" | "BYTES"): void; setHMACKey(key: ArrayBuffer, inputFormat: "ARRAYBUFFER"): void; setHMACKey(key: Uint8Array, inputFormat: "UINT8ARRAY"): void; // eslint-disable-next-line @typescript-eslint/no-explicit-any setHMACKey(key: any, inputFormat: any, options?: any): void { this.shaObj.setHMACKey(key, inputFormat, options); } /** * Returns the the HMAC in the specified format using the key given by a previous `setHMACKey` call. Now deprecated * in favor of just calling `getHash`. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { outputUpper?: boolean; b64Pad?: string }. `outputUpper` is only for HEX * output (defaults to false) and `b64pad` is only for B64 output (defaults to "="). * @returns The HMAC in the format specified. */ getHMAC(format: "HEX", options?: { outputUpper?: boolean }): string; getHMAC(format: "B64", options?: { b64Pad?: string }): string; getHMAC(format: "BYTES"): string; getHMAC(format: "UINT8ARRAY"): Uint8Array; getHMAC(format: "ARRAYBUFFER"): ArrayBuffer; // eslint-disable-next-line @typescript-eslint/no-explicit-any
getHMAC
identifier_name
sha.ts
"./custom_types"; import jsSHA1 from "./sha1"; import jsSHA256 from "./sha256"; import jsSHA512 from "./sha512"; import jsSHA3 from "./sha3"; type FixedLengthVariantType = | "SHA-1" | "SHA-224" | "SHA-256" | "SHA-384" | "SHA-512" | "SHA3-224" | "SHA3-256" | "SHA3-384" | "SHA3-512"; export default class jsSHA { private readonly shaObj: jsSHA1 | jsSHA256 | jsSHA512 | jsSHA3; /** * @param variant The desired SHA variant (SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA3-224, SHA3-256, SHA3-256, * SHA3-384, SHA3-512, SHAKE128, SHAKE256, CSHAKE128, CSHAKE256, KMAC128, or KMAC256) as a string. * @param inputFormat The input format to be used in future `update` calls (TEXT, HEX, B64, BYTES, ARRAYBUFFER, * or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE"; numRounds?: number }. * `encoding` is for only TEXT input (defaults to UTF8) and `numRounds` defaults to 1. * `numRounds` is not valid for any of the MAC or CSHAKE variants. * * If the variant supports HMAC, `options` may have an additional `hmacKey` key which must be in the form of * {value: <INPUT>, format: <FORMAT>, encoding?: "UTF8" | "UTF16BE" | "UTF16LE"} where <FORMAT> takes the same * values as `inputFormat` and <INPUT> can be a `string | ArrayBuffer | Uint8Array` depending on <FORMAT>. * Supplying this key switches to HMAC calculation and replaces the now deprecated call to `setHMACKey`. * * If the variant is CSHAKE128 or CSHAKE256, `options` may have two additional keys, `customization` and `funcName`, * which are the NIST customization and function-name strings. Both must be in the same form as `hmacKey`. * * If the variant is KMAC128 or KMAC256, `options` can include the `customization` key from CSHAKE variants and * *must* have a `kmacKey` key that takes the same form as the `customization` key. 
*/ constructor(variant: FixedLengthVariantType, inputFormat: "TEXT", options?: FixedLengthOptionsEncodingType); constructor( variant: FixedLengthVariantType, inputFormat: FormatNoTextType, options?: FixedLengthOptionsNoEncodingType ); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: "TEXT", options?: SHAKEOptionsEncodingType); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: FormatNoTextType, options?: SHAKEOptionsNoEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: "TEXT", options?: CSHAKEOptionsEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: FormatNoTextType, options?: CSHAKEOptionsNoEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: "TEXT", options: KMACOptionsEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: FormatNoTextType, options: KMACOptionsNoEncodingType); // eslint-disable-next-line @typescript-eslint/no-explicit-any constructor(variant: any, inputFormat: any, options?: any) { if ("SHA-1" == variant) { this.shaObj = new jsSHA1(variant, inputFormat, options); } else if ("SHA-224" == variant || "SHA-256" == variant) { this.shaObj = new jsSHA256(variant, inputFormat, options); } else if ("SHA-384" == variant || "SHA-512" == variant) { this.shaObj = new jsSHA512(variant, inputFormat, options); } else if ( "SHA3-224" == variant || "SHA3-256" == variant || "SHA3-384" == variant || "SHA3-512" == variant || "SHAKE128" == variant || "SHAKE256" == variant || "CSHAKE128" == variant || "CSHAKE256" == variant || "KMAC128" == variant || "KMAC256" == variant ) { this.shaObj = new jsSHA3(variant, inputFormat, options); } else { throw new Error(sha_variant_error); } } /** * Takes `input` and hashes as many blocks as possible. Stores the rest for either a future `update` or `getHash` call. * * @param input The input to be hashed */ update(input: string | ArrayBuffer | Uint8Array): void { this.shaObj.update(input); } /** * Returns the desired SHA or MAC (if a HMAC/KMAC key was specified) hash of the input fed in via `update` calls. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { outputUpper?: boolean; b64Pad?: string; outputLen?: number; }. * `outputLen` is required for variable length output variants (this option was previously called `shakeLen` which * is now deprecated). * `outputUpper` is only for HEX output (defaults to false) and b64pad is only for B64 output (defaults to "="). * @returns The hash in the format specified. */ getHash(format: "HEX", options?: { outputUpper?: boolean; outputLen?: number; shakeLen?: number }): string; getHash(format: "B64", options?: { b64Pad?: string; outputLen?: number; shakeLen?: number }): string; getHash(format: "BYTES", options?: { outputLen?: number; shakeLen?: number }): string; getHash(format: "UINT8ARRAY", options?: { outputLen?: number; shakeLen?: number }): Uint8Array; getHash(format: "ARRAYBUFFER", options?: { outputLen?: number; shakeLen?: number }): ArrayBuffer; // eslint-disable-next-line @typescript-eslint/no-explicit-any getHash(format: any, options?: any): any { return this.shaObj.getHash(format, options); } /** * Sets the HMAC key for an eventual `getHMAC` call. Must be called immediately after jsSHA object instantiation. * Now deprecated in favor of setting the `hmacKey` at object instantiation. 
* * @param key The key used to calculate the HMAC * @param inputFormat The format of key (HEX, TEXT, B64, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE }. `encoding` is only for TEXT * and defaults to UTF8. */ setHMACKey(key: string, inputFormat: "TEXT", options?: { encoding?: EncodingType }): void; setHMACKey(key: string, inputFormat: "B64" | "HEX" | "BYTES"): void; setHMACKey(key: ArrayBuffer, inputFormat: "ARRAYBUFFER"): void; setHMACKey(key: Uint8Array, inputFormat: "UINT8ARRAY"): void; // eslint-disable-next-line @typescript-eslint/no-explicit-any setHMACKey(key: any, inputFormat: any, options?: any): void
/** * Returns the the HMAC in the specified format using the key given by a previous `setHMACKey` call. Now deprecated * in favor of just calling `getHash`. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8
{ this.shaObj.setHMACKey(key, inputFormat, options); }
identifier_body
sha.ts
from "./custom_types"; import jsSHA1 from "./sha1"; import jsSHA256 from "./sha256"; import jsSHA512 from "./sha512"; import jsSHA3 from "./sha3"; type FixedLengthVariantType = | "SHA-1" | "SHA-224" | "SHA-256" | "SHA-384" | "SHA-512" | "SHA3-224" | "SHA3-256" | "SHA3-384" | "SHA3-512"; export default class jsSHA { private readonly shaObj: jsSHA1 | jsSHA256 | jsSHA512 | jsSHA3; /** * @param variant The desired SHA variant (SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA3-224, SHA3-256, SHA3-256, * SHA3-384, SHA3-512, SHAKE128, SHAKE256, CSHAKE128, CSHAKE256, KMAC128, or KMAC256) as a string. * @param inputFormat The input format to be used in future `update` calls (TEXT, HEX, B64, BYTES, ARRAYBUFFER,
* {value: <INPUT>, format: <FORMAT>, encoding?: "UTF8" | "UTF16BE" | "UTF16LE"} where <FORMAT> takes the same * values as `inputFormat` and <INPUT> can be a `string | ArrayBuffer | Uint8Array` depending on <FORMAT>. * Supplying this key switches to HMAC calculation and replaces the now deprecated call to `setHMACKey`. * * If the variant is CSHAKE128 or CSHAKE256, `options` may have two additional keys, `customization` and `funcName`, * which are the NIST customization and function-name strings. Both must be in the same form as `hmacKey`. * * If the variant is KMAC128 or KMAC256, `options` can include the `customization` key from CSHAKE variants and * *must* have a `kmacKey` key that takes the same form as the `customization` key. */ constructor(variant: FixedLengthVariantType, inputFormat: "TEXT", options?: FixedLengthOptionsEncodingType); constructor( variant: FixedLengthVariantType, inputFormat: FormatNoTextType, options?: FixedLengthOptionsNoEncodingType ); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: "TEXT", options?: SHAKEOptionsEncodingType); constructor(variant: "SHAKE128" | "SHAKE256", inputFormat: FormatNoTextType, options?: SHAKEOptionsNoEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: "TEXT", options?: CSHAKEOptionsEncodingType); constructor(variant: "CSHAKE128" | "CSHAKE256", inputFormat: FormatNoTextType, options?: CSHAKEOptionsNoEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: "TEXT", options: KMACOptionsEncodingType); constructor(variant: "KMAC128" | "KMAC256", inputFormat: FormatNoTextType, options: KMACOptionsNoEncodingType); // eslint-disable-next-line @typescript-eslint/no-explicit-any constructor(variant: any, inputFormat: any, options?: any) { if ("SHA-1" == variant) { this.shaObj = new jsSHA1(variant, inputFormat, options); } else if ("SHA-224" == variant || "SHA-256" == variant) { this.shaObj = new jsSHA256(variant, inputFormat, options); } else if ("SHA-384" == variant || "SHA-512" == variant) { this.shaObj = new jsSHA512(variant, inputFormat, options); } else if ( "SHA3-224" == variant || "SHA3-256" == variant || "SHA3-384" == variant || "SHA3-512" == variant || "SHAKE128" == variant || "SHAKE256" == variant || "CSHAKE128" == variant || "CSHAKE256" == variant || "KMAC128" == variant || "KMAC256" == variant ) { this.shaObj = new jsSHA3(variant, inputFormat, options); } else { throw new Error(sha_variant_error); } } /** * Takes `input` and hashes as many blocks as possible. Stores the rest for either a future `update` or `getHash` call. * * @param input The input to be hashed */ update(input: string | ArrayBuffer | Uint8Array): void { this.shaObj.update(input); } /** * Returns the desired SHA or MAC (if a HMAC/KMAC key was specified) hash of the input fed in via `update` calls. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { outputUpper?: boolean; b64Pad?: string; outputLen?: number; }. * `outputLen` is required for variable length output variants (this option was previously called `shakeLen` which * is now deprecated). * `outputUpper` is only for HEX output (defaults to false) and b64pad is only for B64 output (defaults to "="). * @returns The hash in the format specified. 
*/ getHash(format: "HEX", options?: { outputUpper?: boolean; outputLen?: number; shakeLen?: number }): string; getHash(format: "B64", options?: { b64Pad?: string; outputLen?: number; shakeLen?: number }): string; getHash(format: "BYTES", options?: { outputLen?: number; shakeLen?: number }): string; getHash(format: "UINT8ARRAY", options?: { outputLen?: number; shakeLen?: number }): Uint8Array; getHash(format: "ARRAYBUFFER", options?: { outputLen?: number; shakeLen?: number }): ArrayBuffer; // eslint-disable-next-line @typescript-eslint/no-explicit-any getHash(format: any, options?: any): any { return this.shaObj.getHash(format, options); } /** * Sets the HMAC key for an eventual `getHMAC` call. Must be called immediately after jsSHA object instantiation. * Now deprecated in favor of setting the `hmacKey` at object instantiation. * * @param key The key used to calculate the HMAC * @param inputFormat The format of key (HEX, TEXT, B64, BYTES, ARRAYBUFFER, or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE }. `encoding` is only for TEXT * and defaults to UTF8. */ setHMACKey(key: string, inputFormat: "TEXT", options?: { encoding?: EncodingType }): void; setHMACKey(key: string, inputFormat: "B64" | "HEX" | "BYTES"): void; setHMACKey(key: ArrayBuffer, inputFormat: "ARRAYBUFFER"): void; setHMACKey(key: Uint8Array, inputFormat: "UINT8ARRAY"): void; // eslint-disable-next-line @typescript-eslint/no-explicit-any setHMACKey(key: any, inputFormat: any, options?: any): void { this.shaObj.setHMACKey(key, inputFormat, options); } /** * Returns the the HMAC in the specified format using the key given by a previous `setHMACKey` call. Now deprecated * in favor of just calling `getHash`. * * @param format The desired output formatting (B64, HEX, BYTES, ARRAYBUFFER, or UINT8ARRAY)
* or UINT8ARRAY) as a string. * @param options Options in the form of { encoding?: "UTF8" | "UTF16BE" | "UTF16LE"; numRounds?: number }. * `encoding` is for only TEXT input (defaults to UTF8) and `numRounds` defaults to 1. * `numRounds` is not valid for any of the MAC or CSHAKE variants. * * If the variant supports HMAC, `options` may have an additional `hmacKey` key which must be in the form of
random_line_split
react-user-tour-tests.tsx
// Tests for type definitions for react-user-tour // Project: https://github.com/socialtables/react-user-tour // Definitions by: Carlo Cancellieri <https://github.com/ccancellieri> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped /// <reference types='react-dom' /> import * as React from 'react'; import * as ReactDOM from 'react-dom'; import ReactUserTour from 'react-user-tour'; interface State { tourStep:number; isTourActive:boolean; } class TestApp extends React.Component<{}, State> {
(p:any){ super(p); this.setState({ isTourActive:true, tourStep:1 }); } render() { const Tour = <ReactUserTour active={this.state.isTourActive} step={this.state.tourStep} onNext={(step:number) => this.setState({tourStep: step, isTourActive: true})} onBack={(step:number) => this.setState({tourStep: step, isTourActive: true})} onCancel={() => this.setState({tourStep: this.state.tourStep, isTourActive: false})} steps={[ { step: 1, selector: '.MyClass', title: <div>React User Tour</div>, body: <div>Provide a simple guided tour around a website utilizing css selectors.</div>, position: 'bottom' } ]} />; return <div id='test-app'> {Tour} </div>; } } ReactDOM.render(React.createElement(TestApp, {}), document.getElementById('test-app'));
constructor
identifier_name
react-user-tour-tests.tsx
// Tests for type definitions for react-user-tour // Project: https://github.com/socialtables/react-user-tour // Definitions by: Carlo Cancellieri <https://github.com/ccancellieri> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped /// <reference types='react-dom' /> import * as React from 'react'; import * as ReactDOM from 'react-dom'; import ReactUserTour from 'react-user-tour'; interface State { tourStep:number; isTourActive:boolean; } class TestApp extends React.Component<{}, State> { constructor(p:any)
render() { const Tour = <ReactUserTour active={this.state.isTourActive} step={this.state.tourStep} onNext={(step:number) => this.setState({tourStep: step, isTourActive: true})} onBack={(step:number) => this.setState({tourStep: step, isTourActive: true})} onCancel={() => this.setState({tourStep: this.state.tourStep, isTourActive: false})} steps={[ { step: 1, selector: '.MyClass', title: <div>React User Tour</div>, body: <div>Provide a simple guided tour around a website utilizing css selectors.</div>, position: 'bottom' } ]} />; return <div id='test-app'> {Tour} </div>; } } ReactDOM.render(React.createElement(TestApp, {}), document.getElementById('test-app'));
{ super(p); this.setState({ isTourActive:true, tourStep:1 }); }
identifier_body
react-user-tour-tests.tsx
// Tests for type definitions for react-user-tour // Project: https://github.com/socialtables/react-user-tour // Definitions by: Carlo Cancellieri <https://github.com/ccancellieri> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped /// <reference types='react-dom' /> import * as React from 'react'; import * as ReactDOM from 'react-dom'; import ReactUserTour from 'react-user-tour'; interface State { tourStep:number; isTourActive:boolean; } class TestApp extends React.Component<{}, State> { constructor(p:any){ super(p); this.setState({ isTourActive:true, tourStep:1 }); } render() { const Tour = <ReactUserTour active={this.state.isTourActive}
onBack={(step:number) => this.setState({tourStep: step, isTourActive: true})} onCancel={() => this.setState({tourStep: this.state.tourStep, isTourActive: false})} steps={[ { step: 1, selector: '.MyClass', title: <div>React User Tour</div>, body: <div>Provide a simple guided tour around a website utilizing css selectors.</div>, position: 'bottom' } ]} />; return <div id='test-app'> {Tour} </div>; } } ReactDOM.render(React.createElement(TestApp, {}), document.getElementById('test-app'));
step={this.state.tourStep} onNext={(step:number) => this.setState({tourStep: step, isTourActive: true})}
random_line_split
vector.rs
/* * Copyright 2018 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::marker::PhantomData; use std::mem::size_of; use std::slice::from_raw_parts; use std::str::from_utf8_unchecked; use endian_scalar::{EndianScalar, read_scalar}; use follow::Follow; use primitives::*; #[derive(Debug)] pub struct
<'a, T: 'a>(&'a [u8], usize, PhantomData<T>); impl<'a, T: 'a> Vector<'a, T> { #[inline(always)] pub fn new(buf: &'a [u8], loc: usize) -> Self { Vector { 0: buf, 1: loc, 2: PhantomData, } } #[inline(always)] pub fn len(&self) -> usize { read_scalar::<UOffsetT>(&self.0[self.1 as usize..]) as usize } } impl<'a, T: Follow<'a> + 'a> Vector<'a, T> { #[inline(always)] pub fn get(&self, idx: usize) -> T::Inner { debug_assert!(idx < read_scalar::<u32>(&self.0[self.1 as usize..]) as usize); let sz = size_of::<T>(); debug_assert!(sz > 0); T::follow(self.0, self.1 as usize + SIZE_UOFFSET + sz * idx) } } pub trait SafeSliceAccess {} impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> { pub fn safe_slice(self) -> &'a [T] { let buf = self.0; let loc = self.1; let sz = size_of::<T>(); debug_assert!(sz > 0); let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &'a [T] = unsafe { from_raw_parts(ptr, len) }; s } } impl SafeSliceAccess for u8 {} impl SafeSliceAccess for i8 {} impl SafeSliceAccess for bool {} #[cfg(target_endian = "little")] mod le_safe_slice_impls { impl super::SafeSliceAccess for u16 {} impl super::SafeSliceAccess for u32 {} impl super::SafeSliceAccess for u64 {} impl super::SafeSliceAccess for i16 {} impl super::SafeSliceAccess for i32 {} impl super::SafeSliceAccess for i64 {} impl super::SafeSliceAccess for f32 {} impl super::SafeSliceAccess for f64 {} } pub use self::le_safe_slice_impls::*; pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T { let sz = size_of::<T>(); let buf = &buf[loc..loc + sz]; let ptr = buf.as_ptr() as *const T; unsafe { &*ptr } } impl<'a> Follow<'a> for &'a str { type Inner = &'a str; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len]; let s = unsafe { from_utf8_unchecked(slice) }; s } } fn follow_slice_helper<T>(buf: &[u8], loc: usize) -> &[T] { let sz = size_of::<T>(); debug_assert!(sz > 0); let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &[T] = unsafe { from_raw_parts(ptr, len) }; s } /// Implement direct slice access if the host is little-endian. #[cfg(target_endian = "little")] impl<'a, T: EndianScalar> Follow<'a> for &'a [T] { type Inner = &'a [T]; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { follow_slice_helper::<T>(buf, loc) } } /// Implement Follow for all possible Vectors that have Follow-able elements. impl<'a, T: Follow<'a> + 'a> Follow<'a> for Vector<'a, T> { type Inner = Vector<'a, T>; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Vector::new(buf, loc) } }
Vector
identifier_name
vector.rs
/* * Copyright 2018 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::marker::PhantomData; use std::mem::size_of; use std::slice::from_raw_parts; use std::str::from_utf8_unchecked; use endian_scalar::{EndianScalar, read_scalar}; use follow::Follow; use primitives::*; #[derive(Debug)] pub struct Vector<'a, T: 'a>(&'a [u8], usize, PhantomData<T>); impl<'a, T: 'a> Vector<'a, T> { #[inline(always)] pub fn new(buf: &'a [u8], loc: usize) -> Self { Vector { 0: buf, 1: loc, 2: PhantomData, } } #[inline(always)] pub fn len(&self) -> usize { read_scalar::<UOffsetT>(&self.0[self.1 as usize..]) as usize } } impl<'a, T: Follow<'a> + 'a> Vector<'a, T> { #[inline(always)] pub fn get(&self, idx: usize) -> T::Inner { debug_assert!(idx < read_scalar::<u32>(&self.0[self.1 as usize..]) as usize); let sz = size_of::<T>(); debug_assert!(sz > 0); T::follow(self.0, self.1 as usize + SIZE_UOFFSET + sz * idx) } } pub trait SafeSliceAccess {} impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> { pub fn safe_slice(self) -> &'a [T] { let buf = self.0; let loc = self.1; let sz = size_of::<T>(); debug_assert!(sz > 0); let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &'a [T] = unsafe { from_raw_parts(ptr, len) }; s } } impl SafeSliceAccess for u8 {} impl SafeSliceAccess for i8 {} impl SafeSliceAccess for bool {} #[cfg(target_endian = "little")] mod le_safe_slice_impls { impl super::SafeSliceAccess for u16 {} impl super::SafeSliceAccess for u32 {} impl super::SafeSliceAccess for u64 {} impl super::SafeSliceAccess for i16 {} impl super::SafeSliceAccess for i32 {} impl super::SafeSliceAccess for i64 {} impl super::SafeSliceAccess for f32 {} impl super::SafeSliceAccess for f64 {} } pub use self::le_safe_slice_impls::*; pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T
impl<'a> Follow<'a> for &'a str { type Inner = &'a str; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len]; let s = unsafe { from_utf8_unchecked(slice) }; s } } fn follow_slice_helper<T>(buf: &[u8], loc: usize) -> &[T] { let sz = size_of::<T>(); debug_assert!(sz > 0); let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &[T] = unsafe { from_raw_parts(ptr, len) }; s } /// Implement direct slice access if the host is little-endian. #[cfg(target_endian = "little")] impl<'a, T: EndianScalar> Follow<'a> for &'a [T] { type Inner = &'a [T]; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { follow_slice_helper::<T>(buf, loc) } } /// Implement Follow for all possible Vectors that have Follow-able elements. impl<'a, T: Follow<'a> + 'a> Follow<'a> for Vector<'a, T> { type Inner = Vector<'a, T>; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Vector::new(buf, loc) } }
{ let sz = size_of::<T>(); let buf = &buf[loc..loc + sz]; let ptr = buf.as_ptr() as *const T; unsafe { &*ptr } }
identifier_body
vector.rs
/* * Copyright 2018 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::marker::PhantomData; use std::mem::size_of; use std::slice::from_raw_parts; use std::str::from_utf8_unchecked;
use primitives::*; #[derive(Debug)] pub struct Vector<'a, T: 'a>(&'a [u8], usize, PhantomData<T>); impl<'a, T: 'a> Vector<'a, T> { #[inline(always)] pub fn new(buf: &'a [u8], loc: usize) -> Self { Vector { 0: buf, 1: loc, 2: PhantomData, } } #[inline(always)] pub fn len(&self) -> usize { read_scalar::<UOffsetT>(&self.0[self.1 as usize..]) as usize } } impl<'a, T: Follow<'a> + 'a> Vector<'a, T> { #[inline(always)] pub fn get(&self, idx: usize) -> T::Inner { debug_assert!(idx < read_scalar::<u32>(&self.0[self.1 as usize..]) as usize); let sz = size_of::<T>(); debug_assert!(sz > 0); T::follow(self.0, self.1 as usize + SIZE_UOFFSET + sz * idx) } } pub trait SafeSliceAccess {} impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> { pub fn safe_slice(self) -> &'a [T] { let buf = self.0; let loc = self.1; let sz = size_of::<T>(); debug_assert!(sz > 0); let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &'a [T] = unsafe { from_raw_parts(ptr, len) }; s } } impl SafeSliceAccess for u8 {} impl SafeSliceAccess for i8 {} impl SafeSliceAccess for bool {} #[cfg(target_endian = "little")] mod le_safe_slice_impls { impl super::SafeSliceAccess for u16 {} impl super::SafeSliceAccess for u32 {} impl super::SafeSliceAccess for u64 {} impl super::SafeSliceAccess for i16 {} impl super::SafeSliceAccess for i32 {} impl super::SafeSliceAccess for i64 {} impl super::SafeSliceAccess for f32 {} impl super::SafeSliceAccess for f64 {} } pub use self::le_safe_slice_impls::*; pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T { let sz = size_of::<T>(); let buf = &buf[loc..loc + sz]; let ptr = buf.as_ptr() as *const T; unsafe { &*ptr } } impl<'a> Follow<'a> for &'a str { type Inner = &'a str; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len]; let s = unsafe { from_utf8_unchecked(slice) }; s } } fn follow_slice_helper<T>(buf: &[u8], loc: usize) -> &[T] { let sz = size_of::<T>(); debug_assert!(sz > 0); let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &[T] = unsafe { from_raw_parts(ptr, len) }; s } /// Implement direct slice access if the host is little-endian. #[cfg(target_endian = "little")] impl<'a, T: EndianScalar> Follow<'a> for &'a [T] { type Inner = &'a [T]; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { follow_slice_helper::<T>(buf, loc) } } /// Implement Follow for all possible Vectors that have Follow-able elements. impl<'a, T: Follow<'a> + 'a> Follow<'a> for Vector<'a, T> { type Inner = Vector<'a, T>; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Vector::new(buf, loc) } }
use endian_scalar::{EndianScalar, read_scalar}; use follow::Follow;
random_line_split
fakepuppy.py
#!/usr/bin/python import sys import getopt import time import os
DATA_DIR='/local/devel/guppy/testing/' opts, args = getopt.getopt(sys.argv[1:], 'c:t') transfer = False listdir = False for opt, optarg in opts: if opt == '-c': if optarg == 'get' or optarg == 'put': transfer = True if optarg == 'dir': listdir = True if optarg == 'cancel': os.system("pkill -f 'fakepuppy.py -c get.*'") if optarg == 'size': size = True if transfer: inc = 10 percent = 0.0 for i in xrange(100/inc): percent = percent + inc print >> sys.stderr, "\r%6.2f%%, %5.2f Mbits/s, %02d:%02d:%02d elapsed, %d:%02d:%02d remaining" % (percent, 2.2, 1, 1, 1, 2, 2, 2), time.sleep(0.5) print elif listdir: listing = open(DATA_DIR + 'puppy-listdir.txt') for line in listing: print line, listing.close() elif size: print 'Total %10u kiB %7u MiB %4u GiB' % (0, 0, 120) print 'Free %10u kiB %7u MiB %4u GiB' % (0, 500, 0) else: print opts, '|', args,
random_line_split
fakepuppy.py
#!/usr/bin/python import sys import getopt import time import os DATA_DIR='/local/devel/guppy/testing/' opts, args = getopt.getopt(sys.argv[1:], 'c:t') transfer = False listdir = False for opt, optarg in opts:
if transfer: inc = 10 percent = 0.0 for i in xrange(100/inc): percent = percent + inc print >> sys.stderr, "\r%6.2f%%, %5.2f Mbits/s, %02d:%02d:%02d elapsed, %d:%02d:%02d remaining" % (percent, 2.2, 1, 1, 1, 2, 2, 2), time.sleep(0.5) print elif listdir: listing = open(DATA_DIR + 'puppy-listdir.txt') for line in listing: print line, listing.close() elif size: print 'Total %10u kiB %7u MiB %4u GiB' % (0, 0, 120) print 'Free %10u kiB %7u MiB %4u GiB' % (0, 500, 0) else: print opts, '|', args,
if opt == '-c': if optarg == 'get' or optarg == 'put': transfer = True if optarg == 'dir': listdir = True if optarg == 'cancel': os.system("pkill -f 'fakepuppy.py -c get.*'") if optarg == 'size': size = True
conditional_block
middleware.py
try: from django.conf import settings STRACKS_CONNECTOR = settings.STRACKS_CONNECTOR except (ImportError, AttributeError): STRACKS_CONNECTOR = None STRACKS_API = None from stracks_api.api import API from stracks_api import client import django.http STRACKS_API = None if STRACKS_CONNECTOR: STRACKS_API = API() class StracksMiddleware(object): def process_request(self, request): if not STRACKS_API: return ## ## get useragent, ip, path ## fetch session, create one if necessary ## create request, store it in local thread storage useragent = request.META.get('HTTP_USER_AGENT', 'unknown') ip = request.META.get('REMOTE_ADDR', '<none>') path = request.get_full_path() sess = request.session.get('stracks-session') if sess is None: sess = STRACKS_API.session() request.session['stracks-session'] = sess request = sess.request(ip, useragent, path) client.set_request(request) def process_response(self, request, response):
client.set_request(None) return response def process_exception(self, request, exception): if not STRACKS_API: return ## do not log 404 exceptions, see issue #356 if isinstance(exception, django.http.Http404): return client.exception("Crash: %s" % exception)
if not STRACKS_API: return response r = client.get_request() if r: if not request.user.is_anonymous(): ## if there's an active user then he owns ## the request. We need to map it to an ## entity from django.utils.importlib import import_module ueb = getattr(settings, 'USER_ENTITY_BUILDER', None) if ueb: ## XXX error handling modstr, func = settings.USER_ENTITY_BUILDER.rsplit('.', 1) mod = import_module(modstr) f = getattr(mod, func) r.set_owner(f(request.user)) r.end()
identifier_body
middleware.py
try: from django.conf import settings STRACKS_CONNECTOR = settings.STRACKS_CONNECTOR except (ImportError, AttributeError): STRACKS_CONNECTOR = None STRACKS_API = None from stracks_api.api import API from stracks_api import client import django.http STRACKS_API = None if STRACKS_CONNECTOR: STRACKS_API = API() class StracksMiddleware(object): def process_request(self, request): if not STRACKS_API: return ## ## get useragent, ip, path ## fetch session, create one if necessary ## create request, store it in local thread storage useragent = request.META.get('HTTP_USER_AGENT', 'unknown') ip = request.META.get('REMOTE_ADDR', '<none>') path = request.get_full_path() sess = request.session.get('stracks-session') if sess is None: sess = STRACKS_API.session() request.session['stracks-session'] = sess request = sess.request(ip, useragent, path) client.set_request(request) def process_response(self, request, response): if not STRACKS_API: return response r = client.get_request() if r: if not request.user.is_anonymous(): ## if there's an active user then he owns ## the request. We need to map it to an ## entity from django.utils.importlib import import_module ueb = getattr(settings, 'USER_ENTITY_BUILDER', None) if ueb: ## XXX error handling modstr, func = settings.USER_ENTITY_BUILDER.rsplit('.', 1) mod = import_module(modstr) f = getattr(mod, func) r.set_owner(f(request.user)) r.end() client.set_request(None) return response def process_exception(self, request, exception): if not STRACKS_API: return ## do not log 404 exceptions, see issue #356 if isinstance(exception, django.http.Http404): return
client.exception("Crash: %s" % exception)
random_line_split
middleware.py
try: from django.conf import settings STRACKS_CONNECTOR = settings.STRACKS_CONNECTOR except (ImportError, AttributeError): STRACKS_CONNECTOR = None STRACKS_API = None from stracks_api.api import API from stracks_api import client import django.http STRACKS_API = None if STRACKS_CONNECTOR: STRACKS_API = API() class StracksMiddleware(object): def process_request(self, request): if not STRACKS_API: return ## ## get useragent, ip, path ## fetch session, create one if necessary ## create request, store it in local thread storage useragent = request.META.get('HTTP_USER_AGENT', 'unknown') ip = request.META.get('REMOTE_ADDR', '<none>') path = request.get_full_path() sess = request.session.get('stracks-session') if sess is None:
request = sess.request(ip, useragent, path) client.set_request(request) def process_response(self, request, response): if not STRACKS_API: return response r = client.get_request() if r: if not request.user.is_anonymous(): ## if there's an active user then he owns ## the request. We need to map it to an ## entity from django.utils.importlib import import_module ueb = getattr(settings, 'USER_ENTITY_BUILDER', None) if ueb: ## XXX error handling modstr, func = settings.USER_ENTITY_BUILDER.rsplit('.', 1) mod = import_module(modstr) f = getattr(mod, func) r.set_owner(f(request.user)) r.end() client.set_request(None) return response def process_exception(self, request, exception): if not STRACKS_API: return ## do not log 404 exceptions, see issue #356 if isinstance(exception, django.http.Http404): return client.exception("Crash: %s" % exception)
sess = STRACKS_API.session() request.session['stracks-session'] = sess
conditional_block
middleware.py
try: from django.conf import settings STRACKS_CONNECTOR = settings.STRACKS_CONNECTOR except (ImportError, AttributeError): STRACKS_CONNECTOR = None STRACKS_API = None from stracks_api.api import API from stracks_api import client import django.http STRACKS_API = None if STRACKS_CONNECTOR: STRACKS_API = API() class
(object): def process_request(self, request): if not STRACKS_API: return ## ## get useragent, ip, path ## fetch session, create one if necessary ## create request, store it in local thread storage useragent = request.META.get('HTTP_USER_AGENT', 'unknown') ip = request.META.get('REMOTE_ADDR', '<none>') path = request.get_full_path() sess = request.session.get('stracks-session') if sess is None: sess = STRACKS_API.session() request.session['stracks-session'] = sess request = sess.request(ip, useragent, path) client.set_request(request) def process_response(self, request, response): if not STRACKS_API: return response r = client.get_request() if r: if not request.user.is_anonymous(): ## if there's an active user then he owns ## the request. We need to map it to an ## entity from django.utils.importlib import import_module ueb = getattr(settings, 'USER_ENTITY_BUILDER', None) if ueb: ## XXX error handling modstr, func = settings.USER_ENTITY_BUILDER.rsplit('.', 1) mod = import_module(modstr) f = getattr(mod, func) r.set_owner(f(request.user)) r.end() client.set_request(None) return response def process_exception(self, request, exception): if not STRACKS_API: return ## do not log 404 exceptions, see issue #356 if isinstance(exception, django.http.Http404): return client.exception("Crash: %s" % exception)
StracksMiddleware
identifier_name
MARock.py
import os as os import numpy as np import scipy as sp from pathlib import Path from openpnm.utils import logging, Project from openpnm.network import GenericNetwork from openpnm.io import GenericIO from openpnm.topotools import trim logger = logging.getLogger(__name__) class
(GenericIO): r""" 3DMA-Rock is a network extraction algorithm developed by Brent Lindquist and his group It uses Medial Axis thinning to find the skeleton of the pore space, then extracts geometrical features such as pore volume and throat cross-sectional area. [1] Lindquist, W. Brent, S. M. Lee, W. Oh, A. B. Venkatarangan, H. Shin, and M. Prodanovic. "3DMA-Rock: A software package for automated analysis of rock pore structure in 3-D computed microtomography images." SUNY Stony Brook (2005). """ @classmethod def load(cls, path, voxel_size=1, project=None): r""" Load data from a 3DMA-Rock extracted network. This format consists of two files: 'rockname.np2th' and 'rockname.th2pn'. They should be stored together in a folder which is referred to by the path argument. These files are binary and therefore not human readable. Parameters ---------- path : string The location of the 'np2th' and 'th2np' files. This can be an absolute path or relative to the current working directory. network : OpenPNM Network Object If an Network object is recieved, this method will add new data to it but NOT overwrite anything that already exists. This can be used to append data from different sources. voxel_size : scalar The resolution of the image on which 3DMA-Rock was run, in terms of the linear length of eac voxel. The default is 1. This is used to scale the voxel counts to actual dimension. It is recommended that this value be in SI units [m] to work well with OpenPNM. project : OpenPNM Project object A GenericNetwork is created and added to the specified Project. If no Project is supplied then one will be created and returned. """ net = {} path = Path(path) path = path.resolve() for file in os.listdir(path): if file.endswith(".np2th"): np2th_file = os.path.join(path, file) elif file.endswith(".th2np"): th2np_file = os.path.join(path, file) with open(np2th_file, mode='rb') as f: [Np, Nt] = np.fromfile(file=f, count=2, dtype='u4') net['pore.boundary_type'] = sp.ndarray([Np, ], int) net['throat.conns'] = np.ones([Nt, 2], int)*(-1) net['pore.coordination'] = sp.ndarray([Np, ], int) net['pore.ID_number'] = sp.ndarray([Np, ], int) for i in range(0, Np): ID = np.fromfile(file=f, count=1, dtype='u4') net['pore.ID_number'][i] = ID net['pore.boundary_type'][i] = np.fromfile(file=f, count=1, dtype='u1') z = np.fromfile(file=f, count=1, dtype='u4')[0] net['pore.coordination'][i] = z att_pores = np.fromfile(file=f, count=z, dtype='u4') att_throats = np.fromfile(file=f, count=z, dtype='u4') for j in range(0, len(att_throats)): t = att_throats[j] - 1 p = att_pores[j] - 1 net['throat.conns'][t] = [i, p] net['throat.conns'] = np.sort(net['throat.conns'], axis=1) net['pore.volume'] = np.fromfile(file=f, count=Np, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Np, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['pore.coords'] = np.array([ni, nj, nk]).T with open(th2np_file, mode='rb') as f: Nt = np.fromfile(file=f, count=1, dtype='u4')[0] net['throat.area'] = np.ones([Nt, ], dtype=int)*(-1) for i in range(0, Nt): ID = np.fromfile(file=f, count=1, dtype='u4') net['throat.area'][i] = np.fromfile(file=f, count=1, dtype='f4') # numvox = np.fromfile(file=f, count=1, dtype='u4') att_pores = np.fromfile(file=f, count=2, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Nt, dtype='u4') ny = 
nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['throat.coords'] = np.array([ni, nj, nk]).T net['pore.internal'] = net['pore.boundary_type'] == 0 # Convert voxel area and volume to actual dimensions net['throat.area'] = (voxel_size**2)*net['throat.area'] net['pore.volume'] = (voxel_size**3)*net['pore.volume'] if project is None: project = Project(name=path) network = GenericNetwork(project=project) network = cls._update_network(network=network, net=net) # Trim headless throats before returning ind = np.where(network['throat.conns'][:, 0] == -1)[0] trim(network=network, throats=ind) return project
MARock
identifier_name
MARock.py
import os as os import numpy as np import scipy as sp from pathlib import Path from openpnm.utils import logging, Project from openpnm.network import GenericNetwork from openpnm.io import GenericIO from openpnm.topotools import trim logger = logging.getLogger(__name__) class MARock(GenericIO): r""" 3DMA-Rock is a network extraction algorithm developed by Brent Lindquist and his group It uses Medial Axis thinning to find the skeleton of the pore space, then extracts geometrical features such as pore volume and throat cross-sectional area. [1] Lindquist, W. Brent, S. M. Lee, W. Oh, A. B. Venkatarangan, H. Shin, and M. Prodanovic. "3DMA-Rock: A software package for automated analysis of rock pore structure in 3-D computed microtomography images." SUNY Stony Brook (2005). """ @classmethod def load(cls, path, voxel_size=1, project=None): r""" Load data from a 3DMA-Rock extracted network. This format consists of two files: 'rockname.np2th' and 'rockname.th2pn'. They should be stored together in a folder which is referred to by the path argument. These files are binary and therefore not human readable. Parameters ---------- path : string The location of the 'np2th' and 'th2np' files. This can be an absolute path or relative to the current working directory. network : OpenPNM Network Object If an Network object is recieved, this method will add new data to it but NOT overwrite anything that already exists. This can be used to append data from different sources. voxel_size : scalar The resolution of the image on which 3DMA-Rock was run, in terms of the linear length of eac voxel. The default is 1. This is used to scale the voxel counts to actual dimension. It is recommended that this value be in SI units [m] to work well with OpenPNM. project : OpenPNM Project object A GenericNetwork is created and added to the specified Project. If no Project is supplied then one will be created and returned. """ net = {} path = Path(path) path = path.resolve() for file in os.listdir(path):
with open(np2th_file, mode='rb') as f: [Np, Nt] = np.fromfile(file=f, count=2, dtype='u4') net['pore.boundary_type'] = sp.ndarray([Np, ], int) net['throat.conns'] = np.ones([Nt, 2], int)*(-1) net['pore.coordination'] = sp.ndarray([Np, ], int) net['pore.ID_number'] = sp.ndarray([Np, ], int) for i in range(0, Np): ID = np.fromfile(file=f, count=1, dtype='u4') net['pore.ID_number'][i] = ID net['pore.boundary_type'][i] = np.fromfile(file=f, count=1, dtype='u1') z = np.fromfile(file=f, count=1, dtype='u4')[0] net['pore.coordination'][i] = z att_pores = np.fromfile(file=f, count=z, dtype='u4') att_throats = np.fromfile(file=f, count=z, dtype='u4') for j in range(0, len(att_throats)): t = att_throats[j] - 1 p = att_pores[j] - 1 net['throat.conns'][t] = [i, p] net['throat.conns'] = np.sort(net['throat.conns'], axis=1) net['pore.volume'] = np.fromfile(file=f, count=Np, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Np, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['pore.coords'] = np.array([ni, nj, nk]).T with open(th2np_file, mode='rb') as f: Nt = np.fromfile(file=f, count=1, dtype='u4')[0] net['throat.area'] = np.ones([Nt, ], dtype=int)*(-1) for i in range(0, Nt): ID = np.fromfile(file=f, count=1, dtype='u4') net['throat.area'][i] = np.fromfile(file=f, count=1, dtype='f4') # numvox = np.fromfile(file=f, count=1, dtype='u4') att_pores = np.fromfile(file=f, count=2, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Nt, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['throat.coords'] = np.array([ni, nj, nk]).T net['pore.internal'] = net['pore.boundary_type'] == 0 # Convert voxel area and volume to actual dimensions net['throat.area'] = (voxel_size**2)*net['throat.area'] net['pore.volume'] = (voxel_size**3)*net['pore.volume'] if project is None: project = Project(name=path) network = GenericNetwork(project=project) network = cls._update_network(network=network, net=net) # Trim headless throats before returning ind = np.where(network['throat.conns'][:, 0] == -1)[0] trim(network=network, throats=ind) return project
if file.endswith(".np2th"): np2th_file = os.path.join(path, file) elif file.endswith(".th2np"): th2np_file = os.path.join(path, file)
conditional_block
MARock.py
import os as os import numpy as np import scipy as sp from pathlib import Path from openpnm.utils import logging, Project from openpnm.network import GenericNetwork from openpnm.io import GenericIO from openpnm.topotools import trim logger = logging.getLogger(__name__) class MARock(GenericIO): r""" 3DMA-Rock is a network extraction algorithm developed by Brent Lindquist and his group It uses Medial Axis thinning to find the skeleton of the pore space, then extracts geometrical features such as pore volume and throat cross-sectional area. [1] Lindquist, W. Brent, S. M. Lee, W. Oh, A. B. Venkatarangan, H. Shin, and M. Prodanovic. "3DMA-Rock: A software package for automated analysis of rock pore structure in 3-D computed microtomography images." SUNY Stony Brook (2005). """ @classmethod def load(cls, path, voxel_size=1, project=None):
scale the voxel counts to actual dimension. It is recommended that this value be in SI units [m] to work well with OpenPNM. project : OpenPNM Project object A GenericNetwork is created and added to the specified Project. If no Project is supplied then one will be created and returned. """ net = {} path = Path(path) path = path.resolve() for file in os.listdir(path): if file.endswith(".np2th"): np2th_file = os.path.join(path, file) elif file.endswith(".th2np"): th2np_file = os.path.join(path, file) with open(np2th_file, mode='rb') as f: [Np, Nt] = np.fromfile(file=f, count=2, dtype='u4') net['pore.boundary_type'] = sp.ndarray([Np, ], int) net['throat.conns'] = np.ones([Nt, 2], int)*(-1) net['pore.coordination'] = sp.ndarray([Np, ], int) net['pore.ID_number'] = sp.ndarray([Np, ], int) for i in range(0, Np): ID = np.fromfile(file=f, count=1, dtype='u4') net['pore.ID_number'][i] = ID net['pore.boundary_type'][i] = np.fromfile(file=f, count=1, dtype='u1') z = np.fromfile(file=f, count=1, dtype='u4')[0] net['pore.coordination'][i] = z att_pores = np.fromfile(file=f, count=z, dtype='u4') att_throats = np.fromfile(file=f, count=z, dtype='u4') for j in range(0, len(att_throats)): t = att_throats[j] - 1 p = att_pores[j] - 1 net['throat.conns'][t] = [i, p] net['throat.conns'] = np.sort(net['throat.conns'], axis=1) net['pore.volume'] = np.fromfile(file=f, count=Np, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Np, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['pore.coords'] = np.array([ni, nj, nk]).T with open(th2np_file, mode='rb') as f: Nt = np.fromfile(file=f, count=1, dtype='u4')[0] net['throat.area'] = np.ones([Nt, ], dtype=int)*(-1) for i in range(0, Nt): ID = np.fromfile(file=f, count=1, dtype='u4') net['throat.area'][i] = np.fromfile(file=f, count=1, dtype='f4') # numvox = np.fromfile(file=f, count=1, dtype='u4') att_pores = np.fromfile(file=f, count=2, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Nt, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['throat.coords'] = np.array([ni, nj, nk]).T net['pore.internal'] = net['pore.boundary_type'] == 0 # Convert voxel area and volume to actual dimensions net['throat.area'] = (voxel_size**2)*net['throat.area'] net['pore.volume'] = (voxel_size**3)*net['pore.volume'] if project is None: project = Project(name=path) network = GenericNetwork(project=project) network = cls._update_network(network=network, net=net) # Trim headless throats before returning ind = np.where(network['throat.conns'][:, 0] == -1)[0] trim(network=network, throats=ind) return project
r""" Load data from a 3DMA-Rock extracted network. This format consists of two files: 'rockname.np2th' and 'rockname.th2pn'. They should be stored together in a folder which is referred to by the path argument. These files are binary and therefore not human readable. Parameters ---------- path : string The location of the 'np2th' and 'th2np' files. This can be an absolute path or relative to the current working directory. network : OpenPNM Network Object If an Network object is recieved, this method will add new data to it but NOT overwrite anything that already exists. This can be used to append data from different sources. voxel_size : scalar The resolution of the image on which 3DMA-Rock was run, in terms of the linear length of eac voxel. The default is 1. This is used to
identifier_body
MARock.py
import os as os import numpy as np import scipy as sp from pathlib import Path from openpnm.utils import logging, Project from openpnm.network import GenericNetwork from openpnm.io import GenericIO from openpnm.topotools import trim logger = logging.getLogger(__name__)
class MARock(GenericIO): r""" 3DMA-Rock is a network extraction algorithm developed by Brent Lindquist and his group It uses Medial Axis thinning to find the skeleton of the pore space, then extracts geometrical features such as pore volume and throat cross-sectional area. [1] Lindquist, W. Brent, S. M. Lee, W. Oh, A. B. Venkatarangan, H. Shin, and M. Prodanovic. "3DMA-Rock: A software package for automated analysis of rock pore structure in 3-D computed microtomography images." SUNY Stony Brook (2005). """ @classmethod def load(cls, path, voxel_size=1, project=None): r""" Load data from a 3DMA-Rock extracted network. This format consists of two files: 'rockname.np2th' and 'rockname.th2pn'. They should be stored together in a folder which is referred to by the path argument. These files are binary and therefore not human readable. Parameters ---------- path : string The location of the 'np2th' and 'th2np' files. This can be an absolute path or relative to the current working directory. network : OpenPNM Network Object If an Network object is recieved, this method will add new data to it but NOT overwrite anything that already exists. This can be used to append data from different sources. voxel_size : scalar The resolution of the image on which 3DMA-Rock was run, in terms of the linear length of eac voxel. The default is 1. This is used to scale the voxel counts to actual dimension. It is recommended that this value be in SI units [m] to work well with OpenPNM. project : OpenPNM Project object A GenericNetwork is created and added to the specified Project. If no Project is supplied then one will be created and returned. """ net = {} path = Path(path) path = path.resolve() for file in os.listdir(path): if file.endswith(".np2th"): np2th_file = os.path.join(path, file) elif file.endswith(".th2np"): th2np_file = os.path.join(path, file) with open(np2th_file, mode='rb') as f: [Np, Nt] = np.fromfile(file=f, count=2, dtype='u4') net['pore.boundary_type'] = sp.ndarray([Np, ], int) net['throat.conns'] = np.ones([Nt, 2], int)*(-1) net['pore.coordination'] = sp.ndarray([Np, ], int) net['pore.ID_number'] = sp.ndarray([Np, ], int) for i in range(0, Np): ID = np.fromfile(file=f, count=1, dtype='u4') net['pore.ID_number'][i] = ID net['pore.boundary_type'][i] = np.fromfile(file=f, count=1, dtype='u1') z = np.fromfile(file=f, count=1, dtype='u4')[0] net['pore.coordination'][i] = z att_pores = np.fromfile(file=f, count=z, dtype='u4') att_throats = np.fromfile(file=f, count=z, dtype='u4') for j in range(0, len(att_throats)): t = att_throats[j] - 1 p = att_pores[j] - 1 net['throat.conns'][t] = [i, p] net['throat.conns'] = np.sort(net['throat.conns'], axis=1) net['pore.volume'] = np.fromfile(file=f, count=Np, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Np, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['pore.coords'] = np.array([ni, nj, nk]).T with open(th2np_file, mode='rb') as f: Nt = np.fromfile(file=f, count=1, dtype='u4')[0] net['throat.area'] = np.ones([Nt, ], dtype=int)*(-1) for i in range(0, Nt): ID = np.fromfile(file=f, count=1, dtype='u4') net['throat.area'][i] = np.fromfile(file=f, count=1, dtype='f4') # numvox = np.fromfile(file=f, count=1, dtype='u4') att_pores = np.fromfile(file=f, count=2, dtype='u4') nx = np.fromfile(file=f, count=1, dtype='u4') nxy = np.fromfile(file=f, count=1, dtype='u4') pos = np.fromfile(file=f, count=Nt, dtype='u4') ny = nxy/nx ni = np.mod(pos, nx) nj = np.mod(np.floor(pos/nx), ny) nk = np.floor(np.floor(pos/nx)/ny) net['throat.coords'] = np.array([ni, nj, nk]).T net['pore.internal'] = net['pore.boundary_type'] == 0 # Convert voxel area and volume to actual dimensions net['throat.area'] = (voxel_size**2)*net['throat.area'] net['pore.volume'] = (voxel_size**3)*net['pore.volume'] if project is None: project = Project(name=path) network = GenericNetwork(project=project) network = cls._update_network(network=network, net=net) # Trim headless throats before returning ind = np.where(network['throat.conns'][:, 0] == -1)[0] trim(network=network, throats=ind) return project
random_line_split
sudokus-tests.ts
import { solve, ProgressFn, Cell } from 'sudokus'; const onProgress: ProgressFn = (cell: Cell[][]) => { cell[0][0].fixed; cell[0][0].value; };
[ [0, 0, 0, 2, 9, 0, 1, 0, 0], [6, 0, 0, 5, 0, 1, 0, 7, 0], [0, 0, 0, 0, 0, 0, 0, 3, 4], [0, 0, 0, 0, 0, 0, 9, 4, 0], [4, 5, 0, 3, 0, 0, 0, 6, 2], [2, 0, 9, 0, 0, 4, 3, 1, 0], [0, 2, 0, 0, 0, 0, 4, 9, 0], [0, 0, 6, 0, 0, 8, 0, 0, 0], [0, 4, 3, 0, 2, 0, 0, 8, 7], ], { onProgress } );
solve(
random_line_split
BodyDOMSource.ts
import xs, {Stream, MemoryStream} from 'xstream'; import {adapt} from '@cycle/run/lib/adapt'; import {DevToolEnabledSource} from '@cycle/run'; import {EventsFnOptions, DOMSource} from './DOMSource'; import {fromEvent} from './fromEvent'; export class BodyDOMSource { constructor(private _name: string) {} public select(selector: string): BodyDOMSource { // This functionality is still undefined/undecided. return this; } public elements(): MemoryStream<Array<HTMLBodyElement>> { const out: DevToolEnabledSource & MemoryStream<Array<HTMLBodyElement>> = adapt(xs.of([document.body])); out._isCycleSource = this._name; return out; } public element(): MemoryStream<HTMLBodyElement> {
out._isCycleSource = this._name; return out; } public events<K extends keyof HTMLBodyElementEventMap>( eventType: K, options?: EventsFnOptions, bubbles?: boolean ): Stream<HTMLBodyElementEventMap[K]>; public events( eventType: string, options: EventsFnOptions = {}, bubbles?: boolean ): Stream<Event> { let stream: Stream<Event>; stream = fromEvent( document.body, eventType, options.useCapture, options.preventDefault ); const out: DevToolEnabledSource & Stream<Event> = adapt(stream); out._isCycleSource = this._name; return out; } }
const out: DevToolEnabledSource & MemoryStream<HTMLBodyElement> = adapt( xs.of(document.body) );
random_line_split
BodyDOMSource.ts
import xs, {Stream, MemoryStream} from 'xstream'; import {adapt} from '@cycle/run/lib/adapt'; import {DevToolEnabledSource} from '@cycle/run'; import {EventsFnOptions, DOMSource} from './DOMSource'; import {fromEvent} from './fromEvent'; export class BodyDOMSource { constructor(private _name: string) {} public select(selector: string): BodyDOMSource { // This functionality is still undefined/undecided. return this; } public
(): MemoryStream<Array<HTMLBodyElement>> { const out: DevToolEnabledSource & MemoryStream<Array<HTMLBodyElement>> = adapt(xs.of([document.body])); out._isCycleSource = this._name; return out; } public element(): MemoryStream<HTMLBodyElement> { const out: DevToolEnabledSource & MemoryStream<HTMLBodyElement> = adapt( xs.of(document.body) ); out._isCycleSource = this._name; return out; } public events<K extends keyof HTMLBodyElementEventMap>( eventType: K, options?: EventsFnOptions, bubbles?: boolean ): Stream<HTMLBodyElementEventMap[K]>; public events( eventType: string, options: EventsFnOptions = {}, bubbles?: boolean ): Stream<Event> { let stream: Stream<Event>; stream = fromEvent( document.body, eventType, options.useCapture, options.preventDefault ); const out: DevToolEnabledSource & Stream<Event> = adapt(stream); out._isCycleSource = this._name; return out; } }
elements
identifier_name
BodyDOMSource.ts
import xs, {Stream, MemoryStream} from 'xstream'; import {adapt} from '@cycle/run/lib/adapt'; import {DevToolEnabledSource} from '@cycle/run'; import {EventsFnOptions, DOMSource} from './DOMSource'; import {fromEvent} from './fromEvent'; export class BodyDOMSource { constructor(private _name: string) {} public select(selector: string): BodyDOMSource { // This functionality is still undefined/undecided. return this; } public elements(): MemoryStream<Array<HTMLBodyElement>> { const out: DevToolEnabledSource & MemoryStream<Array<HTMLBodyElement>> = adapt(xs.of([document.body])); out._isCycleSource = this._name; return out; } public element(): MemoryStream<HTMLBodyElement>
public events<K extends keyof HTMLBodyElementEventMap>( eventType: K, options?: EventsFnOptions, bubbles?: boolean ): Stream<HTMLBodyElementEventMap[K]>; public events( eventType: string, options: EventsFnOptions = {}, bubbles?: boolean ): Stream<Event> { let stream: Stream<Event>; stream = fromEvent( document.body, eventType, options.useCapture, options.preventDefault ); const out: DevToolEnabledSource & Stream<Event> = adapt(stream); out._isCycleSource = this._name; return out; } }
{ const out: DevToolEnabledSource & MemoryStream<HTMLBodyElement> = adapt( xs.of(document.body) ); out._isCycleSource = this._name; return out; }
identifier_body
app.js
var authrocket = new AuthRocket({ jsUrl: 'https://tessellate.e1.loginrocket.com/v1/', accountId: 'org_0vFdP9Zc11Y7yucwRTSCg8', apiKey: 'key_AAAe02TUpGzBNgkrLkXYi6j57tmaiU0ll27NEvDRjBq', realmId: 'rl_0vFhopfukY34r8EPGmKVpb' }); console.log('authrocket:', authrocket); //Set logged in status when dom is loaded document.addEventListener("DOMContentLoaded", function(event) { setStatus(); }); //Set status styles function setStatus() { var statusEl = document.getElementById("status"); var logoutButton = document.getElementById("logout-btn"); if(authrocket.isLoggedIn){ statusEl.innerHTML = "True"; statusEl.style.color = 'green'; // statusEl.className = statusEl.className ? ' status-loggedIn' : 'status-loggedIn'; logoutButton.style.display='inline';
logoutButton.style.display='none'; } } function login(loginData){ if(!loginData){ var loginData = {}; loginData.username = document.getElementById('login-username').value; loginData.password = document.getElementById('login-password').value; } authrocket.login(loginData).then(function(loginInfo){ console.log('successful login:', loginInfo); setStatus(); }, function(err){ console.error('login() : Error logging in:', err); }); } function logout(){ authrocket.logout().then(function(){ console.log('successful logout'); setStatus(); }, function(err){ console.error('logout() : Error logging out:', err); }); } function signup(signupData){ if(!signupData){ var signupData = {}; signupData.name = document.getElementById('signup-name').value; signupData.username = document.getElementById('signup-username').value; signupData.email = document.getElementById('signup-email').value; signupData.password = document.getElementById('signup-password').value; } authrocket.signup(signupData).then(function(signupRes){ console.log('successful signup', signupRes); setStatus(); }, function(err){ console.error('logout() : Error signing up:', err); }); } function getUsers(){ authrocket.Users.get().then(function(usersList){ console.log('users loaded', usersList); }, function(err){ console.error('error getting users', err); }); }
} else { statusEl.innerHTML = "False"; statusEl.style.color = 'red';
random_line_split
app.js
var authrocket = new AuthRocket({ jsUrl: 'https://tessellate.e1.loginrocket.com/v1/', accountId: 'org_0vFdP9Zc11Y7yucwRTSCg8', apiKey: 'key_AAAe02TUpGzBNgkrLkXYi6j57tmaiU0ll27NEvDRjBq', realmId: 'rl_0vFhopfukY34r8EPGmKVpb' }); console.log('authrocket:', authrocket); //Set logged in status when dom is loaded document.addEventListener("DOMContentLoaded", function(event) { setStatus(); }); //Set status styles function setStatus() { var statusEl = document.getElementById("status"); var logoutButton = document.getElementById("logout-btn"); if(authrocket.isLoggedIn){ statusEl.innerHTML = "True"; statusEl.style.color = 'green'; // statusEl.className = statusEl.className ? ' status-loggedIn' : 'status-loggedIn'; logoutButton.style.display='inline'; } else { statusEl.innerHTML = "False"; statusEl.style.color = 'red'; logoutButton.style.display='none'; } } function login(loginData){ if(!loginData){ var loginData = {}; loginData.username = document.getElementById('login-username').value; loginData.password = document.getElementById('login-password').value; } authrocket.login(loginData).then(function(loginInfo){ console.log('successful login:', loginInfo); setStatus(); }, function(err){ console.error('login() : Error logging in:', err); }); } function logout(){ authrocket.logout().then(function(){ console.log('successful logout'); setStatus(); }, function(err){ console.error('logout() : Error logging out:', err); }); } function signup(signupData){ if(!signupData){ var signupData = {}; signupData.name = document.getElementById('signup-name').value; signupData.username = document.getElementById('signup-username').value; signupData.email = document.getElementById('signup-email').value; signupData.password = document.getElementById('signup-password').value; } authrocket.signup(signupData).then(function(signupRes){ console.log('successful signup', signupRes); setStatus(); }, function(err){ console.error('logout() : Error signing up:', err); }); } function getUsers()
{ authrocket.Users.get().then(function(usersList){ console.log('users loaded', usersList); }, function(err){ console.error('error getting users', err); }); }
identifier_body
app.js
var authrocket = new AuthRocket({ jsUrl: 'https://tessellate.e1.loginrocket.com/v1/', accountId: 'org_0vFdP9Zc11Y7yucwRTSCg8', apiKey: 'key_AAAe02TUpGzBNgkrLkXYi6j57tmaiU0ll27NEvDRjBq', realmId: 'rl_0vFhopfukY34r8EPGmKVpb' }); console.log('authrocket:', authrocket); //Set logged in status when dom is loaded document.addEventListener("DOMContentLoaded", function(event) { setStatus(); }); //Set status styles function setStatus() { var statusEl = document.getElementById("status"); var logoutButton = document.getElementById("logout-btn"); if(authrocket.isLoggedIn)
else { statusEl.innerHTML = "False"; statusEl.style.color = 'red'; logoutButton.style.display='none'; } } function login(loginData){ if(!loginData){ var loginData = {}; loginData.username = document.getElementById('login-username').value; loginData.password = document.getElementById('login-password').value; } authrocket.login(loginData).then(function(loginInfo){ console.log('successful login:', loginInfo); setStatus(); }, function(err){ console.error('login() : Error logging in:', err); }); } function logout(){ authrocket.logout().then(function(){ console.log('successful logout'); setStatus(); }, function(err){ console.error('logout() : Error logging out:', err); }); } function signup(signupData){ if(!signupData){ var signupData = {}; signupData.name = document.getElementById('signup-name').value; signupData.username = document.getElementById('signup-username').value; signupData.email = document.getElementById('signup-email').value; signupData.password = document.getElementById('signup-password').value; } authrocket.signup(signupData).then(function(signupRes){ console.log('successful signup', signupRes); setStatus(); }, function(err){ console.error('logout() : Error signing up:', err); }); } function getUsers(){ authrocket.Users.get().then(function(usersList){ console.log('users loaded', usersList); }, function(err){ console.error('error getting users', err); }); }
{ statusEl.innerHTML = "True"; statusEl.style.color = 'green'; // statusEl.className = statusEl.className ? ' status-loggedIn' : 'status-loggedIn'; logoutButton.style.display='inline'; }
conditional_block
app.js
var authrocket = new AuthRocket({ jsUrl: 'https://tessellate.e1.loginrocket.com/v1/', accountId: 'org_0vFdP9Zc11Y7yucwRTSCg8', apiKey: 'key_AAAe02TUpGzBNgkrLkXYi6j57tmaiU0ll27NEvDRjBq', realmId: 'rl_0vFhopfukY34r8EPGmKVpb' }); console.log('authrocket:', authrocket); //Set logged in status when dom is loaded document.addEventListener("DOMContentLoaded", function(event) { setStatus(); }); //Set status styles function
() { var statusEl = document.getElementById("status"); var logoutButton = document.getElementById("logout-btn"); if(authrocket.isLoggedIn){ statusEl.innerHTML = "True"; statusEl.style.color = 'green'; // statusEl.className = statusEl.className ? ' status-loggedIn' : 'status-loggedIn'; logoutButton.style.display='inline'; } else { statusEl.innerHTML = "False"; statusEl.style.color = 'red'; logoutButton.style.display='none'; } } function login(loginData){ if(!loginData){ var loginData = {}; loginData.username = document.getElementById('login-username').value; loginData.password = document.getElementById('login-password').value; } authrocket.login(loginData).then(function(loginInfo){ console.log('successful login:', loginInfo); setStatus(); }, function(err){ console.error('login() : Error logging in:', err); }); } function logout(){ authrocket.logout().then(function(){ console.log('successful logout'); setStatus(); }, function(err){ console.error('logout() : Error logging out:', err); }); } function signup(signupData){ if(!signupData){ var signupData = {}; signupData.name = document.getElementById('signup-name').value; signupData.username = document.getElementById('signup-username').value; signupData.email = document.getElementById('signup-email').value; signupData.password = document.getElementById('signup-password').value; } authrocket.signup(signupData).then(function(signupRes){ console.log('successful signup', signupRes); setStatus(); }, function(err){ console.error('logout() : Error signing up:', err); }); } function getUsers(){ authrocket.Users.get().then(function(usersList){ console.log('users loaded', usersList); }, function(err){ console.error('error getting users', err); }); }
setStatus
identifier_name
grab_xml_processing.py
# coding: utf-8 from tests.util import build_grab from tests.util import BaseGrabTestCase class
(BaseGrabTestCase): def setUp(self): self.server.reset() def test_xml_with_declaration(self): self.server.response['get.data'] =\ b'<?xml version="1.0" encoding="UTF-8"?>'\ b'<root><foo>foo</foo></root>' grab = build_grab() grab.go(self.server.get_url()) self.assertTrue(grab.doc.select('//foo').text() == 'foo') def test_declaration_bug(self): """ 1. Build Grab instance with XML with xml declaration 2. Call search method 3. Call xpath 4. Get ValueError: Unicode strings with encoding declaration are not supported. """ xml = b'<?xml version="1.0" encoding="UTF-8"?>'\ b'<tree><leaf>text</leaf></tree>' grab = build_grab(document_body=xml) self.assertTrue(grab.doc.text_search(u'text')) self.assertEqual(grab.doc.select('//leaf').text(), u'text') # Similar bugs grab = build_grab(document_body=xml) self.assertTrue(grab.doc.rex_search(u'text')) self.assertEqual(grab.doc.select('//leaf').text(), u'text')
GrabXMLProcessingTestCase
identifier_name
grab_xml_processing.py
# coding: utf-8 from tests.util import build_grab from tests.util import BaseGrabTestCase class GrabXMLProcessingTestCase(BaseGrabTestCase): def setUp(self): self.server.reset() def test_xml_with_declaration(self):
self.assertTrue(grab.doc.select('//foo').text() == 'foo') def test_declaration_bug(self): """ 1. Build Grab instance with XML with xml declaration 2. Call search method 3. Call xpath 4. Get ValueError: Unicode strings with encoding declaration are not supported. """ xml = b'<?xml version="1.0" encoding="UTF-8"?>'\ b'<tree><leaf>text</leaf></tree>' grab = build_grab(document_body=xml) self.assertTrue(grab.doc.text_search(u'text')) self.assertEqual(grab.doc.select('//leaf').text(), u'text') # Similar bugs grab = build_grab(document_body=xml) self.assertTrue(grab.doc.rex_search(u'text')) self.assertEqual(grab.doc.select('//leaf').text(), u'text')
self.server.response['get.data'] =\ b'<?xml version="1.0" encoding="UTF-8"?>'\ b'<root><foo>foo</foo></root>' grab = build_grab() grab.go(self.server.get_url())
random_line_split
grab_xml_processing.py
# coding: utf-8 from tests.util import build_grab from tests.util import BaseGrabTestCase class GrabXMLProcessingTestCase(BaseGrabTestCase): def setUp(self): self.server.reset() def test_xml_with_declaration(self): self.server.response['get.data'] =\ b'<?xml version="1.0" encoding="UTF-8"?>'\ b'<root><foo>foo</foo></root>' grab = build_grab() grab.go(self.server.get_url()) self.assertTrue(grab.doc.select('//foo').text() == 'foo') def test_declaration_bug(self):
""" 1. Build Grab instance with XML with xml declaration 2. Call search method 3. Call xpath 4. Get ValueError: Unicode strings with encoding declaration are not supported. """ xml = b'<?xml version="1.0" encoding="UTF-8"?>'\ b'<tree><leaf>text</leaf></tree>' grab = build_grab(document_body=xml) self.assertTrue(grab.doc.text_search(u'text')) self.assertEqual(grab.doc.select('//leaf').text(), u'text') # Similar bugs grab = build_grab(document_body=xml) self.assertTrue(grab.doc.rex_search(u'text')) self.assertEqual(grab.doc.select('//leaf').text(), u'text')
identifier_body
numbers.rs
//! Functions operating on numbers. use std::sync::Mutex; use rand::{StdRng, Rng, SeedableRng}; use lisp::LispObject; use remacs_sys::{EmacsInt, INTMASK}; use remacs_macros::lisp_fn; lazy_static! { static ref RNG: Mutex<StdRng> = Mutex::new(StdRng::new().unwrap()); } /// Return t if OBJECT is a floating point number. #[lisp_fn] fn floatp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_float()) } /// Return t if OBJECT is an integer. #[lisp_fn] fn integerp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_integer()) } /// Return t if OBJECT is an integer or a marker (editor pointer). #[lisp_fn] fn integer_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_marker() || object.is_integer()) } /// Return t if OBJECT is a non-negative integer. #[lisp_fn]
#[lisp_fn] fn numberp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number()) } /// Return t if OBJECT is a number or a marker (editor pointer). #[lisp_fn] fn number_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number() || object.is_marker()) } /// Return a pseudo-random number. /// All integers representable in Lisp, i.e. between `most-negative-fixnum' /// and `most-positive-fixnum', inclusive, are equally likely. /// /// With positive integer LIMIT, return random number in interval [0,LIMIT). /// With argument t, set the random number seed from the system's entropy /// pool if available, otherwise from less-random volatile data such as the time. /// With a string argument, set the seed based on the string's contents. /// Other values of LIMIT are ignored. /// /// See Info node `(elisp)Random Numbers' for more details. #[lisp_fn(min = "0")] fn random(limit: LispObject) -> LispObject { let mut rng = RNG.lock().unwrap(); if limit == LispObject::constant_t() { *rng = StdRng::new().unwrap(); } else if let Some(s) = limit.as_string() { let values: Vec<usize> = s.as_slice().iter().map(|&x| x as usize).collect(); rng.reseed(&values); } if let Some(limit) = limit.as_fixnum() { // Return the remainder, except reject the rare case where // get_random returns a number so close to INTMASK that the // remainder isn't random. loop { let val: EmacsInt = rng.gen(); let remainder = val.abs() % limit; if val - remainder <= INTMASK - limit + 1 { return LispObject::from_fixnum(remainder); } } } else { LispObject::from_fixnum_truncated(rng.gen()) } }
fn natnump(object: LispObject) -> LispObject { LispObject::from_bool(object.is_natnum()) } /// Return t if OBJECT is a number (floating point or integer).
random_line_split
numbers.rs
//! Functions operating on numbers. use std::sync::Mutex; use rand::{StdRng, Rng, SeedableRng}; use lisp::LispObject; use remacs_sys::{EmacsInt, INTMASK}; use remacs_macros::lisp_fn; lazy_static! { static ref RNG: Mutex<StdRng> = Mutex::new(StdRng::new().unwrap()); } /// Return t if OBJECT is a floating point number. #[lisp_fn] fn
(object: LispObject) -> LispObject { LispObject::from_bool(object.is_float()) } /// Return t if OBJECT is an integer. #[lisp_fn] fn integerp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_integer()) } /// Return t if OBJECT is an integer or a marker (editor pointer). #[lisp_fn] fn integer_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_marker() || object.is_integer()) } /// Return t if OBJECT is a non-negative integer. #[lisp_fn] fn natnump(object: LispObject) -> LispObject { LispObject::from_bool(object.is_natnum()) } /// Return t if OBJECT is a number (floating point or integer). #[lisp_fn] fn numberp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number()) } /// Return t if OBJECT is a number or a marker (editor pointer). #[lisp_fn] fn number_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number() || object.is_marker()) } /// Return a pseudo-random number. /// All integers representable in Lisp, i.e. between `most-negative-fixnum' /// and `most-positive-fixnum', inclusive, are equally likely. /// /// With positive integer LIMIT, return random number in interval [0,LIMIT). /// With argument t, set the random number seed from the system's entropy /// pool if available, otherwise from less-random volatile data such as the time. /// With a string argument, set the seed based on the string's contents. /// Other values of LIMIT are ignored. /// /// See Info node `(elisp)Random Numbers' for more details. #[lisp_fn(min = "0")] fn random(limit: LispObject) -> LispObject { let mut rng = RNG.lock().unwrap(); if limit == LispObject::constant_t() { *rng = StdRng::new().unwrap(); } else if let Some(s) = limit.as_string() { let values: Vec<usize> = s.as_slice().iter().map(|&x| x as usize).collect(); rng.reseed(&values); } if let Some(limit) = limit.as_fixnum() { // Return the remainder, except reject the rare case where // get_random returns a number so close to INTMASK that the // remainder isn't random. loop { let val: EmacsInt = rng.gen(); let remainder = val.abs() % limit; if val - remainder <= INTMASK - limit + 1 { return LispObject::from_fixnum(remainder); } } } else { LispObject::from_fixnum_truncated(rng.gen()) } }
floatp
identifier_name
numbers.rs
//! Functions operating on numbers. use std::sync::Mutex; use rand::{StdRng, Rng, SeedableRng}; use lisp::LispObject; use remacs_sys::{EmacsInt, INTMASK}; use remacs_macros::lisp_fn; lazy_static! { static ref RNG: Mutex<StdRng> = Mutex::new(StdRng::new().unwrap()); } /// Return t if OBJECT is a floating point number. #[lisp_fn] fn floatp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_float()) } /// Return t if OBJECT is an integer. #[lisp_fn] fn integerp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_integer()) } /// Return t if OBJECT is an integer or a marker (editor pointer). #[lisp_fn] fn integer_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_marker() || object.is_integer()) } /// Return t if OBJECT is a non-negative integer. #[lisp_fn] fn natnump(object: LispObject) -> LispObject { LispObject::from_bool(object.is_natnum()) } /// Return t if OBJECT is a number (floating point or integer). #[lisp_fn] fn numberp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number()) } /// Return t if OBJECT is a number or a marker (editor pointer). #[lisp_fn] fn number_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number() || object.is_marker()) } /// Return a pseudo-random number. /// All integers representable in Lisp, i.e. between `most-negative-fixnum' /// and `most-positive-fixnum', inclusive, are equally likely. /// /// With positive integer LIMIT, return random number in interval [0,LIMIT). /// With argument t, set the random number seed from the system's entropy /// pool if available, otherwise from less-random volatile data such as the time. /// With a string argument, set the seed based on the string's contents. /// Other values of LIMIT are ignored. /// /// See Info node `(elisp)Random Numbers' for more details. #[lisp_fn(min = "0")] fn random(limit: LispObject) -> LispObject
} else { LispObject::from_fixnum_truncated(rng.gen()) } }
{ let mut rng = RNG.lock().unwrap(); if limit == LispObject::constant_t() { *rng = StdRng::new().unwrap(); } else if let Some(s) = limit.as_string() { let values: Vec<usize> = s.as_slice().iter().map(|&x| x as usize).collect(); rng.reseed(&values); } if let Some(limit) = limit.as_fixnum() { // Return the remainder, except reject the rare case where // get_random returns a number so close to INTMASK that the // remainder isn't random. loop { let val: EmacsInt = rng.gen(); let remainder = val.abs() % limit; if val - remainder <= INTMASK - limit + 1 { return LispObject::from_fixnum(remainder); } }
identifier_body
numbers.rs
//! Functions operating on numbers. use std::sync::Mutex; use rand::{StdRng, Rng, SeedableRng}; use lisp::LispObject; use remacs_sys::{EmacsInt, INTMASK}; use remacs_macros::lisp_fn; lazy_static! { static ref RNG: Mutex<StdRng> = Mutex::new(StdRng::new().unwrap()); } /// Return t if OBJECT is a floating point number. #[lisp_fn] fn floatp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_float()) } /// Return t if OBJECT is an integer. #[lisp_fn] fn integerp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_integer()) } /// Return t if OBJECT is an integer or a marker (editor pointer). #[lisp_fn] fn integer_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_marker() || object.is_integer()) } /// Return t if OBJECT is a non-negative integer. #[lisp_fn] fn natnump(object: LispObject) -> LispObject { LispObject::from_bool(object.is_natnum()) } /// Return t if OBJECT is a number (floating point or integer). #[lisp_fn] fn numberp(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number()) } /// Return t if OBJECT is a number or a marker (editor pointer). #[lisp_fn] fn number_or_marker_p(object: LispObject) -> LispObject { LispObject::from_bool(object.is_number() || object.is_marker()) } /// Return a pseudo-random number. /// All integers representable in Lisp, i.e. between `most-negative-fixnum' /// and `most-positive-fixnum', inclusive, are equally likely. /// /// With positive integer LIMIT, return random number in interval [0,LIMIT). /// With argument t, set the random number seed from the system's entropy /// pool if available, otherwise from less-random volatile data such as the time. /// With a string argument, set the seed based on the string's contents. /// Other values of LIMIT are ignored. /// /// See Info node `(elisp)Random Numbers' for more details. #[lisp_fn(min = "0")] fn random(limit: LispObject) -> LispObject { let mut rng = RNG.lock().unwrap(); if limit == LispObject::constant_t() { *rng = StdRng::new().unwrap(); } else if let Some(s) = limit.as_string() { let values: Vec<usize> = s.as_slice().iter().map(|&x| x as usize).collect(); rng.reseed(&values); } if let Some(limit) = limit.as_fixnum() { // Return the remainder, except reject the rare case where // get_random returns a number so close to INTMASK that the // remainder isn't random. loop { let val: EmacsInt = rng.gen(); let remainder = val.abs() % limit; if val - remainder <= INTMASK - limit + 1 { return LispObject::from_fixnum(remainder); } } } else
}
{ LispObject::from_fixnum_truncated(rng.gen()) }
conditional_block
auxpow_testing.py
#!/usr/bin/env python3 # Copyright(c) 2014-2019 Daniel Kraft # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # Utility routines for auxpow that are needed specifically by the regtests. # This is mostly about actually *solving* an auxpow block(with regtest # difficulty) or inspecting the information for verification. import binascii from test_framework import auxpow def computeAuxpow(block, target, ok): """ Build an auxpow object(serialised as hex string) that solves (ok = True) or doesn't solve(ok = False) the block. """ (tx, header) = auxpow.constructAuxpow(block) (header, _) = mineBlock(header, target, ok) return auxpow.finishAuxpow(tx, header) def mineAuxpowBlock(node): """ Mine an auxpow block on the given RPC connection. This uses the createauxblock and submitauxblock command pair. """ def create(): addr = node.getnewaddress() return node.createauxblock(addr) return mineAuxpowBlockWithMethods(create, node.submitauxblock) def mineAuxpowBlockWithMethods(create, submit): """ Mine an auxpow block, using the given methods for creation and submission. """ auxblock = create() target = auxpow.reverseHex(auxblock['_target']) apow = computeAuxpow(auxblock['hash'], target, True) res = submit(auxblock['hash'], apow) assert res return auxblock['hash'] def getCoinbaseAddr(node, blockHash): """ Extract the coinbase tx' payout address for the given block. """ blockData = node.getblock(blockHash) txn = blockData['tx'] assert len(txn) >= 1 txData = node.getrawtransaction(txn[0], True, blockHash) assert len(txData['vout']) >= 1 and len(txData['vin']) == 1 assert 'coinbase' in txData['vin'][0] addr = txData['vout'][0]['scriptPubKey']['addresses'] assert len(addr) == 1 return addr[0] def mineBlock(header, target, ok): """ Given a block header, update the nonce until it is ok(or not) for the given target. """ data = bytearray(binascii.unhexlify(header)) while True: assert data[79] < 255 data[79] += 1 hexData = binascii.hexlify(data) blockhash = auxpow.getScryptPoW(hexData) if (ok and blockhash < target) or((not ok) and blockhash > target):
return (hexData, blockhash)
break
conditional_block
auxpow_testing.py
#!/usr/bin/env python3 # Copyright(c) 2014-2019 Daniel Kraft # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # Utility routines for auxpow that are needed specifically by the regtests. # This is mostly about actually *solving* an auxpow block(with regtest # difficulty) or inspecting the information for verification. import binascii from test_framework import auxpow def computeAuxpow(block, target, ok): """
(tx, header) = auxpow.constructAuxpow(block) (header, _) = mineBlock(header, target, ok) return auxpow.finishAuxpow(tx, header) def mineAuxpowBlock(node): """ Mine an auxpow block on the given RPC connection. This uses the createauxblock and submitauxblock command pair. """ def create(): addr = node.getnewaddress() return node.createauxblock(addr) return mineAuxpowBlockWithMethods(create, node.submitauxblock) def mineAuxpowBlockWithMethods(create, submit): """ Mine an auxpow block, using the given methods for creation and submission. """ auxblock = create() target = auxpow.reverseHex(auxblock['_target']) apow = computeAuxpow(auxblock['hash'], target, True) res = submit(auxblock['hash'], apow) assert res return auxblock['hash'] def getCoinbaseAddr(node, blockHash): """ Extract the coinbase tx' payout address for the given block. """ blockData = node.getblock(blockHash) txn = blockData['tx'] assert len(txn) >= 1 txData = node.getrawtransaction(txn[0], True, blockHash) assert len(txData['vout']) >= 1 and len(txData['vin']) == 1 assert 'coinbase' in txData['vin'][0] addr = txData['vout'][0]['scriptPubKey']['addresses'] assert len(addr) == 1 return addr[0] def mineBlock(header, target, ok): """ Given a block header, update the nonce until it is ok(or not) for the given target. """ data = bytearray(binascii.unhexlify(header)) while True: assert data[79] < 255 data[79] += 1 hexData = binascii.hexlify(data) blockhash = auxpow.getScryptPoW(hexData) if (ok and blockhash < target) or((not ok) and blockhash > target): break return (hexData, blockhash)
Build an auxpow object(serialised as hex string) that solves (ok = True) or doesn't solve(ok = False) the block. """
random_line_split
auxpow_testing.py
#!/usr/bin/env python3 # Copyright(c) 2014-2019 Daniel Kraft # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # Utility routines for auxpow that are needed specifically by the regtests. # This is mostly about actually *solving* an auxpow block(with regtest # difficulty) or inspecting the information for verification. import binascii from test_framework import auxpow def computeAuxpow(block, target, ok): """ Build an auxpow object(serialised as hex string) that solves (ok = True) or doesn't solve(ok = False) the block. """ (tx, header) = auxpow.constructAuxpow(block) (header, _) = mineBlock(header, target, ok) return auxpow.finishAuxpow(tx, header) def
(node): """ Mine an auxpow block on the given RPC connection. This uses the createauxblock and submitauxblock command pair. """ def create(): addr = node.getnewaddress() return node.createauxblock(addr) return mineAuxpowBlockWithMethods(create, node.submitauxblock) def mineAuxpowBlockWithMethods(create, submit): """ Mine an auxpow block, using the given methods for creation and submission. """ auxblock = create() target = auxpow.reverseHex(auxblock['_target']) apow = computeAuxpow(auxblock['hash'], target, True) res = submit(auxblock['hash'], apow) assert res return auxblock['hash'] def getCoinbaseAddr(node, blockHash): """ Extract the coinbase tx' payout address for the given block. """ blockData = node.getblock(blockHash) txn = blockData['tx'] assert len(txn) >= 1 txData = node.getrawtransaction(txn[0], True, blockHash) assert len(txData['vout']) >= 1 and len(txData['vin']) == 1 assert 'coinbase' in txData['vin'][0] addr = txData['vout'][0]['scriptPubKey']['addresses'] assert len(addr) == 1 return addr[0] def mineBlock(header, target, ok): """ Given a block header, update the nonce until it is ok(or not) for the given target. """ data = bytearray(binascii.unhexlify(header)) while True: assert data[79] < 255 data[79] += 1 hexData = binascii.hexlify(data) blockhash = auxpow.getScryptPoW(hexData) if (ok and blockhash < target) or((not ok) and blockhash > target): break return (hexData, blockhash)
mineAuxpowBlock
identifier_name
auxpow_testing.py
#!/usr/bin/env python3 # Copyright(c) 2014-2019 Daniel Kraft # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # Utility routines for auxpow that are needed specifically by the regtests. # This is mostly about actually *solving* an auxpow block(with regtest # difficulty) or inspecting the information for verification. import binascii from test_framework import auxpow def computeAuxpow(block, target, ok): """ Build an auxpow object(serialised as hex string) that solves (ok = True) or doesn't solve(ok = False) the block. """ (tx, header) = auxpow.constructAuxpow(block) (header, _) = mineBlock(header, target, ok) return auxpow.finishAuxpow(tx, header) def mineAuxpowBlock(node): """ Mine an auxpow block on the given RPC connection. This uses the createauxblock and submitauxblock command pair. """ def create(): addr = node.getnewaddress() return node.createauxblock(addr) return mineAuxpowBlockWithMethods(create, node.submitauxblock) def mineAuxpowBlockWithMethods(create, submit):
def getCoinbaseAddr(node, blockHash): """ Extract the coinbase tx' payout address for the given block. """ blockData = node.getblock(blockHash) txn = blockData['tx'] assert len(txn) >= 1 txData = node.getrawtransaction(txn[0], True, blockHash) assert len(txData['vout']) >= 1 and len(txData['vin']) == 1 assert 'coinbase' in txData['vin'][0] addr = txData['vout'][0]['scriptPubKey']['addresses'] assert len(addr) == 1 return addr[0] def mineBlock(header, target, ok): """ Given a block header, update the nonce until it is ok(or not) for the given target. """ data = bytearray(binascii.unhexlify(header)) while True: assert data[79] < 255 data[79] += 1 hexData = binascii.hexlify(data) blockhash = auxpow.getScryptPoW(hexData) if (ok and blockhash < target) or((not ok) and blockhash > target): break return (hexData, blockhash)
""" Mine an auxpow block, using the given methods for creation and submission. """ auxblock = create() target = auxpow.reverseHex(auxblock['_target']) apow = computeAuxpow(auxblock['hash'], target, True) res = submit(auxblock['hash'], apow) assert res return auxblock['hash']
identifier_body
BOK.js
() {} tempCtor.prototype = baseClass.prototype; childClass.superClass_ = baseClass.prototype; childClass.prototype = new tempCtor(); childClass.prototype.constructor = childClass; }; /** * @param {Function} childClass * Sub class implements the interface * @param {Function} interfaceClass * interface class * * - This function will try to add all functions defined on interface to sub class * if a function already exists in sub class, it will be considered as implemented * and will not be overwritten. * - All functions in interface class must be virtual thus the base implementation * of such function should just throw error, as virtual functions should never be * invoked. * - Unlike BOK.inherits(), there is no change on class constructor when doing * implementation of interface. * */ BOK.implement = function(childClass, interfaceClass) { for(var index in interfaceClass.prototype) { if(!childClass.prototype[index]) { childClass.prototype[index] = interfaceClass.prototype[index]; } } }; BOK.isAndroid = function() { return (navigator.userAgent.toLowerCase().indexOf("android") >-1); }; BOK.isAndroid31 = function() { return (navigator.userAgent.indexOf("Android 3.1") >-1); }; BOK.isIPhone = function() { return (navigator.userAgent.toLowerCase().indexOf("iphone") >-1); }; BOK.isIOS = function() { return (navigator.userAgent.toLowerCase().indexOf("iphone") >-1 || navigator.userAgent.toLowerCase().indexOf("ipad") >-1 || navigator.userAgent.toLowerCase().indexOf("ipod") >-1); }; BOK.isIPad = function() { return (navigator.userAgent.toLowerCase().indexOf("ipad") >-1); }; BOK.isFirefox = function() { //console.log("is firefox: "+navigator.userAgent.toLowerCase()) return (navigator.userAgent.toLowerCase().indexOf("mozilla") >-1); }; BOK.isWeiXin = function(){ return (navigator.userAgent.toLowerCase().indexOf("micromessenger") >-1); }; BOK.quickRound = function(value) { if(value < 0) { return (value-0.5)|0; } else { return (value+0.5)|0; } }; BOK.requestAnimationFrame = function(callBack) { var requestAnimationFrameFunc = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || window.oRequestAnimationFrame || function(callback) { setTimeout(callback, 1000 / 60); }; requestAnimationFrameFunc.call(window, callBack); }; /** * @param {String} str * */ BOK.TRACE_LAST_MSG = ''; BOK.TRACE_LOOPING = false; BOK.trace = function(str) { if(this.enableTrace) { if(BOK.TRACE_LAST_MSG == str) { if(!BOK.TRACE_LOOPING) { console.log('**BOK.trace():Looping message** '+str); BOK.TRACE_LOOPING = true; } } else { BOK.TRACE_LAST_MSG = str; BOK.TRACE_LOOPING = false; console.log(str); } } }; BOK.error = function(str) { console.error("ERROR: "+str); }; BOK.warn = function(str) { if(this.enableWarning) console.warn("WARNING: "+str); }; /** * The passed in function may return 'break' to break the iteration process * * @param {Array|Object} collection target array/map for iteration * @param {Function} func function to run on each iterated item with this signature: * function(item, index){} * function may return string 'break' to stop current iteration. * @param {Object=null} [scope] The scope of function call, if left as null then use collection as scope. 
* */ BOK.BREAK = 'break'; BOK.each = function(collection, func, scope) { if(!scope) scope = collection; if(collection) { if(BOK.isArray(collection)) { //return directly if no elements to iterate var length = collection.length; if(length <= 0) return; for(var i=0; i<length; ++i) { //this if check to ensure i return to the correct //position if there is reduction of array during loop //e.g. using of array.splice() during iteration if(length > collection.length) { i -= length - collection.length; length = collection.length; } switch(func.call(scope, collection[i], i)) { case BOK.BREAK: return; } } } else if(typeof collection === 'object') { for(var key in collection) { switch(func.call(scope, collection[key], key)) { case BOK.BREAK: return; } } } } }; /** * Find and remove object from given collection. * * @param {Array} collection * @param {*} object * @return {number} The index of removed item, -1 if not find. * */ BOK.findAndRemove = function(collection, object) { var index = BOK.findInArray(collection, object); if(index >= 0) collection.splice(index, 1); return index; }; /** * Find object from given collection. * * @param {Array} collection * @param {*} object * @return {number} The index of removed item, -1 if not find. * */ BOK.findInArray = function(collection, object) { for(var i=0; i<collection.length; ++i) { if(collection[i] == object) { return i; } } return -1; }; //check is an object is array BOK.isArray = function(array) { if(Array.isArray) return Array.isArray(array); else return Object.prototype.toString.call( array ) === '[object Array]'; }; /** * @param {int} scale The result is 0 to (scale-1) in int value. * @return {int} * */ BOK.randN = function(scale) { return Math.floor(Math.random() * scale); }; /** * @param {Object} obj * @return {String} * */ BOK.randObjectProperty = function(obj) { var result = null; var count = 0; for (var prop in obj) if (Math.random() < 1/++count) result = prop; return result; }; /** * @param {Object} obj * @return {*} * */ BOK.randObjectContent = function(obj) { return obj[BOK.randObjectProperty(obj)]; }; /** * @param {Array} array * @return {*} * */ BOK.randArrayItem = function(array) { return array[BOK.randN(array.length)]; }; /** * Shuffles a provided array, note that the original array WILL be changed during the process. * This process is using Fisher–Yates algorithm. * @see http://bost.ocks.org/mike/shuffle/ * @param {Array} array * @return {Array} * */ BOK.shuffleArray = function(array) { var m = array.length, t, i; // While there remain elements to shuffle… while (m) { // Pick a remaining element… i = Math.floor(Math.random() * m--); // And swap it with the current element. 
t = array[m]; array[m] = array[i]; array[i] = t; } return array; }; BOK.setInnerText = function(elem, text) { if(elem.innerText != null) elem.innerText = text; else if(elem.textContent != null) elem.textContent = text; else throw Error('BOK.setInnerText: target element dont support innerText.'); }; /* * convert relative url to absolute url * */ BOK.absPath = function(url) { var Loc = location.href; Loc = Loc.substring(0, Loc.lastIndexOf('/')); while (/^\.\./.test(url)){ Loc = Loc.substring(0, Loc.lastIndexOf('/')); url= url.substring(3); } return Loc + '/' + url; }; /** * @public add a ClassName to an element * */ BOK.addClassName = function(elem, name) { if(!BOK.hasClassName(elem, name)) elem.className += ' '+name; }; /** * @public remove a ClassName to an element * */ BOK.removeClassName = function(elem, name) { var classes = elem.className.split(' '); BOK.each(classes, function(className, index){ if(className == name) classes.splice(index, 1); }); elem.className = classes.join(' '); }; /** * @public check a ClassName to an element * */ BOK.hasClassName = function(elem, name) { var classes = elem.className.split(' '); var hasClass = false; BOK.each(classes, function(className){ if(className == name) hasClass = true; }); return hasClass; }; /* * deep clone object * */ BOK.cloneObject = function(obj) { var clone; if (obj && typeof(obj) == 'object') { if (obj instanceof Array) { var l = obj.length; var cloneAry = new Array(l); for (var i = 0; i < l; i++) { if(obj[i] === obj) cloneAry
tempCtor
identifier_name
BOK.js
tor.prototype = baseClass.prototype; childClass.superClass_ = baseClass.prototype; childClass.prototype = new tempCtor(); childClass.prototype.constructor = childClass; }; /** * @param {Function} childClass * Sub class implements the interface * @param {Function} interfaceClass * interface class * * - This function will try to add all functions defined on interface to sub class * if a function already exists in sub class, it will be considered as implemented * and will not be overwritten. * - All functions in interface class must be virtual thus the base implementation * of such function should just throw error, as virtual functions should never be * invoked. * - Unlike BOK.inherits(), there is no change on class constructor when doing * implementation of interface. * */ BOK.implement = function(childClass, interfaceClass) { for(var index in interfaceClass.prototype) { if(!childClass.prototype[index]) { childClass.prototype[index] = interfaceClass.prototype[index]; } } }; BOK.isAndroid = function() { return (navigator.userAgent.toLowerCase().indexOf("android") >-1); }; BOK.isAndroid31 = function() { return (navigator.userAgent.indexOf("Android 3.1") >-1); }; BOK.isIPhone = function() { return (navigator.userAgent.toLowerCase().indexOf("iphone") >-1); }; BOK.isIOS = function() { return (navigator.userAgent.toLowerCase().indexOf("iphone") >-1 || navigator.userAgent.toLowerCase().indexOf("ipad") >-1 || navigator.userAgent.toLowerCase().indexOf("ipod") >-1); }; BOK.isIPad = function() { return (navigator.userAgent.toLowerCase().indexOf("ipad") >-1); }; BOK.isFirefox = function() { //console.log("is firefox: "+navigator.userAgent.toLowerCase()) return (navigator.userAgent.toLowerCase().indexOf("mozilla") >-1); }; BOK.isWeiXin = function(){ return (navigator.userAgent.toLowerCase().indexOf("micromessenger") >-1); }; BOK.quickRound = function(value) { if(value < 0) { return (value-0.5)|0; } else { return (value+0.5)|0; } }; BOK.requestAnimationFrame = function(callBack) { var requestAnimationFrameFunc = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || window.oRequestAnimationFrame || function(callback) { setTimeout(callback, 1000 / 60); }; requestAnimationFrameFunc.call(window, callBack); }; /** * @param {String} str * */ BOK.TRACE_LAST_MSG = ''; BOK.TRACE_LOOPING = false; BOK.trace = function(str) { if(this.enableTrace) { if(BOK.TRACE_LAST_MSG == str) { if(!BOK.TRACE_LOOPING) { console.log('**BOK.trace():Looping message** '+str); BOK.TRACE_LOOPING = true; } } else { BOK.TRACE_LAST_MSG = str; BOK.TRACE_LOOPING = false; console.log(str); } } }; BOK.error = function(str) { console.error("ERROR: "+str); }; BOK.warn = function(str) { if(this.enableWarning) console.warn("WARNING: "+str); }; /** * The passed in function may return 'break' to break the iteration process * * @param {Array|Object} collection target array/map for iteration * @param {Function} func function to run on each iterated item with this signature: * function(item, index){} * function may return string 'break' to stop current iteration. * @param {Object=null} [scope] The scope of function call, if left as null then use collection as scope. 
* */ BOK.BREAK = 'break'; BOK.each = function(collection, func, scope) { if(!scope) scope = collection; if(collection) { if(BOK.isArray(collection)) { //return directly if no elements to iterate var length = collection.length; if(length <= 0) return; for(var i=0; i<length; ++i) { //this if check to ensure i return to the correct //position if there is reduction of array during loop //e.g. using of array.splice() during iteration if(length > collection.length) { i -= length - collection.length; length = collection.length; } switch(func.call(scope, collection[i], i)) { case BOK.BREAK: return; } } } else if(typeof collection === 'object') { for(var key in collection) { switch(func.call(scope, collection[key], key)) { case BOK.BREAK: return; } } } } }; /** * Find and remove object from given collection. * * @param {Array} collection * @param {*} object * @return {number} The index of removed item, -1 if not find. * */ BOK.findAndRemove = function(collection, object) { var index = BOK.findInArray(collection, object); if(index >= 0) collection.splice(index, 1); return index; }; /** * Find object from given collection. * * @param {Array} collection * @param {*} object * @return {number} The index of removed item, -1 if not find. * */ BOK.findInArray = function(collection, object) { for(var i=0; i<collection.length; ++i) { if(collection[i] == object) { return i; } } return -1; }; //check is an object is array BOK.isArray = function(array) { if(Array.isArray) return Array.isArray(array); else return Object.prototype.toString.call( array ) === '[object Array]'; }; /** * @param {int} scale The result is 0 to (scale-1) in int value. * @return {int} * */ BOK.randN = function(scale) { return Math.floor(Math.random() * scale); }; /** * @param {Object} obj * @return {String} * */ BOK.randObjectProperty = function(obj) { var result = null; var count = 0; for (var prop in obj) if (Math.random() < 1/++count) result = prop; return result; }; /** * @param {Object} obj * @return {*} * */ BOK.randObjectContent = function(obj) { return obj[BOK.randObjectProperty(obj)]; }; /** * @param {Array} array * @return {*} * */ BOK.randArrayItem = function(array) { return array[BOK.randN(array.length)]; }; /** * Shuffles a provided array, note that the original array WILL be changed during the process. * This process is using Fisher–Yates algorithm. * @see http://bost.ocks.org/mike/shuffle/ * @param {Array} array * @return {Array} * */ BOK.shuffleArray = function(array) { var m = array.length, t, i; // While there remain elements to shuffle… while (m) { // Pick a remaining element… i = Math.floor(Math.random() * m--); // And swap it with the current element. t = array[m]; array[m] = array[i]; array[i] = t; } return array; }; BOK.setInnerText = function(elem, text) { if(elem.innerText != null) elem.innerText = text; else if(elem.textContent != null) elem.textContent = text; else throw Error('BOK.setInnerText: target element dont support innerText.'); }; /* * convert relative url to absolute url * */ BOK.absPath = function(url) { var Loc = location.href; Loc = Loc.substring(0, Loc.lastIndexOf('/')); while (/^\.\./.test(url)){ Loc = Loc.substring(0, Loc.lastIndexOf('/')); url= url.substring(3); }
}; /** * @public add a ClassName to an element * */ BOK.addClassName = function(elem, name) { if(!BOK.hasClassName(elem, name)) elem.className += ' '+name; }; /** * @public remove a ClassName to an element * */ BOK.removeClassName = function(elem, name) { var classes = elem.className.split(' '); BOK.each(classes, function(className, index){ if(className == name) classes.splice(index, 1); }); elem.className = classes.join(' '); }; /** * @public check a ClassName to an element * */ BOK.hasClassName = function(elem, name) { var classes = elem.className.split(' '); var hasClass = false; BOK.each(classes, function(className){ if(className == name) hasClass = true; }); return hasClass; }; /* * deep clone object * */ BOK.cloneObject = function(obj) { var clone; if (obj && typeof(obj) == 'object') { if (obj instanceof Array) { var l = obj.length; var cloneAry = new Array(l); for (var i = 0; i < l; i++) { if(obj[i] === obj) cloneAry[i] = cloneAry;
return Loc + '/' + url;
random_line_split
BOK.js
tempCtor.prototype = baseClass.prototype; childClass.superClass_ = baseClass.prototype; childClass.prototype = new tempCtor(); childClass.prototype.constructor = childClass; }; /** * @param {Function} childClass * Sub class implements the interface * @param {Function} interfaceClass * interface class * * - This function will try to add all functions defined on interface to sub class * if a function already exists in sub class, it will be considered as implemented * and will not be overwritten. * - All functions in interface class must be virtual thus the base implementation * of such function should just throw error, as virtual functions should never be * invoked. * - Unlike BOK.inherits(), there is no change on class constructor when doing * implementation of interface. * */ BOK.implement = function(childClass, interfaceClass) { for(var index in interfaceClass.prototype) { if(!childClass.prototype[index]) { childClass.prototype[index] = interfaceClass.prototype[index]; } } }; BOK.isAndroid = function() { return (navigator.userAgent.toLowerCase().indexOf("android") >-1); }; BOK.isAndroid31 = function() { return (navigator.userAgent.indexOf("Android 3.1") >-1); }; BOK.isIPhone = function() { return (navigator.userAgent.toLowerCase().indexOf("iphone") >-1); }; BOK.isIOS = function() { return (navigator.userAgent.toLowerCase().indexOf("iphone") >-1 || navigator.userAgent.toLowerCase().indexOf("ipad") >-1 || navigator.userAgent.toLowerCase().indexOf("ipod") >-1); }; BOK.isIPad = function() { return (navigator.userAgent.toLowerCase().indexOf("ipad") >-1); }; BOK.isFirefox = function() { //console.log("is firefox: "+navigator.userAgent.toLowerCase()) return (navigator.userAgent.toLowerCase().indexOf("mozilla") >-1); }; BOK.isWeiXin = function(){ return (navigator.userAgent.toLowerCase().indexOf("micromessenger") >-1); }; BOK.quickRound = function(value) { if(value < 0) { return (value-0.5)|0; } else { return (value+0.5)|0; } }; BOK.requestAnimationFrame = function(callBack) { var requestAnimationFrameFunc = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || window.oRequestAnimationFrame || function(callback) { setTimeout(callback, 1000 / 60); }; requestAnimationFrameFunc.call(window, callBack); }; /** * @param {String} str * */ BOK.TRACE_LAST_MSG = ''; BOK.TRACE_LOOPING = false; BOK.trace = function(str) { if(this.enableTrace) { if(BOK.TRACE_LAST_MSG == str) { if(!BOK.TRACE_LOOPING) { console.log('**BOK.trace():Looping message** '+str); BOK.TRACE_LOOPING = true; } } else { BOK.TRACE_LAST_MSG = str; BOK.TRACE_LOOPING = false; console.log(str); } } }; BOK.error = function(str) { console.error("ERROR: "+str); }; BOK.warn = function(str) { if(this.enableWarning) console.warn("WARNING: "+str); }; /** * The passed in function may return 'break' to break the iteration process * * @param {Array|Object} collection target array/map for iteration * @param {Function} func function to run on each iterated item with this signature: * function(item, index){} * function may return string 'break' to stop current iteration. * @param {Object=null} [scope] The scope of function call, if left as null then use collection as scope. 
* */ BOK.BREAK = 'break'; BOK.each = function(collection, func, scope) { if(!scope) scope = collection; if(collection) { if(BOK.isArray(collection)) { //return directly if no elements to iterate var length = collection.length; if(length <= 0) return; for(var i=0; i<length; ++i) { //this if check to ensure i return to the correct //position if there is reduction of array during loop //e.g. using of array.splice() during iteration if(length > collection.length) { i -= length - collection.length; length = collection.length; } switch(func.call(scope, collection[i], i)) { case BOK.BREAK: return; } } } else if(typeof collection === 'object') { for(var key in collection) { switch(func.call(scope, collection[key], key)) { case BOK.BREAK: return; } } } } }; /** * Find and remove object from given collection. * * @param {Array} collection * @param {*} object * @return {number} The index of removed item, -1 if not find. * */ BOK.findAndRemove = function(collection, object) { var index = BOK.findInArray(collection, object); if(index >= 0) collection.splice(index, 1); return index; }; /** * Find object from given collection. * * @param {Array} collection * @param {*} object * @return {number} The index of removed item, -1 if not find. * */ BOK.findInArray = function(collection, object) { for(var i=0; i<collection.length; ++i) { if(collection[i] == object) { return i; } } return -1; }; //check is an object is array BOK.isArray = function(array) { if(Array.isArray) return Array.isArray(array); else return Object.prototype.toString.call( array ) === '[object Array]'; }; /** * @param {int} scale The result is 0 to (scale-1) in int value. * @return {int} * */ BOK.randN = function(scale) { return Math.floor(Math.random() * scale); }; /** * @param {Object} obj * @return {String} * */ BOK.randObjectProperty = function(obj) { var result = null; var count = 0; for (var prop in obj) if (Math.random() < 1/++count) result = prop; return result; }; /** * @param {Object} obj * @return {*} * */ BOK.randObjectContent = function(obj) { return obj[BOK.randObjectProperty(obj)]; }; /** * @param {Array} array * @return {*} * */ BOK.randArrayItem = function(array) { return array[BOK.randN(array.length)]; }; /** * Shuffles a provided array, note that the original array WILL be changed during the process. * This process is using Fisher–Yates algorithm. * @see http://bost.ocks.org/mike/shuffle/ * @param {Array} array * @return {Array} * */ BOK.shuffleArray = function(array) { var m = array.length, t, i; // While there remain elements to shuffle… while (m) { // Pick a remaining element… i = Math.floor(Math.random() * m--); // And swap it with the current element. 
t = array[m]; array[m] = array[i]; array[i] = t; } return array; }; BOK.setInnerText = function(elem, text) { if(elem.innerText != null) elem.innerText = text; else if(elem.textContent != null) elem.textContent = text; else throw Error('BOK.setInnerText: target element dont support innerText.'); }; /* * convert relative url to absolute url * */ BOK.absPath = function(url) { var Loc = location.href; Loc = Loc.substring(0, Loc.lastIndexOf('/')); while (/^\.\./.test(url)){ Loc = Loc.substring(0, Loc.lastIndexOf('/')); url= url.substring(3); } return Loc + '/' + url; }; /** * @public add a ClassName to an element * */ BOK.addClassName = function(elem, name) { if(!BOK.hasClassName(elem, name)) elem.className += ' '+name; }; /** * @public remove a ClassName to an element * */ BOK.removeClassName = function(elem, name) { var classes = elem.className.split(' '); BOK.each(classes, function(className, index){ if(className == name) classes.splice(index, 1); }); elem.className = classes.join(' '); }; /** * @public check a ClassName to an element * */ BOK.hasClassName = function(elem, name) { var classes = elem.className.split(' '); var hasClass = false; BOK.each(classes, function(className){ if(className == name) hasClass = true; }); return hasClass; }; /* * deep clone object * */ BOK.cloneObject = function(obj) { var clone; if (obj && typeof(obj) == 'object') { if (obj instanceof Array) { var l = obj.length; var cloneAry = new Array(l); for (var i = 0; i < l; i++) { if(obj[i] === obj) cloneAry[i] =
{}
identifier_body
dgeni-definitions.ts
import {ClassExportDoc} from 'dgeni-packages/typescript/api-doc-types/ClassExportDoc'; import {ClassLikeExportDoc} from 'dgeni-packages/typescript/api-doc-types/ClassLikeExportDoc'; import {PropertyMemberDoc} from 'dgeni-packages/typescript/api-doc-types/PropertyMemberDoc'; import {NormalizedMethodMemberDoc} from './normalize-method-parameters'; /** Extended Dgeni class-like document that includes separated class members. */ export interface CategorizedClassLikeDoc extends ClassLikeExportDoc { methods: CategorizedMethodMemberDoc[]; properties: CategorizedPropertyMemberDoc[]; isDeprecated: boolean; } /** Extended Dgeni class document that includes extracted Angular metadata. */ export interface CategorizedClassDoc extends ClassExportDoc, CategorizedClassLikeDoc { isDirective: boolean; isService: boolean; isNgModule: boolean; directiveExportAs?: string | null; directiveSelectors?: string[]; directiveMetadata: Map<string, any> | null; extendedDoc: ClassLikeExportDoc | null; } /** Extended Dgeni property-member document that includes extracted Angular metadata. */ export interface CategorizedPropertyMemberDoc extends PropertyMemberDoc { description: string; isDeprecated: boolean; isDirectiveInput: boolean; isDirectiveOutput: boolean; directiveInputAlias: string;
directiveOutputAlias: string; } /** Extended Dgeni method-member document that simplifies logic for the Dgeni template. */ export interface CategorizedMethodMemberDoc extends NormalizedMethodMemberDoc { showReturns: boolean; isDeprecated: boolean; }
random_line_split
StressTest.ts
import ITestImpl = require('../ITestImpl'); class StressTest implements ITestImpl { run (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { if (!this.prepare(() => this.$$finishRun(runCount, onStatus, onOutput)))
return this.$$finishRun(runCount, onStatus, onOutput); } private $$finishRun (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { var all: number[] = []; //Pre-run for (var i = 0; i < 5; i++) { this.prepareIteration(); this.runIteration(); } console.profile(); var start = performance.now(); for (var i = 0; i < runCount; i++) { var s = performance.now(); this.prepareIteration(); this.runIteration(); all.push(performance.now() - s); } var total = performance.now() - start; console.profileEnd(); var min = all.reduce((agg, ms) => Math.min(agg, ms), Number.POSITIVE_INFINITY); var max = all.reduce((agg, ms) => Math.max(agg, ms), Number.NEGATIVE_INFINITY); var sum = all.reduce((agg, ms) => agg + ms, 0); var avg = total / runCount; var sd = calcStdDev(all, sum); var status = [ "Iterations Complete: " + runCount.toString(), "Total Elapsed: " + createTimingString(total) ].join("<br />"); onStatus(status); var output = [ "Sum: " + createTimingString(sum), "Min: " + createTimingString(min), "Max: " + createTimingString(max), "Average: " + createTimingString(avg), "Std Dev: " + createTimingString(sd) ].join("<br />"); onOutput(output); } prepare (ready?: () => any): boolean { return false; } prepareIteration () { } runIteration () { } } function createTimingString (ms: number): string { return ms.toString() + "ms (" + (ms / 1000).toFixed(1) + "s)"; } function calcStdDev (all: number[], total: number): number { var avg = total / all.length; return Math.sqrt(all.reduce((agg, ms) => agg + Math.pow(ms - avg, 2), 0) / all.length); } export = StressTest;
random_line_split
StressTest.ts
import ITestImpl = require('../ITestImpl'); class StressTest implements ITestImpl { run (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { if (!this.prepare(() => this.$$finishRun(runCount, onStatus, onOutput))) return this.$$finishRun(runCount, onStatus, onOutput); } private $$finishRun (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { var all: number[] = []; //Pre-run for (var i = 0; i < 5; i++) { this.prepareIteration(); this.runIteration(); } console.profile(); var start = performance.now(); for (var i = 0; i < runCount; i++)
var total = performance.now() - start; console.profileEnd(); var min = all.reduce((agg, ms) => Math.min(agg, ms), Number.POSITIVE_INFINITY); var max = all.reduce((agg, ms) => Math.max(agg, ms), Number.NEGATIVE_INFINITY); var sum = all.reduce((agg, ms) => agg + ms, 0); var avg = total / runCount; var sd = calcStdDev(all, sum); var status = [ "Iterations Complete: " + runCount.toString(), "Total Elapsed: " + createTimingString(total) ].join("<br />"); onStatus(status); var output = [ "Sum: " + createTimingString(sum), "Min: " + createTimingString(min), "Max: " + createTimingString(max), "Average: " + createTimingString(avg), "Std Dev: " + createTimingString(sd) ].join("<br />"); onOutput(output); } prepare (ready?: () => any): boolean { return false; } prepareIteration () { } runIteration () { } } function createTimingString (ms: number): string { return ms.toString() + "ms (" + (ms / 1000).toFixed(1) + "s)"; } function calcStdDev (all: number[], total: number): number { var avg = total / all.length; return Math.sqrt(all.reduce((agg, ms) => agg + Math.pow(ms - avg, 2), 0) / all.length); } export = StressTest;
{ var s = performance.now(); this.prepareIteration(); this.runIteration(); all.push(performance.now() - s); }
conditional_block
StressTest.ts
import ITestImpl = require('../ITestImpl'); class StressTest implements ITestImpl { run (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { if (!this.prepare(() => this.$$finishRun(runCount, onStatus, onOutput))) return this.$$finishRun(runCount, onStatus, onOutput); } private $$finishRun (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { var all: number[] = []; //Pre-run for (var i = 0; i < 5; i++) { this.prepareIteration(); this.runIteration(); } console.profile(); var start = performance.now(); for (var i = 0; i < runCount; i++) { var s = performance.now(); this.prepareIteration(); this.runIteration(); all.push(performance.now() - s); } var total = performance.now() - start; console.profileEnd(); var min = all.reduce((agg, ms) => Math.min(agg, ms), Number.POSITIVE_INFINITY); var max = all.reduce((agg, ms) => Math.max(agg, ms), Number.NEGATIVE_INFINITY); var sum = all.reduce((agg, ms) => agg + ms, 0); var avg = total / runCount; var sd = calcStdDev(all, sum); var status = [ "Iterations Complete: " + runCount.toString(), "Total Elapsed: " + createTimingString(total) ].join("<br />"); onStatus(status); var output = [ "Sum: " + createTimingString(sum), "Min: " + createTimingString(min), "Max: " + createTimingString(max), "Average: " + createTimingString(avg), "Std Dev: " + createTimingString(sd) ].join("<br />"); onOutput(output); } prepare (ready?: () => any): boolean { return false; } prepareIteration () { }
() { } } function createTimingString (ms: number): string { return ms.toString() + "ms (" + (ms / 1000).toFixed(1) + "s)"; } function calcStdDev (all: number[], total: number): number { var avg = total / all.length; return Math.sqrt(all.reduce((agg, ms) => agg + Math.pow(ms - avg, 2), 0) / all.length); } export = StressTest;
runIteration
identifier_name
StressTest.ts
import ITestImpl = require('../ITestImpl'); class StressTest implements ITestImpl { run (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { if (!this.prepare(() => this.$$finishRun(runCount, onStatus, onOutput))) return this.$$finishRun(runCount, onStatus, onOutput); } private $$finishRun (runCount: number, onStatus: (status: any) => any, onOutput: (output: any) => any) { var all: number[] = []; //Pre-run for (var i = 0; i < 5; i++) { this.prepareIteration(); this.runIteration(); } console.profile(); var start = performance.now(); for (var i = 0; i < runCount; i++) { var s = performance.now(); this.prepareIteration(); this.runIteration(); all.push(performance.now() - s); } var total = performance.now() - start; console.profileEnd(); var min = all.reduce((agg, ms) => Math.min(agg, ms), Number.POSITIVE_INFINITY); var max = all.reduce((agg, ms) => Math.max(agg, ms), Number.NEGATIVE_INFINITY); var sum = all.reduce((agg, ms) => agg + ms, 0); var avg = total / runCount; var sd = calcStdDev(all, sum); var status = [ "Iterations Complete: " + runCount.toString(), "Total Elapsed: " + createTimingString(total) ].join("<br />"); onStatus(status); var output = [ "Sum: " + createTimingString(sum), "Min: " + createTimingString(min), "Max: " + createTimingString(max), "Average: " + createTimingString(avg), "Std Dev: " + createTimingString(sd) ].join("<br />"); onOutput(output); } prepare (ready?: () => any): boolean { return false; } prepareIteration () { } runIteration ()
} function createTimingString (ms: number): string { return ms.toString() + "ms (" + (ms / 1000).toFixed(1) + "s)"; } function calcStdDev (all: number[], total: number): number { var avg = total / all.length; return Math.sqrt(all.reduce((agg, ms) => agg + Math.pow(ms - avg, 2), 0) / all.length); } export = StressTest;
{ }
identifier_body
imp.rs
// Copyright (C) 2019 Guillaume Desmottes <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use glib::subclass; use glib::subclass::prelude::*; use gst::subclass::prelude::*; use gst::{gst_debug, gst_element_error}; use gst_video::prelude::VideoDecoderExtManual; use gst_video::prelude::*; use gst_video::subclass::prelude::*; use image::GenericImageView; use once_cell::sync::Lazy; use std::sync::Mutex; use crate::constants::{CDG_HEIGHT, CDG_WIDTH}; static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| { gst::DebugCategory::new("cdgdec", gst::DebugColorFlags::empty(), Some("CDG decoder")) }); pub struct CdgDec { cdg_inter: Mutex<Box<cdg_renderer::CdgInterpreter>>, output_info: Mutex<Option<gst_video::VideoInfo>>, } impl ObjectSubclass for CdgDec { const NAME: &'static str = "CdgDec"; type Type = super::CdgDec; type ParentType = gst_video::VideoDecoder; type Instance = gst::subclass::ElementInstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib::glib_object_subclass!(); fn new() -> Self { Self { cdg_inter: Mutex::new(Box::new(cdg_renderer::CdgInterpreter::new())), output_info: Mutex::new(None), } } fn class_init(klass: &mut Self::Class) { klass.set_metadata(
"Decoder/Video", "CDG decoder", "Guillaume Desmottes <[email protected]>", ); let sink_caps = gst::Caps::new_simple("video/x-cdg", &[("parsed", &true)]); let sink_pad_template = gst::PadTemplate::new( "sink", gst::PadDirection::Sink, gst::PadPresence::Always, &sink_caps, ) .unwrap(); klass.add_pad_template(sink_pad_template); let src_caps = gst::Caps::new_simple( "video/x-raw", &[ ("format", &gst_video::VideoFormat::Rgba.to_str()), ("width", &(CDG_WIDTH as i32)), ("height", &(CDG_HEIGHT as i32)), ("framerate", &gst::Fraction::new(0, 1)), ], ); let src_pad_template = gst::PadTemplate::new( "src", gst::PadDirection::Src, gst::PadPresence::Always, &src_caps, ) .unwrap(); klass.add_pad_template(src_pad_template); } } impl ObjectImpl for CdgDec {} impl ElementImpl for CdgDec {} impl VideoDecoderImpl for CdgDec { fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { let mut out_info = self.output_info.lock().unwrap(); *out_info = None; self.parent_start(element) } fn stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { { let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.reset(true); } self.parent_stop(element) } fn handle_frame( &self, element: &Self::Type, mut frame: gst_video::VideoCodecFrame, ) -> Result<gst::FlowSuccess, gst::FlowError> { { let mut out_info = self.output_info.lock().unwrap(); if out_info.is_none() { let output_state = element.set_output_state( gst_video::VideoFormat::Rgba, CDG_WIDTH, CDG_HEIGHT, None, )?; element.negotiate(output_state)?; let out_state = element.get_output_state().unwrap(); *out_info = Some(out_state.get_info()); } } let cmd = { let input = frame.get_input_buffer().unwrap(); let map = input.map_readable().map_err(|_| { gst_element_error!( element, gst::CoreError::Failed, ["Failed to map input buffer readable"] ); gst::FlowError::Error })?; let data = map.as_slice(); cdg::decode_subchannel_cmd(&data) }; let cmd = match cmd { Some(cmd) => cmd, None => { // Not a CDG command element.release_frame(frame); return Ok(gst::FlowSuccess::Ok); } }; let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.handle_cmd(cmd); element.allocate_output_frame(&mut frame, None)?; { let output = frame.get_output_buffer_mut().unwrap(); let info = self.output_info.lock().unwrap(); let mut out_frame = gst_video::VideoFrameRef::from_buffer_ref_writable(output, info.as_ref().unwrap()) .map_err(|_| { gst_element_error!( element, gst::CoreError::Failed, ["Failed to map output buffer writable"] ); gst::FlowError::Error })?; let out_stride = out_frame.plane_stride()[0] as usize; for (y, line) in out_frame .plane_data_mut(0) .unwrap() .chunks_exact_mut(out_stride) .take(CDG_HEIGHT as usize) .enumerate() { for (x, pixel) in line .chunks_exact_mut(4) .take(CDG_WIDTH as usize) .enumerate() { let p = cdg_inter.get_pixel(x as u32, y as u32); pixel.copy_from_slice(&p.0); } } } gst_debug!(CAT, obj: element, "Finish frame pts={}", frame.get_pts()); element.finish_frame(frame) } fn decide_allocation( &self, element: &Self::Type, query: &mut gst::QueryRef, ) -> Result<(), gst::ErrorMessage> { if let gst::query::QueryView::Allocation(allocation) = query.view() { if allocation .find_allocation_meta::<gst_video::VideoMeta>() .is_some() { let pools = allocation.get_allocation_pools(); if let Some((ref pool, _, _, _)) = pools.first() { if let Some(pool) = pool { let mut config = pool.get_config(); config.add_option(&gst_video::BUFFER_POOL_OPTION_VIDEO_META); pool.set_config(config).map_err(|e| { gst::gst_error_msg!(gst::CoreError::Negotiation, 
[&e.message]) })?; } } } } self.parent_decide_allocation(element, query) } fn flush(&self, element: &Self::Type) -> bool { gst_debug!(CAT, obj: element, "flushing, reset CDG interpreter"); let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.reset(false); true } }
"CDG decoder",
random_line_split
imp.rs
// Copyright (C) 2019 Guillaume Desmottes <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use glib::subclass; use glib::subclass::prelude::*; use gst::subclass::prelude::*; use gst::{gst_debug, gst_element_error}; use gst_video::prelude::VideoDecoderExtManual; use gst_video::prelude::*; use gst_video::subclass::prelude::*; use image::GenericImageView; use once_cell::sync::Lazy; use std::sync::Mutex; use crate::constants::{CDG_HEIGHT, CDG_WIDTH}; static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| { gst::DebugCategory::new("cdgdec", gst::DebugColorFlags::empty(), Some("CDG decoder")) }); pub struct CdgDec { cdg_inter: Mutex<Box<cdg_renderer::CdgInterpreter>>, output_info: Mutex<Option<gst_video::VideoInfo>>, } impl ObjectSubclass for CdgDec { const NAME: &'static str = "CdgDec"; type Type = super::CdgDec; type ParentType = gst_video::VideoDecoder; type Instance = gst::subclass::ElementInstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib::glib_object_subclass!(); fn new() -> Self { Self { cdg_inter: Mutex::new(Box::new(cdg_renderer::CdgInterpreter::new())), output_info: Mutex::new(None), } } fn class_init(klass: &mut Self::Class) { klass.set_metadata( "CDG decoder", "Decoder/Video", "CDG decoder", "Guillaume Desmottes <[email protected]>", ); let sink_caps = gst::Caps::new_simple("video/x-cdg", &[("parsed", &true)]); let sink_pad_template = gst::PadTemplate::new( "sink", gst::PadDirection::Sink, gst::PadPresence::Always, &sink_caps, ) .unwrap(); klass.add_pad_template(sink_pad_template); let src_caps = gst::Caps::new_simple( "video/x-raw", &[ ("format", &gst_video::VideoFormat::Rgba.to_str()), ("width", &(CDG_WIDTH as i32)), ("height", &(CDG_HEIGHT as i32)), ("framerate", &gst::Fraction::new(0, 1)), ], ); let src_pad_template = gst::PadTemplate::new( "src", gst::PadDirection::Src, gst::PadPresence::Always, &src_caps, ) .unwrap(); klass.add_pad_template(src_pad_template); } } impl ObjectImpl for CdgDec {} impl ElementImpl for CdgDec {} impl VideoDecoderImpl for CdgDec { fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { let mut out_info = self.output_info.lock().unwrap(); *out_info = None; self.parent_start(element) } fn stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { { let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.reset(true); } self.parent_stop(element) } fn handle_frame( &self, element: &Self::Type, mut frame: gst_video::VideoCodecFrame, ) -> Result<gst::FlowSuccess, gst::FlowError> { { let mut out_info = self.output_info.lock().unwrap(); if out_info.is_none() { let output_state = element.set_output_state( gst_video::VideoFormat::Rgba, CDG_WIDTH, CDG_HEIGHT, None, )?; element.negotiate(output_state)?; let out_state = element.get_output_state().unwrap(); *out_info = Some(out_state.get_info()); } } let cmd = { let input = frame.get_input_buffer().unwrap(); let map = input.map_readable().map_err(|_| { gst_element_error!( element, gst::CoreError::Failed, ["Failed to map input buffer readable"] ); gst::FlowError::Error })?; let data = map.as_slice(); cdg::decode_subchannel_cmd(&data) }; let cmd = match cmd { Some(cmd) => cmd, None => { // Not a CDG command element.release_frame(frame); return Ok(gst::FlowSuccess::Ok); } }; let mut cdg_inter = 
self.cdg_inter.lock().unwrap(); cdg_inter.handle_cmd(cmd); element.allocate_output_frame(&mut frame, None)?; { let output = frame.get_output_buffer_mut().unwrap(); let info = self.output_info.lock().unwrap(); let mut out_frame = gst_video::VideoFrameRef::from_buffer_ref_writable(output, info.as_ref().unwrap()) .map_err(|_| { gst_element_error!( element, gst::CoreError::Failed, ["Failed to map output buffer writable"] ); gst::FlowError::Error })?; let out_stride = out_frame.plane_stride()[0] as usize; for (y, line) in out_frame .plane_data_mut(0) .unwrap() .chunks_exact_mut(out_stride) .take(CDG_HEIGHT as usize) .enumerate() { for (x, pixel) in line .chunks_exact_mut(4) .take(CDG_WIDTH as usize) .enumerate() { let p = cdg_inter.get_pixel(x as u32, y as u32); pixel.copy_from_slice(&p.0); } } } gst_debug!(CAT, obj: element, "Finish frame pts={}", frame.get_pts()); element.finish_frame(frame) } fn decide_allocation( &self, element: &Self::Type, query: &mut gst::QueryRef, ) -> Result<(), gst::ErrorMessage> { if let gst::query::QueryView::Allocation(allocation) = query.view() { if allocation .find_allocation_meta::<gst_video::VideoMeta>() .is_some() { let pools = allocation.get_allocation_pools(); if let Some((ref pool, _, _, _)) = pools.first() { if let Some(pool) = pool { let mut config = pool.get_config(); config.add_option(&gst_video::BUFFER_POOL_OPTION_VIDEO_META); pool.set_config(config).map_err(|e| { gst::gst_error_msg!(gst::CoreError::Negotiation, [&e.message]) })?; } } } } self.parent_decide_allocation(element, query) } fn
(&self, element: &Self::Type) -> bool { gst_debug!(CAT, obj: element, "flushing, reset CDG interpreter"); let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.reset(false); true } }
flush
identifier_name
imp.rs
// Copyright (C) 2019 Guillaume Desmottes <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use glib::subclass; use glib::subclass::prelude::*; use gst::subclass::prelude::*; use gst::{gst_debug, gst_element_error}; use gst_video::prelude::VideoDecoderExtManual; use gst_video::prelude::*; use gst_video::subclass::prelude::*; use image::GenericImageView; use once_cell::sync::Lazy; use std::sync::Mutex; use crate::constants::{CDG_HEIGHT, CDG_WIDTH}; static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| { gst::DebugCategory::new("cdgdec", gst::DebugColorFlags::empty(), Some("CDG decoder")) }); pub struct CdgDec { cdg_inter: Mutex<Box<cdg_renderer::CdgInterpreter>>, output_info: Mutex<Option<gst_video::VideoInfo>>, } impl ObjectSubclass for CdgDec { const NAME: &'static str = "CdgDec"; type Type = super::CdgDec; type ParentType = gst_video::VideoDecoder; type Instance = gst::subclass::ElementInstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib::glib_object_subclass!(); fn new() -> Self { Self { cdg_inter: Mutex::new(Box::new(cdg_renderer::CdgInterpreter::new())), output_info: Mutex::new(None), } } fn class_init(klass: &mut Self::Class)
&[ ("format", &gst_video::VideoFormat::Rgba.to_str()), ("width", &(CDG_WIDTH as i32)), ("height", &(CDG_HEIGHT as i32)), ("framerate", &gst::Fraction::new(0, 1)), ], ); let src_pad_template = gst::PadTemplate::new( "src", gst::PadDirection::Src, gst::PadPresence::Always, &src_caps, ) .unwrap(); klass.add_pad_template(src_pad_template); } } impl ObjectImpl for CdgDec {} impl ElementImpl for CdgDec {} impl VideoDecoderImpl for CdgDec { fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { let mut out_info = self.output_info.lock().unwrap(); *out_info = None; self.parent_start(element) } fn stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { { let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.reset(true); } self.parent_stop(element) } fn handle_frame( &self, element: &Self::Type, mut frame: gst_video::VideoCodecFrame, ) -> Result<gst::FlowSuccess, gst::FlowError> { { let mut out_info = self.output_info.lock().unwrap(); if out_info.is_none() { let output_state = element.set_output_state( gst_video::VideoFormat::Rgba, CDG_WIDTH, CDG_HEIGHT, None, )?; element.negotiate(output_state)?; let out_state = element.get_output_state().unwrap(); *out_info = Some(out_state.get_info()); } } let cmd = { let input = frame.get_input_buffer().unwrap(); let map = input.map_readable().map_err(|_| { gst_element_error!( element, gst::CoreError::Failed, ["Failed to map input buffer readable"] ); gst::FlowError::Error })?; let data = map.as_slice(); cdg::decode_subchannel_cmd(&data) }; let cmd = match cmd { Some(cmd) => cmd, None => { // Not a CDG command element.release_frame(frame); return Ok(gst::FlowSuccess::Ok); } }; let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.handle_cmd(cmd); element.allocate_output_frame(&mut frame, None)?; { let output = frame.get_output_buffer_mut().unwrap(); let info = self.output_info.lock().unwrap(); let mut out_frame = gst_video::VideoFrameRef::from_buffer_ref_writable(output, info.as_ref().unwrap()) .map_err(|_| { gst_element_error!( element, gst::CoreError::Failed, ["Failed to map output buffer writable"] ); gst::FlowError::Error })?; let out_stride = out_frame.plane_stride()[0] as usize; for (y, line) in out_frame .plane_data_mut(0) .unwrap() .chunks_exact_mut(out_stride) .take(CDG_HEIGHT as usize) .enumerate() { for (x, pixel) in line .chunks_exact_mut(4) .take(CDG_WIDTH as usize) .enumerate() { let p = cdg_inter.get_pixel(x as u32, y as u32); pixel.copy_from_slice(&p.0); } } } gst_debug!(CAT, obj: element, "Finish frame pts={}", frame.get_pts()); element.finish_frame(frame) } fn decide_allocation( &self, element: &Self::Type, query: &mut gst::QueryRef, ) -> Result<(), gst::ErrorMessage> { if let gst::query::QueryView::Allocation(allocation) = query.view() { if allocation .find_allocation_meta::<gst_video::VideoMeta>() .is_some() { let pools = allocation.get_allocation_pools(); if let Some((ref pool, _, _, _)) = pools.first() { if let Some(pool) = pool { let mut config = pool.get_config(); config.add_option(&gst_video::BUFFER_POOL_OPTION_VIDEO_META); pool.set_config(config).map_err(|e| { gst::gst_error_msg!(gst::CoreError::Negotiation, [&e.message]) })?; } } } } self.parent_decide_allocation(element, query) } fn flush(&self, element: &Self::Type) -> bool { gst_debug!(CAT, obj: element, "flushing, reset CDG interpreter"); let mut cdg_inter = self.cdg_inter.lock().unwrap(); cdg_inter.reset(false); true } }
{ klass.set_metadata( "CDG decoder", "Decoder/Video", "CDG decoder", "Guillaume Desmottes <[email protected]>", ); let sink_caps = gst::Caps::new_simple("video/x-cdg", &[("parsed", &true)]); let sink_pad_template = gst::PadTemplate::new( "sink", gst::PadDirection::Sink, gst::PadPresence::Always, &sink_caps, ) .unwrap(); klass.add_pad_template(sink_pad_template); let src_caps = gst::Caps::new_simple( "video/x-raw",
identifier_body
projectStore.ts
import { createSlice, PayloadAction } from '@reduxjs/toolkit' import { File } from '../../types' import { fetchProject } from './projectActions' import { NormalizedProjectStore } from './ProjectStoreTypes' const initialState: NormalizedProjectStore = { entities: { projects: {}, files: {}, }, ids: [], } export const projectSlice = createSlice({ name: 'projects', initialState, reducers: { updateProjectFiles: ( state,
extraReducers: (builder) => { builder.addCase(fetchProject.fulfilled, (state, { payload }) => { state.entities = payload.entities state.ids = typeof payload.result === 'string' ? [payload.result] : payload.result }) }, }) export const projectActions = projectSlice.actions export type ProjectActions = typeof projectActions
{ payload }: PayloadAction<Record<string, File>> ) => { state.entities.files = payload }, },
random_line_split
base.py
# Peerz - P2P python library using ZeroMQ sockets and gevent # Copyright (C) 2014-2015 Steve Henderson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import time from transitions import Machine class MessageState(object): states = ['initialised', 'waiting response', 'complete', 'timedout']
def __init__(self, engine, txid, msg, callback=None, max_duration=5000, max_concurrency=3): self.engine = engine self.callback = callback self.machine = Machine(model=self, states=self.states, transitions=self.transitions, initial='initialised') self.start = self.last_change = time.time() * 1000 self.max_duration = max_duration self.max_concurrency = max_concurrency self.txid = txid self.times = {} self.parse_message(msg) self.query() def query(self): pass def parse_message(self, msg): self.val = msg.pop(0) def is_complete(self): return self.state in ['complete', 'timedout'] def pack_request(self): return None @staticmethod def unpack_response(content): return None @staticmethod def pack_response(content): return None def _update(self): now = time.time() * 1000 self.times.setdefault(self.state, 0.0) self.times[self.state] += (now - self.last_change) self.last_change = now def duration(self): return time.time() * 1000 - self.start def latency(self): return self.times.setdefault('waiting response', 0.0) def _send_query(self): pass def _completed(self): pass
transitions = [ {'trigger': 'query', 'source': 'initialised', 'dest': 'waiting response', 'before': '_update', 'after': '_send_query'}, {'trigger': 'response', 'source': 'waiting response', 'dest': 'complete', 'before': '_update', 'after': '_completed'}, {'trigger': 'timeout', 'source': '*', 'dest': 'timedout', 'before': '_update', 'after': '_completed', }, ]
random_line_split
base.py
# Peerz - P2P python library using ZeroMQ sockets and gevent # Copyright (C) 2014-2015 Steve Henderson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import time from transitions import Machine class MessageState(object): states = ['initialised', 'waiting response', 'complete', 'timedout'] transitions = [ {'trigger': 'query', 'source': 'initialised', 'dest': 'waiting response', 'before': '_update', 'after': '_send_query'}, {'trigger': 'response', 'source': 'waiting response', 'dest': 'complete', 'before': '_update', 'after': '_completed'}, {'trigger': 'timeout', 'source': '*', 'dest': 'timedout', 'before': '_update', 'after': '_completed', }, ] def __init__(self, engine, txid, msg, callback=None, max_duration=5000, max_concurrency=3): self.engine = engine self.callback = callback self.machine = Machine(model=self, states=self.states, transitions=self.transitions, initial='initialised') self.start = self.last_change = time.time() * 1000 self.max_duration = max_duration self.max_concurrency = max_concurrency self.txid = txid self.times = {} self.parse_message(msg) self.query() def query(self): pass def parse_message(self, msg): self.val = msg.pop(0) def is_complete(self): return self.state in ['complete', 'timedout'] def pack_request(self): return None @staticmethod def unpack_response(content): return None @staticmethod def pack_response(content): return None def
(self): now = time.time() * 1000 self.times.setdefault(self.state, 0.0) self.times[self.state] += (now - self.last_change) self.last_change = now def duration(self): return time.time() * 1000 - self.start def latency(self): return self.times.setdefault('waiting response', 0.0) def _send_query(self): pass def _completed(self): pass
_update
identifier_name
base.py
# Peerz - P2P python library using ZeroMQ sockets and gevent # Copyright (C) 2014-2015 Steve Henderson # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import time from transitions import Machine class MessageState(object): states = ['initialised', 'waiting response', 'complete', 'timedout'] transitions = [ {'trigger': 'query', 'source': 'initialised', 'dest': 'waiting response', 'before': '_update', 'after': '_send_query'}, {'trigger': 'response', 'source': 'waiting response', 'dest': 'complete', 'before': '_update', 'after': '_completed'}, {'trigger': 'timeout', 'source': '*', 'dest': 'timedout', 'before': '_update', 'after': '_completed', }, ] def __init__(self, engine, txid, msg, callback=None, max_duration=5000, max_concurrency=3): self.engine = engine self.callback = callback self.machine = Machine(model=self, states=self.states, transitions=self.transitions, initial='initialised') self.start = self.last_change = time.time() * 1000 self.max_duration = max_duration self.max_concurrency = max_concurrency self.txid = txid self.times = {} self.parse_message(msg) self.query() def query(self): pass def parse_message(self, msg): self.val = msg.pop(0) def is_complete(self):
def pack_request(self): return None @staticmethod def unpack_response(content): return None @staticmethod def pack_response(content): return None def _update(self): now = time.time() * 1000 self.times.setdefault(self.state, 0.0) self.times[self.state] += (now - self.last_change) self.last_change = now def duration(self): return time.time() * 1000 - self.start def latency(self): return self.times.setdefault('waiting response', 0.0) def _send_query(self): pass def _completed(self): pass
return self.state in ['complete', 'timedout']
identifier_body
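The base.py rows above all slice the same `transitions`-based state machine (MessageState), splitting it at the transition table, at `_update`, and at the body of `is_complete`. As a rough illustration of the pattern those cells encode — not a record from the dataset — the sketch below drives a stripped-down model through the same initialised → waiting response → complete path; the `DemoMessage` class, its print statements, and the simplified callbacks are invented for this example, and only the `transitions.Machine` usage mirrors the original.

```python
# Illustrative sketch, not a dataset record: a minimal model driven by
# transitions.Machine, mirroring the states and triggers used by MessageState.
# DemoMessage and its callbacks are invented for this example.
import time

from transitions import Machine


class DemoMessage(object):
    states = ['initialised', 'waiting response', 'complete', 'timedout']
    transitions = [
        {'trigger': 'query', 'source': 'initialised', 'dest': 'waiting response', 'after': '_send_query'},
        {'trigger': 'response', 'source': 'waiting response', 'dest': 'complete', 'after': '_completed'},
        {'trigger': 'timeout', 'source': '*', 'dest': 'timedout', 'after': '_completed'},
    ]

    def __init__(self):
        # Machine attaches the trigger names ('query', 'response', 'timeout')
        # as methods on the model and keeps the current state in self.state.
        self.machine = Machine(model=self, states=self.states,
                               transitions=self.transitions, initial='initialised')
        self.start = time.time() * 1000

    def _send_query(self):
        print('query sent')

    def _completed(self):
        print('finished in state: {}'.format(self.state))


msg = DemoMessage()
msg.query()     # initialised -> waiting response
msg.response()  # waiting response -> complete
assert msg.state == 'complete'
```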
url.rs
USVString}; use crate::dom::blob::Blob; use crate::dom::globalscope::GlobalScope; use crate::dom::urlhelper::UrlHelper; use crate::dom::urlsearchparams::URLSearchParams; use dom_struct::dom_struct; use net_traits::blob_url_store::{get_blob_origin, parse_blob_url}; use net_traits::filemanager_thread::FileManagerThreadMsg; use net_traits::{CoreResourceMsg, IpcSend}; use profile_traits::ipc; use servo_url::ServoUrl; use std::default::Default; use uuid::Uuid; // https://url.spec.whatwg.org/#url #[dom_struct] pub struct URL { reflector_: Reflector, // https://url.spec.whatwg.org/#concept-url-url url: DomRefCell<ServoUrl>, // https://url.spec.whatwg.org/#dom-url-searchparams search_params: MutNullableDom<URLSearchParams>, } impl URL { fn new_inherited(url: ServoUrl) -> URL { URL { reflector_: Reflector::new(), url: DomRefCell::new(url), search_params: Default::default(), } } pub fn new(global: &GlobalScope, url: ServoUrl) -> DomRoot<URL> { reflect_dom_object(Box::new(URL::new_inherited(url)), global, URLBinding::Wrap) } pub fn query_pairs(&self) -> Vec<(String, String)> { self.url .borrow() .as_url() .query_pairs() .into_owned() .collect() } pub fn set_query_pairs(&self, pairs: &[(String, String)]) { let mut url = self.url.borrow_mut(); if pairs.is_empty() { url.as_mut_url().set_query(None); } else { url.as_mut_url() .query_pairs_mut() .clear() .extend_pairs(pairs); } } } impl URL { // https://url.spec.whatwg.org/#constructors pub fn Constructor( global: &GlobalScope, url: USVString, base: Option<USVString>, ) -> Fallible<DomRoot<URL>> { let parsed_base = match base { None => { // Step 1. None }, Some(base) => // Step 2.1. { match ServoUrl::parse(&base.0) { Ok(base) => Some(base), Err(error) => { // Step 2.2. return Err(Error::Type(format!("could not parse base: {}", error))); }, } } }; // Step 3. let parsed_url = match ServoUrl::parse_with_base(parsed_base.as_ref(), &url.0) { Ok(url) => url, Err(error) => { // Step 4. return Err(Error::Type(format!("could not parse URL: {}", error))); }, }; // Step 5: Skip (see step 8 below). // Steps 6-7. let result = URL::new(global, parsed_url); // Step 8: Instead of construcing a new `URLSearchParams` object here, construct it // on-demand inside `URL::SearchParams`. // Step 9. Ok(result) } // https://w3c.github.io/FileAPI/#dfn-createObjectURL pub fn CreateObjectURL(global: &GlobalScope, blob: &Blob) -> DOMString { // XXX: Second field is an unicode-serialized Origin, it is a temporary workaround // and should not be trusted. See issue https://github.com/servo/servo/issues/11722 let origin = get_blob_origin(&global.get_url()); let id = blob.get_blob_url_id(); DOMString::from(URL::unicode_serialization_blob_url(&origin, &id)) } // https://w3c.github.io/FileAPI/#dfn-revokeObjectURL pub fn RevokeObjectURL(global: &GlobalScope, url: DOMString) { // If the value provided for the url argument is not a Blob URL OR // if the value provided for the url argument does not have an entry in the Blob URL Store, // this method call does nothing. User agents may display a message on the error console. 
let origin = get_blob_origin(&global.get_url()); if let Ok(url) = ServoUrl::parse(&url) { if let Ok((id, _)) = parse_blob_url(&url) { let resource_threads = global.resource_threads(); let (tx, rx) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); let msg = FileManagerThreadMsg::RevokeBlobURL(id, origin, tx); let _ = resource_threads.send(CoreResourceMsg::ToFileManager(msg)); let _ = rx.recv().unwrap(); } } } // https://w3c.github.io/FileAPI/#unicodeSerializationOfBlobURL fn unicode_serialization_blob_url(origin: &str, id: &Uuid) -> String { // Step 1, 2 let mut result = "blob:".to_string(); // Step 3 result.push_str(origin); // Step 4 result.push('/'); // Step 5 result.push_str(&id.to_simple().to_string()); result } } impl URLMethods for URL { // https://url.spec.whatwg.org/#dom-url-hash fn Hash(&self) -> USVString { UrlHelper::Hash(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hash fn SetHash(&self, value: USVString) { UrlHelper::SetHash(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-host fn Host(&self) -> USVString { UrlHelper::Host(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-host fn SetHost(&self, value: USVString) { UrlHelper::SetHost(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-hostname fn Hostname(&self) -> USVString { UrlHelper::Hostname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hostname fn SetHostname(&self, value: USVString) { UrlHelper::SetHostname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-href fn Href(&self) -> USVString { UrlHelper::Href(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-href fn SetHref(&self, value: USVString) -> ErrorResult { match ServoUrl::parse(&value.0) { Ok(url) => { *self.url.borrow_mut() = url; self.search_params.set(None); // To be re-initialized in the SearchParams getter. Ok(()) }, Err(error) => Err(Error::Type(format!("could not parse URL: {}", error))), } } // https://url.spec.whatwg.org/#dom-url-password fn Password(&self) -> USVString { UrlHelper::Password(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-password fn SetPassword(&self, value: USVString) { UrlHelper::SetPassword(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-pathname fn Pathname(&self) -> USVString { UrlHelper::Pathname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-pathname fn SetPathname(&self, value: USVString) { UrlHelper::SetPathname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-port fn Port(&self) -> USVString { UrlHelper::Port(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-port fn SetPort(&self, value: USVString) { UrlHelper::SetPort(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-protocol fn Protocol(&self) -> USVString { UrlHelper::Protocol(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-protocol fn SetProtocol(&self, value: USVString) { UrlHelper::SetProtocol(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-origin fn Origin(&self) -> USVString { UrlHelper::Origin(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn Search(&self) -> USVString { UrlHelper::Search(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn SetSearch(&self, value: USVString) { UrlHelper::SetSearch(&mut self.url.borrow_mut(), value); if let Some(search_params) = self.search_params.get()
{ search_params.set_list(self.query_pairs()); }
conditional_block
url.rs
::bindings::reflector::{reflect_dom_object, DomObject, Reflector}; use crate::dom::bindings::root::{DomRoot, MutNullableDom}; use crate::dom::bindings::str::{DOMString, USVString}; use crate::dom::blob::Blob; use crate::dom::globalscope::GlobalScope; use crate::dom::urlhelper::UrlHelper; use crate::dom::urlsearchparams::URLSearchParams; use dom_struct::dom_struct; use net_traits::blob_url_store::{get_blob_origin, parse_blob_url}; use net_traits::filemanager_thread::FileManagerThreadMsg; use net_traits::{CoreResourceMsg, IpcSend}; use profile_traits::ipc; use servo_url::ServoUrl; use std::default::Default; use uuid::Uuid; // https://url.spec.whatwg.org/#url #[dom_struct] pub struct URL { reflector_: Reflector, // https://url.spec.whatwg.org/#concept-url-url url: DomRefCell<ServoUrl>, // https://url.spec.whatwg.org/#dom-url-searchparams search_params: MutNullableDom<URLSearchParams>, } impl URL { fn new_inherited(url: ServoUrl) -> URL { URL { reflector_: Reflector::new(), url: DomRefCell::new(url), search_params: Default::default(), } } pub fn new(global: &GlobalScope, url: ServoUrl) -> DomRoot<URL> { reflect_dom_object(Box::new(URL::new_inherited(url)), global, URLBinding::Wrap) } pub fn query_pairs(&self) -> Vec<(String, String)> { self.url .borrow() .as_url() .query_pairs() .into_owned() .collect() } pub fn set_query_pairs(&self, pairs: &[(String, String)]) { let mut url = self.url.borrow_mut(); if pairs.is_empty() { url.as_mut_url().set_query(None); } else { url.as_mut_url() .query_pairs_mut() .clear() .extend_pairs(pairs); } } } impl URL { // https://url.spec.whatwg.org/#constructors pub fn Constructor( global: &GlobalScope, url: USVString, base: Option<USVString>, ) -> Fallible<DomRoot<URL>> { let parsed_base = match base { None => { // Step 1. None }, Some(base) => // Step 2.1.
Err(error) => { // Step 2.2. return Err(Error::Type(format!("could not parse base: {}", error))); }, } } }; // Step 3. let parsed_url = match ServoUrl::parse_with_base(parsed_base.as_ref(), &url.0) { Ok(url) => url, Err(error) => { // Step 4. return Err(Error::Type(format!("could not parse URL: {}", error))); }, }; // Step 5: Skip (see step 8 below). // Steps 6-7. let result = URL::new(global, parsed_url); // Step 8: Instead of construcing a new `URLSearchParams` object here, construct it // on-demand inside `URL::SearchParams`. // Step 9. Ok(result) } // https://w3c.github.io/FileAPI/#dfn-createObjectURL pub fn CreateObjectURL(global: &GlobalScope, blob: &Blob) -> DOMString { // XXX: Second field is an unicode-serialized Origin, it is a temporary workaround // and should not be trusted. See issue https://github.com/servo/servo/issues/11722 let origin = get_blob_origin(&global.get_url()); let id = blob.get_blob_url_id(); DOMString::from(URL::unicode_serialization_blob_url(&origin, &id)) } // https://w3c.github.io/FileAPI/#dfn-revokeObjectURL pub fn RevokeObjectURL(global: &GlobalScope, url: DOMString) { // If the value provided for the url argument is not a Blob URL OR // if the value provided for the url argument does not have an entry in the Blob URL Store, // this method call does nothing. User agents may display a message on the error console. let origin = get_blob_origin(&global.get_url()); if let Ok(url) = ServoUrl::parse(&url) { if let Ok((id, _)) = parse_blob_url(&url) { let resource_threads = global.resource_threads(); let (tx, rx) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); let msg = FileManagerThreadMsg::RevokeBlobURL(id, origin, tx); let _ = resource_threads.send(CoreResourceMsg::ToFileManager(msg)); let _ = rx.recv().unwrap(); } } } // https://w3c.github.io/FileAPI/#unicodeSerializationOfBlobURL fn unicode_serialization_blob_url(origin: &str, id: &Uuid) -> String { // Step 1, 2 let mut result = "blob:".to_string(); // Step 3 result.push_str(origin); // Step 4 result.push('/'); // Step 5 result.push_str(&id.to_simple().to_string()); result } } impl URLMethods for URL { // https://url.spec.whatwg.org/#dom-url-hash fn Hash(&self) -> USVString { UrlHelper::Hash(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hash fn SetHash(&self, value: USVString) { UrlHelper::SetHash(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-host fn Host(&self) -> USVString { UrlHelper::Host(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-host fn SetHost(&self, value: USVString) { UrlHelper::SetHost(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-hostname fn Hostname(&self) -> USVString { UrlHelper::Hostname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hostname fn SetHostname(&self, value: USVString) { UrlHelper::SetHostname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-href fn Href(&self) -> USVString { UrlHelper::Href(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-href fn SetHref(&self, value: USVString) -> ErrorResult { match ServoUrl::parse(&value.0) { Ok(url) => { *self.url.borrow_mut() = url; self.search_params.set(None); // To be re-initialized in the SearchParams getter. 
Ok(()) }, Err(error) => Err(Error::Type(format!("could not parse URL: {}", error))), } } // https://url.spec.whatwg.org/#dom-url-password fn Password(&self) -> USVString { UrlHelper::Password(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-password fn SetPassword(&self, value: USVString) { UrlHelper::SetPassword(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-pathname fn Pathname(&self) -> USVString { UrlHelper::Pathname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-pathname fn SetPathname(&self, value: USVString) { UrlHelper::SetPathname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-port fn Port(&self) -> USVString { UrlHelper::Port(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-port fn SetPort(&self, value: USVString) { UrlHelper::SetPort(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-protocol fn Protocol(&self) -> USVString { UrlHelper::Protocol(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-protocol fn SetProtocol(&self, value: USVString) { UrlHelper::SetProtocol(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-origin fn Origin(&self) -> USVString { UrlHelper::Origin(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn Search(&self) -> USVString { UrlHelper::Search(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn SetSearch(&self, value: USV
{ match ServoUrl::parse(&base.0) { Ok(base) => Some(base),
random_line_split
url.rs
::bindings::reflector::{reflect_dom_object, DomObject, Reflector}; use crate::dom::bindings::root::{DomRoot, MutNullableDom}; use crate::dom::bindings::str::{DOMString, USVString}; use crate::dom::blob::Blob; use crate::dom::globalscope::GlobalScope; use crate::dom::urlhelper::UrlHelper; use crate::dom::urlsearchparams::URLSearchParams; use dom_struct::dom_struct; use net_traits::blob_url_store::{get_blob_origin, parse_blob_url}; use net_traits::filemanager_thread::FileManagerThreadMsg; use net_traits::{CoreResourceMsg, IpcSend}; use profile_traits::ipc; use servo_url::ServoUrl; use std::default::Default; use uuid::Uuid; // https://url.spec.whatwg.org/#url #[dom_struct] pub struct URL { reflector_: Reflector, // https://url.spec.whatwg.org/#concept-url-url url: DomRefCell<ServoUrl>, // https://url.spec.whatwg.org/#dom-url-searchparams search_params: MutNullableDom<URLSearchParams>, } impl URL { fn new_inherited(url: ServoUrl) -> URL { URL { reflector_: Reflector::new(), url: DomRefCell::new(url), search_params: Default::default(), } } pub fn new(global: &GlobalScope, url: ServoUrl) -> DomRoot<URL> { reflect_dom_object(Box::new(URL::new_inherited(url)), global, URLBinding::Wrap) } pub fn query_pairs(&self) -> Vec<(String, String)> { self.url .borrow() .as_url() .query_pairs() .into_owned() .collect() } pub fn set_query_pairs(&self, pairs: &[(String, String)]) { let mut url = self.url.borrow_mut(); if pairs.is_empty() { url.as_mut_url().set_query(None); } else { url.as_mut_url() .query_pairs_mut() .clear() .extend_pairs(pairs); } } } impl URL { // https://url.spec.whatwg.org/#constructors pub fn Constructor( global: &GlobalScope, url: USVString, base: Option<USVString>, ) -> Fallible<DomRoot<URL>> { let parsed_base = match base { None => { // Step 1. None }, Some(base) => // Step 2.1. { match ServoUrl::parse(&base.0) { Ok(base) => Some(base), Err(error) => { // Step 2.2. return Err(Error::Type(format!("could not parse base: {}", error))); }, } } }; // Step 3. let parsed_url = match ServoUrl::parse_with_base(parsed_base.as_ref(), &url.0) { Ok(url) => url, Err(error) => { // Step 4. return Err(Error::Type(format!("could not parse URL: {}", error))); }, }; // Step 5: Skip (see step 8 below). // Steps 6-7. let result = URL::new(global, parsed_url); // Step 8: Instead of construcing a new `URLSearchParams` object here, construct it // on-demand inside `URL::SearchParams`. // Step 9. Ok(result) } // https://w3c.github.io/FileAPI/#dfn-createObjectURL pub fn CreateObjectURL(global: &GlobalScope, blob: &Blob) -> DOMString { // XXX: Second field is an unicode-serialized Origin, it is a temporary workaround // and should not be trusted. See issue https://github.com/servo/servo/issues/11722 let origin = get_blob_origin(&global.get_url()); let id = blob.get_blob_url_id(); DOMString::from(URL::unicode_serialization_blob_url(&origin, &id)) } // https://w3c.github.io/FileAPI/#dfn-revokeObjectURL pub fn RevokeObjectURL(global: &GlobalScope, url: DOMString) { // If the value provided for the url argument is not a Blob URL OR // if the value provided for the url argument does not have an entry in the Blob URL Store, // this method call does nothing. User agents may display a message on the error console. 
let origin = get_blob_origin(&global.get_url()); if let Ok(url) = ServoUrl::parse(&url) { if let Ok((id, _)) = parse_blob_url(&url) { let resource_threads = global.resource_threads(); let (tx, rx) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); let msg = FileManagerThreadMsg::RevokeBlobURL(id, origin, tx); let _ = resource_threads.send(CoreResourceMsg::ToFileManager(msg)); let _ = rx.recv().unwrap(); } } } // https://w3c.github.io/FileAPI/#unicodeSerializationOfBlobURL fn unicode_serialization_blob_url(origin: &str, id: &Uuid) -> String { // Step 1, 2 let mut result = "blob:".to_string(); // Step 3 result.push_str(origin); // Step 4 result.push('/'); // Step 5 result.push_str(&id.to_simple().to_string()); result } } impl URLMethods for URL { // https://url.spec.whatwg.org/#dom-url-hash fn Hash(&self) -> USVString { UrlHelper::Hash(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hash fn SetHash(&self, value: USVString) { UrlHelper::SetHash(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-host fn Host(&self) -> USVString { UrlHelper::Host(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-host fn
(&self, value: USVString) { UrlHelper::SetHost(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-hostname fn Hostname(&self) -> USVString { UrlHelper::Hostname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hostname fn SetHostname(&self, value: USVString) { UrlHelper::SetHostname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-href fn Href(&self) -> USVString { UrlHelper::Href(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-href fn SetHref(&self, value: USVString) -> ErrorResult { match ServoUrl::parse(&value.0) { Ok(url) => { *self.url.borrow_mut() = url; self.search_params.set(None); // To be re-initialized in the SearchParams getter. Ok(()) }, Err(error) => Err(Error::Type(format!("could not parse URL: {}", error))), } } // https://url.spec.whatwg.org/#dom-url-password fn Password(&self) -> USVString { UrlHelper::Password(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-password fn SetPassword(&self, value: USVString) { UrlHelper::SetPassword(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-pathname fn Pathname(&self) -> USVString { UrlHelper::Pathname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-pathname fn SetPathname(&self, value: USVString) { UrlHelper::SetPathname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-port fn Port(&self) -> USVString { UrlHelper::Port(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-port fn SetPort(&self, value: USVString) { UrlHelper::SetPort(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-protocol fn Protocol(&self) -> USVString { UrlHelper::Protocol(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-protocol fn SetProtocol(&self, value: USVString) { UrlHelper::SetProtocol(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-origin fn Origin(&self) -> USVString { UrlHelper::Origin(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn Search(&self) -> USVString { UrlHelper::Search(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn SetSearch(&self, value: US
SetHost
identifier_name
url.rs
bindings::reflector::{reflect_dom_object, DomObject, Reflector}; use crate::dom::bindings::root::{DomRoot, MutNullableDom}; use crate::dom::bindings::str::{DOMString, USVString}; use crate::dom::blob::Blob; use crate::dom::globalscope::GlobalScope; use crate::dom::urlhelper::UrlHelper; use crate::dom::urlsearchparams::URLSearchParams; use dom_struct::dom_struct; use net_traits::blob_url_store::{get_blob_origin, parse_blob_url}; use net_traits::filemanager_thread::FileManagerThreadMsg; use net_traits::{CoreResourceMsg, IpcSend}; use profile_traits::ipc; use servo_url::ServoUrl; use std::default::Default; use uuid::Uuid; // https://url.spec.whatwg.org/#url #[dom_struct] pub struct URL { reflector_: Reflector, // https://url.spec.whatwg.org/#concept-url-url url: DomRefCell<ServoUrl>, // https://url.spec.whatwg.org/#dom-url-searchparams search_params: MutNullableDom<URLSearchParams>, } impl URL { fn new_inherited(url: ServoUrl) -> URL
pub fn new(global: &GlobalScope, url: ServoUrl) -> DomRoot<URL> { reflect_dom_object(Box::new(URL::new_inherited(url)), global, URLBinding::Wrap) } pub fn query_pairs(&self) -> Vec<(String, String)> { self.url .borrow() .as_url() .query_pairs() .into_owned() .collect() } pub fn set_query_pairs(&self, pairs: &[(String, String)]) { let mut url = self.url.borrow_mut(); if pairs.is_empty() { url.as_mut_url().set_query(None); } else { url.as_mut_url() .query_pairs_mut() .clear() .extend_pairs(pairs); } } } impl URL { // https://url.spec.whatwg.org/#constructors pub fn Constructor( global: &GlobalScope, url: USVString, base: Option<USVString>, ) -> Fallible<DomRoot<URL>> { let parsed_base = match base { None => { // Step 1. None }, Some(base) => // Step 2.1. { match ServoUrl::parse(&base.0) { Ok(base) => Some(base), Err(error) => { // Step 2.2. return Err(Error::Type(format!("could not parse base: {}", error))); }, } } }; // Step 3. let parsed_url = match ServoUrl::parse_with_base(parsed_base.as_ref(), &url.0) { Ok(url) => url, Err(error) => { // Step 4. return Err(Error::Type(format!("could not parse URL: {}", error))); }, }; // Step 5: Skip (see step 8 below). // Steps 6-7. let result = URL::new(global, parsed_url); // Step 8: Instead of construcing a new `URLSearchParams` object here, construct it // on-demand inside `URL::SearchParams`. // Step 9. Ok(result) } // https://w3c.github.io/FileAPI/#dfn-createObjectURL pub fn CreateObjectURL(global: &GlobalScope, blob: &Blob) -> DOMString { // XXX: Second field is an unicode-serialized Origin, it is a temporary workaround // and should not be trusted. See issue https://github.com/servo/servo/issues/11722 let origin = get_blob_origin(&global.get_url()); let id = blob.get_blob_url_id(); DOMString::from(URL::unicode_serialization_blob_url(&origin, &id)) } // https://w3c.github.io/FileAPI/#dfn-revokeObjectURL pub fn RevokeObjectURL(global: &GlobalScope, url: DOMString) { // If the value provided for the url argument is not a Blob URL OR // if the value provided for the url argument does not have an entry in the Blob URL Store, // this method call does nothing. User agents may display a message on the error console. 
let origin = get_blob_origin(&global.get_url()); if let Ok(url) = ServoUrl::parse(&url) { if let Ok((id, _)) = parse_blob_url(&url) { let resource_threads = global.resource_threads(); let (tx, rx) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); let msg = FileManagerThreadMsg::RevokeBlobURL(id, origin, tx); let _ = resource_threads.send(CoreResourceMsg::ToFileManager(msg)); let _ = rx.recv().unwrap(); } } } // https://w3c.github.io/FileAPI/#unicodeSerializationOfBlobURL fn unicode_serialization_blob_url(origin: &str, id: &Uuid) -> String { // Step 1, 2 let mut result = "blob:".to_string(); // Step 3 result.push_str(origin); // Step 4 result.push('/'); // Step 5 result.push_str(&id.to_simple().to_string()); result } } impl URLMethods for URL { // https://url.spec.whatwg.org/#dom-url-hash fn Hash(&self) -> USVString { UrlHelper::Hash(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hash fn SetHash(&self, value: USVString) { UrlHelper::SetHash(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-host fn Host(&self) -> USVString { UrlHelper::Host(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-host fn SetHost(&self, value: USVString) { UrlHelper::SetHost(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-hostname fn Hostname(&self) -> USVString { UrlHelper::Hostname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hostname fn SetHostname(&self, value: USVString) { UrlHelper::SetHostname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-href fn Href(&self) -> USVString { UrlHelper::Href(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-href fn SetHref(&self, value: USVString) -> ErrorResult { match ServoUrl::parse(&value.0) { Ok(url) => { *self.url.borrow_mut() = url; self.search_params.set(None); // To be re-initialized in the SearchParams getter. Ok(()) }, Err(error) => Err(Error::Type(format!("could not parse URL: {}", error))), } } // https://url.spec.whatwg.org/#dom-url-password fn Password(&self) -> USVString { UrlHelper::Password(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-password fn SetPassword(&self, value: USVString) { UrlHelper::SetPassword(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-pathname fn Pathname(&self) -> USVString { UrlHelper::Pathname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-pathname fn SetPathname(&self, value: USVString) { UrlHelper::SetPathname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-port fn Port(&self) -> USVString { UrlHelper::Port(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-port fn SetPort(&self, value: USVString) { UrlHelper::SetPort(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-protocol fn Protocol(&self) -> USVString { UrlHelper::Protocol(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-protocol fn SetProtocol(&self, value: USVString) { UrlHelper::SetProtocol(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-origin fn Origin(&self) -> USVString { UrlHelper::Origin(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn Search(&self) -> USVString { UrlHelper::Search(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn SetSearch(&self, value: US
{ URL { reflector_: Reflector::new(), url: DomRefCell::new(url), search_params: Default::default(), } }
identifier_body
fetch-auth-token.ts
import { exec, execaCommand, IResponse } from './util'; import * as common from './'; import * as util from './util'; import config from './config'; const REFRESH_URL = `https://${config.AUTH0_DOMAIN}/delegation`; interface IAuth0Response { id_token: string; } export async function fetchToken( params: { refreshToken?: string } = {}, ): Promise<string | null> { const refreshToken = (params && params.refreshToken) || config.AUTH0_REFRESH_TOKEN; if (!refreshToken)
const payload = { client_id: config.AUTH0_CLIENT_ID, refresh_token: refreshToken, grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', api_type: 'app', }; let response: IResponse<IAuth0Response>; try { response = await util.http.post<IAuth0Response>(REFRESH_URL, payload); if (response.status === 200 && response.data) { return response.data.id_token; } else { util.log.error( `${response.status} Failed to refresh token. ${response.statusText}`, ); throw new Error('Failed to fetch token'); } } catch (error) { util.log.error(`Failed to refresh token. ${error.message}`); } return null; }
{ throw new Error('Need a refresh token. Is your .env set up correctly?'); }
conditional_block
fetch-auth-token.ts
import { exec, execaCommand, IResponse } from './util'; import * as common from './'; import * as util from './util'; import config from './config'; const REFRESH_URL = `https://${config.AUTH0_DOMAIN}/delegation`; interface IAuth0Response { id_token: string; } export async function fetchToken( params: { refreshToken?: string } = {}, ): Promise<string | null>
} else { util.log.error( `${response.status} Failed to refresh token. ${response.statusText}`, ); throw new Error('Failed to fetch token'); } } catch (error) { util.log.error(`Failed to refresh token. ${error.message}`); } return null; }
{ const refreshToken = (params && params.refreshToken) || config.AUTH0_REFRESH_TOKEN; if (!refreshToken) { throw new Error('Need a refresh token. Is your .env set up correctly?'); } const payload = { client_id: config.AUTH0_CLIENT_ID, refresh_token: refreshToken, grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', api_type: 'app', }; let response: IResponse<IAuth0Response>; try { response = await util.http.post<IAuth0Response>(REFRESH_URL, payload); if (response.status === 200 && response.data) { return response.data.id_token;
identifier_body
fetch-auth-token.ts
import { exec, execaCommand, IResponse } from './util'; import * as common from './'; import * as util from './util'; import config from './config'; const REFRESH_URL = `https://${config.AUTH0_DOMAIN}/delegation`; interface IAuth0Response { id_token: string; } export async function
( params: { refreshToken?: string } = {}, ): Promise<string | null> { const refreshToken = (params && params.refreshToken) || config.AUTH0_REFRESH_TOKEN; if (!refreshToken) { throw new Error('Need a refresh token. Is your .env set up correctly?'); } const payload = { client_id: config.AUTH0_CLIENT_ID, refresh_token: refreshToken, grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', api_type: 'app', }; let response: IResponse<IAuth0Response>; try { response = await util.http.post<IAuth0Response>(REFRESH_URL, payload); if (response.status === 200 && response.data) { return response.data.id_token; } else { util.log.error( `${response.status} Failed to refresh token. ${response.statusText}`, ); throw new Error('Failed to fetch token'); } } catch (error) { util.log.error(`Failed to refresh token. ${error.message}`); } return null; }
fetchToken
identifier_name
fetch-auth-token.ts
import { exec, execaCommand, IResponse } from './util'; import * as common from './'; import * as util from './util'; import config from './config'; const REFRESH_URL = `https://${config.AUTH0_DOMAIN}/delegation`; interface IAuth0Response { id_token: string; } export async function fetchToken( params: { refreshToken?: string } = {}, ): Promise<string | null> { const refreshToken = (params && params.refreshToken) || config.AUTH0_REFRESH_TOKEN; if (!refreshToken) { throw new Error('Need a refresh token. Is your .env set up correctly?'); }
client_id: config.AUTH0_CLIENT_ID, refresh_token: refreshToken, grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', api_type: 'app', }; let response: IResponse<IAuth0Response>; try { response = await util.http.post<IAuth0Response>(REFRESH_URL, payload); if (response.status === 200 && response.data) { return response.data.id_token; } else { util.log.error( `${response.status} Failed to refresh token. ${response.statusText}`, ); throw new Error('Failed to fetch token'); } } catch (error) { util.log.error(`Failed to refresh token. ${error.message}`); } return null; }
const payload = {
random_line_split
lot.py
# -*- coding: utf-8 -*- from openprocurement.auctions.core.utils import ( apply_patch, context_unpack, get_now, json_view, opresource, save_auction, ) from openprocurement.auctions.core.validation import ( validate_lot_data, validate_patch_lot_data, ) from openprocurement.auctions.core.views.mixins import AuctionLotResource @opresource(name='dgfOtherAssets:Auction Lots', collection_path='/auctions/{auction_id}/lots', path='/auctions/{auction_id}/lots/{lot_id}', auctionsprocurementMethodType="dgfOtherAssets", description="Auction lots") class AuctionLotResource(AuctionLotResource): @json_view(content_type="application/json", validators=(validate_lot_data,), permission='edit_auction') def collection_post(self): """Add a lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t add lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.validated['lot'] lot.date = get_now() auction.lots.append(lot) if save_auction(self.request): self.LOGGER.info('Created auction lot {}'.format(lot.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_create'}, {'lot_id': lot.id})) self.request.response.status = 201 route = self.request.matched_route.name.replace("collection_", "") self.request.response.headers['Location'] = self.request.current_route_url(_route_name=route, lot_id=lot.id, _query={}) return {'data': lot.serialize("view")} @json_view(content_type="application/json", validators=(validate_patch_lot_data,), permission='edit_auction') def
(self): """Update of lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t update lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return if apply_patch(self.request, src=self.request.context.serialize()): self.LOGGER.info('Updated auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_patch'})) return {'data': self.request.context.serialize("view")} @json_view(permission='edit_auction') def delete(self): """Lot deleting """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t delete lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.context res = lot.serialize("view") auction.lots.remove(lot) if save_auction(self.request): self.LOGGER.info('Deleted auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_delete'})) return {'data': res}
patch
identifier_name
lot.py
# -*- coding: utf-8 -*- from openprocurement.auctions.core.utils import ( apply_patch, context_unpack, get_now, json_view, opresource, save_auction, ) from openprocurement.auctions.core.validation import ( validate_lot_data, validate_patch_lot_data, ) from openprocurement.auctions.core.views.mixins import AuctionLotResource @opresource(name='dgfOtherAssets:Auction Lots', collection_path='/auctions/{auction_id}/lots', path='/auctions/{auction_id}/lots/{lot_id}', auctionsprocurementMethodType="dgfOtherAssets", description="Auction lots") class AuctionLotResource(AuctionLotResource): @json_view(content_type="application/json", validators=(validate_lot_data,), permission='edit_auction') def collection_post(self): """Add a lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t add lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.validated['lot'] lot.date = get_now() auction.lots.append(lot) if save_auction(self.request): self.LOGGER.info('Created auction lot {}'.format(lot.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_create'}, {'lot_id': lot.id})) self.request.response.status = 201 route = self.request.matched_route.name.replace("collection_", "") self.request.response.headers['Location'] = self.request.current_route_url(_route_name=route, lot_id=lot.id, _query={}) return {'data': lot.serialize("view")} @json_view(content_type="application/json", validators=(validate_patch_lot_data,), permission='edit_auction') def patch(self): """Update of lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t update lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return if apply_patch(self.request, src=self.request.context.serialize()): self.LOGGER.info('Updated auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_patch'}))
return {'data': self.request.context.serialize("view")} @json_view(permission='edit_auction') def delete(self): """Lot deleting """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t delete lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.context res = lot.serialize("view") auction.lots.remove(lot) if save_auction(self.request): self.LOGGER.info('Deleted auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_delete'})) return {'data': res}
random_line_split
lot.py
# -*- coding: utf-8 -*- from openprocurement.auctions.core.utils import ( apply_patch, context_unpack, get_now, json_view, opresource, save_auction, ) from openprocurement.auctions.core.validation import ( validate_lot_data, validate_patch_lot_data, ) from openprocurement.auctions.core.views.mixins import AuctionLotResource @opresource(name='dgfOtherAssets:Auction Lots', collection_path='/auctions/{auction_id}/lots', path='/auctions/{auction_id}/lots/{lot_id}', auctionsprocurementMethodType="dgfOtherAssets", description="Auction lots") class AuctionLotResource(AuctionLotResource): @json_view(content_type="application/json", validators=(validate_lot_data,), permission='edit_auction') def collection_post(self): """Add a lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t add lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.validated['lot'] lot.date = get_now() auction.lots.append(lot) if save_auction(self.request): self.LOGGER.info('Created auction lot {}'.format(lot.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_create'}, {'lot_id': lot.id})) self.request.response.status = 201 route = self.request.matched_route.name.replace("collection_", "") self.request.response.headers['Location'] = self.request.current_route_url(_route_name=route, lot_id=lot.id, _query={}) return {'data': lot.serialize("view")} @json_view(content_type="application/json", validators=(validate_patch_lot_data,), permission='edit_auction') def patch(self): """Update of lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']:
if apply_patch(self.request, src=self.request.context.serialize()): self.LOGGER.info('Updated auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_patch'})) return {'data': self.request.context.serialize("view")} @json_view(permission='edit_auction') def delete(self): """Lot deleting """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t delete lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.context res = lot.serialize("view") auction.lots.remove(lot) if save_auction(self.request): self.LOGGER.info('Deleted auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_delete'})) return {'data': res}
self.request.errors.add('body', 'data', 'Can\'t update lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return
conditional_block
lot.py
# -*- coding: utf-8 -*- from openprocurement.auctions.core.utils import ( apply_patch, context_unpack, get_now, json_view, opresource, save_auction, ) from openprocurement.auctions.core.validation import ( validate_lot_data, validate_patch_lot_data, ) from openprocurement.auctions.core.views.mixins import AuctionLotResource @opresource(name='dgfOtherAssets:Auction Lots', collection_path='/auctions/{auction_id}/lots', path='/auctions/{auction_id}/lots/{lot_id}', auctionsprocurementMethodType="dgfOtherAssets", description="Auction lots") class AuctionLotResource(AuctionLotResource): @json_view(content_type="application/json", validators=(validate_lot_data,), permission='edit_auction') def collection_post(self): """Add a lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t add lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.validated['lot'] lot.date = get_now() auction.lots.append(lot) if save_auction(self.request): self.LOGGER.info('Created auction lot {}'.format(lot.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_create'}, {'lot_id': lot.id})) self.request.response.status = 201 route = self.request.matched_route.name.replace("collection_", "") self.request.response.headers['Location'] = self.request.current_route_url(_route_name=route, lot_id=lot.id, _query={}) return {'data': lot.serialize("view")} @json_view(content_type="application/json", validators=(validate_patch_lot_data,), permission='edit_auction') def patch(self): """Update of lot """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t update lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return if apply_patch(self.request, src=self.request.context.serialize()): self.LOGGER.info('Updated auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_patch'})) return {'data': self.request.context.serialize("view")} @json_view(permission='edit_auction') def delete(self):
"""Lot deleting """ auction = self.request.validated['auction'] if auction.status not in ['active.tendering']: self.request.errors.add('body', 'data', 'Can\'t delete lot in current ({}) auction status'.format(auction.status)) self.request.errors.status = 403 return lot = self.request.context res = lot.serialize("view") auction.lots.remove(lot) if save_auction(self.request): self.LOGGER.info('Deleted auction lot {}'.format(self.request.context.id), extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_delete'})) return {'data': res}
identifier_body
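Each lot.py row above cuts the same view module, and every handler in it repeats one guard: check the auction status, record a 403 error on the request and return, otherwise apply the change and log it. The sketch below isolates that guard with invented stand-in objects — `FakeRequest`, `FakeAuction`, `FakeErrors`, and `patch_lot` are illustrative stubs, not the openprocurement API.

```python
# Illustrative sketch, not a dataset record: the status-guard pattern the
# lot.py cells repeat, rebuilt with invented stubs instead of the real
# openprocurement request/auction objects.
class FakeErrors(object):
    def __init__(self):
        self.messages = []
        self.status = None

    def add(self, location, name, description):
        self.messages.append((location, name, description))


class FakeAuction(object):
    def __init__(self, status):
        self.status = status


class FakeRequest(object):
    def __init__(self, auction_status):
        self.validated = {'auction': FakeAuction(auction_status)}
        self.errors = FakeErrors()


def patch_lot(request):
    # Mirrors the guard used by collection_post/patch/delete in lot.py:
    # refuse the change unless the auction is still in active.tendering.
    auction = request.validated['auction']
    if auction.status not in ['active.tendering']:
        request.errors.add('body', 'data',
                           "Can't update lot in current ({}) auction status".format(auction.status))
        request.errors.status = 403
        return None
    return {'data': 'lot updated'}


ok = FakeRequest('active.tendering')
blocked = FakeRequest('complete')
assert patch_lot(ok) == {'data': 'lot updated'}
assert patch_lot(blocked) is None and blocked.errors.status == 403
```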
create_trace_graphviz.py
""" Read in the output from the trace-inputlocator script and create a GraphViz file. Pass as input the path to the yaml output of the trace-inputlocator script via config file. The output is written to the trace-inputlocator location. WHY? because the trace-inputlocator only has the GraphViz output of the last call to the script. This version re-creates the trace-data from the (merged) yaml file (the yaml output is merged if pre-existing in the output file). """ import yaml import cea.config from cea.tests.trace_inputlocator import create_graphviz_output def main(config):
if __name__ == '__main__': main(cea.config.Configuration())
with open(config.trace_inputlocator.yaml_output_file, 'r') as f: yaml_data = yaml.safe_load(f) trace_data = [] for script in yaml_data.keys(): for direction in ('input', 'output'): for locator, file in yaml_data[script][direction]: trace_data.append((direction, script, locator, file)) create_graphviz_output(trace_data, config.trace_inputlocator.graphviz_output_file)
identifier_body
create_trace_graphviz.py
""" Read in the output from the trace-inputlocator script and create a GraphViz file. Pass as input the path to the yaml output of the trace-inputlocator script via config file. The output is written to the trace-inputlocator location. WHY? because the trace-inputlocator only has the GraphViz output of the last call to the script. This version re-creates the trace-data from the (merged) yaml file (the yaml output is merged if pre-existing in the output file). """ import yaml import cea.config from cea.tests.trace_inputlocator import create_graphviz_output
trace_data = [] for script in yaml_data.keys(): for direction in ('input', 'output'): for locator, file in yaml_data[script][direction]: trace_data.append((direction, script, locator, file)) create_graphviz_output(trace_data, config.trace_inputlocator.graphviz_output_file) if __name__ == '__main__': main(cea.config.Configuration())
def main(config): with open(config.trace_inputlocator.yaml_output_file, 'r') as f: yaml_data = yaml.safe_load(f)
random_line_split
create_trace_graphviz.py
""" Read in the output from the trace-inputlocator script and create a GraphViz file. Pass as input the path to the yaml output of the trace-inputlocator script via config file. The output is written to the trace-inputlocator location. WHY? because the trace-inputlocator only has the GraphViz output of the last call to the script. This version re-creates the trace-data from the (merged) yaml file (the yaml output is merged if pre-existing in the output file). """ import yaml import cea.config from cea.tests.trace_inputlocator import create_graphviz_output def
(config): with open(config.trace_inputlocator.yaml_output_file, 'r') as f: yaml_data = yaml.safe_load(f) trace_data = [] for script in yaml_data.keys(): for direction in ('input', 'output'): for locator, file in yaml_data[script][direction]: trace_data.append((direction, script, locator, file)) create_graphviz_output(trace_data, config.trace_inputlocator.graphviz_output_file) if __name__ == '__main__': main(cea.config.Configuration())
main
identifier_name
create_trace_graphviz.py
""" Read in the output from the trace-inputlocator script and create a GraphViz file. Pass as input the path to the yaml output of the trace-inputlocator script via config file. The output is written to the trace-inputlocator location. WHY? because the trace-inputlocator only has the GraphViz output of the last call to the script. This version re-creates the trace-data from the (merged) yaml file (the yaml output is merged if pre-existing in the output file). """ import yaml import cea.config from cea.tests.trace_inputlocator import create_graphviz_output def main(config): with open(config.trace_inputlocator.yaml_output_file, 'r') as f: yaml_data = yaml.safe_load(f) trace_data = [] for script in yaml_data.keys(): for direction in ('input', 'output'): for locator, file in yaml_data[script][direction]:
create_graphviz_output(trace_data, config.trace_inputlocator.graphviz_output_file) if __name__ == '__main__': main(cea.config.Configuration())
trace_data.append((direction, script, locator, file))
conditional_block
keyboardInteractiveAuthPanel.component.ts
import { Component, Input, Output, EventEmitter, ViewChild, ElementRef, ChangeDetectionStrategy } from '@angular/core' import { KeyboardInteractivePrompt } from '../session/ssh' @Component({ selector: 'keyboard-interactive-auth-panel', template: require('./keyboardInteractiveAuthPanel.component.pug'), styles: [require('./keyboardInteractiveAuthPanel.component.scss')], changeDetection: ChangeDetectionStrategy.OnPush, }) export class
{ @Input() prompt: KeyboardInteractivePrompt @Input() step = 0 @Output() done = new EventEmitter() @ViewChild('input') input: ElementRef isPassword (): boolean { return this.prompt.prompts[this.step].prompt.toLowerCase().includes('password') || !this.prompt.prompts[this.step].echo } previous (): void { if (this.step > 0) { this.step-- } this.input.nativeElement.focus() } next (): void { if (this.step === this.prompt.prompts.length - 1) { this.prompt.respond() this.done.emit() return } this.step++ this.input.nativeElement.focus() } }
KeyboardInteractiveAuthComponent
identifier_name
keyboardInteractiveAuthPanel.component.ts
import { Component, Input, Output, EventEmitter, ViewChild, ElementRef, ChangeDetectionStrategy } from '@angular/core' import { KeyboardInteractivePrompt } from '../session/ssh' @Component({ selector: 'keyboard-interactive-auth-panel', template: require('./keyboardInteractiveAuthPanel.component.pug'), styles: [require('./keyboardInteractiveAuthPanel.component.scss')], changeDetection: ChangeDetectionStrategy.OnPush, }) export class KeyboardInteractiveAuthComponent { @Input() prompt: KeyboardInteractivePrompt @Input() step = 0 @Output() done = new EventEmitter() @ViewChild('input') input: ElementRef isPassword (): boolean { return this.prompt.prompts[this.step].prompt.toLowerCase().includes('password') || !this.prompt.prompts[this.step].echo } previous (): void { if (this.step > 0)
this.input.nativeElement.focus() } next (): void { if (this.step === this.prompt.prompts.length - 1) { this.prompt.respond() this.done.emit() return } this.step++ this.input.nativeElement.focus() } }
{ this.step-- }
conditional_block
keyboardInteractiveAuthPanel.component.ts
import { Component, Input, Output, EventEmitter, ViewChild, ElementRef, ChangeDetectionStrategy } from '@angular/core' import { KeyboardInteractivePrompt } from '../session/ssh' @Component({
}) export class KeyboardInteractiveAuthComponent { @Input() prompt: KeyboardInteractivePrompt @Input() step = 0 @Output() done = new EventEmitter() @ViewChild('input') input: ElementRef isPassword (): boolean { return this.prompt.prompts[this.step].prompt.toLowerCase().includes('password') || !this.prompt.prompts[this.step].echo } previous (): void { if (this.step > 0) { this.step-- } this.input.nativeElement.focus() } next (): void { if (this.step === this.prompt.prompts.length - 1) { this.prompt.respond() this.done.emit() return } this.step++ this.input.nativeElement.focus() } }
selector: 'keyboard-interactive-auth-panel', template: require('./keyboardInteractiveAuthPanel.component.pug'), styles: [require('./keyboardInteractiveAuthPanel.component.scss')], changeDetection: ChangeDetectionStrategy.OnPush,
random_line_split
keyboardInteractiveAuthPanel.component.ts
import { Component, Input, Output, EventEmitter, ViewChild, ElementRef, ChangeDetectionStrategy } from '@angular/core' import { KeyboardInteractivePrompt } from '../session/ssh' @Component({ selector: 'keyboard-interactive-auth-panel', template: require('./keyboardInteractiveAuthPanel.component.pug'), styles: [require('./keyboardInteractiveAuthPanel.component.scss')], changeDetection: ChangeDetectionStrategy.OnPush, }) export class KeyboardInteractiveAuthComponent { @Input() prompt: KeyboardInteractivePrompt @Input() step = 0 @Output() done = new EventEmitter() @ViewChild('input') input: ElementRef isPassword (): boolean { return this.prompt.prompts[this.step].prompt.toLowerCase().includes('password') || !this.prompt.prompts[this.step].echo } previous (): void
next (): void { if (this.step === this.prompt.prompts.length - 1) { this.prompt.respond() this.done.emit() return } this.step++ this.input.nativeElement.focus() } }
{ if (this.step > 0) { this.step-- } this.input.nativeElement.focus() }
identifier_body
mod.rs
//! Generators for file formats that can be derived from the intercom //! libraries. use std::collections::HashMap; use intercom::type_system::TypeSystemName; use intercom::typelib::{Interface, TypeInfo, TypeLib}; /// A common error type for all the generators. #[derive(Fail, Debug)] pub enum GeneratorError { #[fail(display = "IoError: {}", _0)] IoError(#[cause] ::std::io::Error), #[fail(display = "Invalid type library: {}", _0)] LibraryError(String), } impl From<::std::io::Error> for GeneratorError { fn from(e: ::std::io::Error) -> GeneratorError { GeneratorError::IoError(e) } } impl From<String> for GeneratorError { fn from(s: String) -> GeneratorError { GeneratorError::LibraryError(s) } } pub struct ModelOptions { pub type_systems: Vec<TypeSystemOptions>, } pub struct TypeSystemOptions { pub ts: TypeSystemName, pub use_full_name: bool, } pub struct LibraryContext<'a> { pub itfs_by_ref: HashMap<String, &'a Interface>, pub itfs_by_name: HashMap<String, &'a Interface>, } impl<'a> LibraryContext<'a> { fn try_from(lib: &'a TypeLib) -> Result<LibraryContext<'a>, GeneratorError> { let itfs_by_name: HashMap<String, &Interface> = lib .types .iter() .filter_map(|t| match t { TypeInfo::Interface(itf) => Some(itf), _ => None, }) .map(|itf| (itf.as_ref().name.to_string(), &**(itf.as_ref()))) .collect(); let itfs_by_ref: HashMap<String, &Interface> = lib .types .iter() .filter_map(|t| match t { TypeInfo::Class(cls) => Some(cls), _ => None, }) .flat_map(|cls| &cls.as_ref().interfaces) .map(|itf_ref| { ( itf_ref.name.to_string(), itfs_by_name[itf_ref.name.as_ref()], ) }) .collect(); Ok(LibraryContext { itfs_by_name, itfs_by_ref, }) } } /// Convert the Rust identifier from `snake_case` to `PascalCase` pub fn pascal_case<T: AsRef<str>>(input: T) -> String { let input = input.as_ref(); // Allocate the output string. We'll never increase the amount of // characters so we can reserve string buffer using the input string length. let mut output = String::new(); output.reserve(input.len()); // Process each character from the input. let mut capitalize = true; for c in input.chars() { // Check the capitalization requirement. if c == '_' { // Skip '_' but capitalize the following character. capitalize = true; } else if capitalize { // Capitalize. Add the uppercase characters. for c_up in c.to_uppercase() { output.push(c_up) } // No need to capitalize any more. capitalize = false; } else { // No need to capitalize. Just add the character as is. output.push(c); } } output }
pub mod idl;
pub mod cpp;
random_line_split
mod.rs
//! Generators for file formats that can be derived from the intercom //! libraries. use std::collections::HashMap; use intercom::type_system::TypeSystemName; use intercom::typelib::{Interface, TypeInfo, TypeLib}; /// A common error type for all the generators. #[derive(Fail, Debug)] pub enum GeneratorError { #[fail(display = "IoError: {}", _0)] IoError(#[cause] ::std::io::Error), #[fail(display = "Invalid type library: {}", _0)] LibraryError(String), } impl From<::std::io::Error> for GeneratorError { fn from(e: ::std::io::Error) -> GeneratorError { GeneratorError::IoError(e) } } impl From<String> for GeneratorError { fn from(s: String) -> GeneratorError { GeneratorError::LibraryError(s) } } pub struct ModelOptions { pub type_systems: Vec<TypeSystemOptions>, } pub struct TypeSystemOptions { pub ts: TypeSystemName, pub use_full_name: bool, } pub struct
<'a> { pub itfs_by_ref: HashMap<String, &'a Interface>, pub itfs_by_name: HashMap<String, &'a Interface>, } impl<'a> LibraryContext<'a> { fn try_from(lib: &'a TypeLib) -> Result<LibraryContext<'a>, GeneratorError> { let itfs_by_name: HashMap<String, &Interface> = lib .types .iter() .filter_map(|t| match t { TypeInfo::Interface(itf) => Some(itf), _ => None, }) .map(|itf| (itf.as_ref().name.to_string(), &**(itf.as_ref()))) .collect(); let itfs_by_ref: HashMap<String, &Interface> = lib .types .iter() .filter_map(|t| match t { TypeInfo::Class(cls) => Some(cls), _ => None, }) .flat_map(|cls| &cls.as_ref().interfaces) .map(|itf_ref| { ( itf_ref.name.to_string(), itfs_by_name[itf_ref.name.as_ref()], ) }) .collect(); Ok(LibraryContext { itfs_by_name, itfs_by_ref, }) } } /// Convert the Rust identifier from `snake_case` to `PascalCase` pub fn pascal_case<T: AsRef<str>>(input: T) -> String { let input = input.as_ref(); // Allocate the output string. We'll never increase the amount of // characters so we can reserve string buffer using the input string length. let mut output = String::new(); output.reserve(input.len()); // Process each character from the input. let mut capitalize = true; for c in input.chars() { // Check the capitalization requirement. if c == '_' { // Skip '_' but capitalize the following character. capitalize = true; } else if capitalize { // Capitalize. Add the uppercase characters. for c_up in c.to_uppercase() { output.push(c_up) } // No need to capitalize any more. capitalize = false; } else { // No need to capitalize. Just add the character as is. output.push(c); } } output } pub mod cpp; pub mod idl;
LibraryContext
identifier_name
mainSocket.js
$(function() { var FADE_TIME = 150; // ms var TYPING_TIMER_LENGTH = 400; // ms var COLORS = [ '#e21400', '#91580f', '#f8a700', '#f78b00', '#58dc00', '#287b00', '#a8f07a', '#4ae8c4', '#3b88eb', '#3824aa', '#a700ff', '#d300e7' ]; // Initialize variables var $window = $(window); var $usernameInput = $('.usernameInput'); // Input for username var $messages = $('.messages'); // Messages area var $inputMessage = $('.inputMessage'); // Input message input box var $loginPage = $('.login.page'); // The login page var $chatPage = $('.chat.page'); // The chatroom page // Prompt for setting a username var username; var connected = false; var typing = false; var lastTypingTime; var $currentInput = $usernameInput.focus(); var socket = io(); function addParticipantsMessage (data) { var message = ''; if (data.numUsers === 1) { message += "there's 1 participant"; } else { message += "there are " + data.numUsers + " participants"; } log(message); } // Sets the client's username function setUsername () { username = cleanInput($usernameInput.val().trim()); // If the username is valid if (username) { $loginPage.fadeOut(); $chatPage.show(); $loginPage.off('click'); $currentInput = $inputMessage.focus(); // Tell the server your username socket.emit('add user', username); } } // Sends a chat message function sendMessage () { var message = $inputMessage.val(); // Prevent markup from being injected into the message message = cleanInput(message); // if there is a non-empty message and a socket connection if (message && connected) { $inputMessage.val(''); addChatMessage({ username: username, message: message }); // tell server to execute 'new message' and send along one parameter socket.emit('new message', message); } } // Log a message function
(message, options) { var $el = $('<li>').addClass('log').text(message); addMessageElement($el, options); } // Adds the visual chat message to the message list function addChatMessage (data, options) { // Don't fade the message in if there is an 'X was typing' var $typingMessages = getTypingMessages(data); options = options || {}; if ($typingMessages.length !== 0) { options.fade = false; $typingMessages.remove(); } var $usernameDiv = $('<span class="username"/>') .text(data.username) .css('color', getUsernameColor(data.username)); var $messageBodyDiv = $('<span class="messageBody">') .text(data.message); var typingClass = data.typing ? 'typing' : ''; var $messageDiv = $('<li class="message"/>') .data('username', data.username) .addClass(typingClass) .append($usernameDiv, $messageBodyDiv); addMessageElement($messageDiv, options); } // Adds the visual chat typing message function addChatTyping (data) { data.typing = true; data.message = 'is typing'; addChatMessage(data); } // Removes the visual chat typing message function removeChatTyping (data) { getTypingMessages(data).fadeOut(function () { $(this).remove(); }); } // Adds a message element to the messages and scrolls to the bottom // el - The element to add as a message // options.fade - If the element should fade-in (default = true) // options.prepend - If the element should prepend // all other messages (default = false) function addMessageElement (el, options) { var $el = $(el); // Setup default options if (!options) { options = {}; } if (typeof options.fade === 'undefined') { options.fade = true; } if (typeof options.prepend === 'undefined') { options.prepend = false; } // Apply options if (options.fade) { $el.hide().fadeIn(FADE_TIME); } if (options.prepend) { $messages.prepend($el); } else { $messages.append($el); } $messages[0].scrollTop = $messages[0].scrollHeight; } // Prevents input from having injected markup function cleanInput (input) { return $('<div/>').text(input).text(); } // Updates the typing event function updateTyping () { if (connected) { if (!typing) { typing = true; socket.emit('typing'); } lastTypingTime = (new Date()).getTime(); setTimeout(function () { var typingTimer = (new Date()).getTime(); var timeDiff = typingTimer - lastTypingTime; if (timeDiff >= TYPING_TIMER_LENGTH && typing) { socket.emit('stop typing'); typing = false; } }, TYPING_TIMER_LENGTH); } } // Gets the 'X is typing' messages of a user function getTypingMessages (data) { return $('.typing.message').filter(function (i) { return $(this).data('username') === data.username; }); } // Gets the color of a username through our hash function function getUsernameColor (username) { // Compute hash code var hash = 7; for (var i = 0; i < username.length; i++) { hash = username.charCodeAt(i) + (hash << 5) - hash; } // Calculate color var index = Math.abs(hash % COLORS.length); return COLORS[index]; } // Keyboard events $window.keydown(function (event) { // Auto-focus the current input when a key is typed if (!(event.ctrlKey || event.metaKey || event.altKey)) { $currentInput.focus(); } // When the client hits ENTER on their keyboard if (event.which === 13) { if (username) { sendMessage(); socket.emit('stop typing'); typing = false; } else { setUsername(); } } }); $inputMessage.on('input', function() { updateTyping(); }); // Click events // Focus input when clicking anywhere on login page $loginPage.click(function () { $currentInput.focus(); }); // Focus input when clicking on the message input's border $inputMessage.click(function () { $inputMessage.focus(); 
}); // Socket events // Whenever the server emits 'login', log the login message socket.on('login', function (data) { connected = true; // Display the welcome message var message = "Welcome to Socket.IO Chat – "; log(message, { prepend: true }); addParticipantsMessage(data); }); // Whenever the server emits 'new message', update the chat body socket.on('new message', function (data) { addChatMessage(data); }); // Whenever the server emits 'user joined', log it in the chat body socket.on('user joined', function (data) { log(data.username + ' joined'); addParticipantsMessage(data); }); // Whenever the server emits 'user left', log it in the chat body socket.on('user left', function (data) { log(data.username + ' left'); addParticipantsMessage(data); removeChatTyping(data); }); // Whenever the server emits 'typing', show the typing message socket.on('typing', function (data) { addChatTyping(data); }); // Whenever the server emits 'stop typing', kill the typing message socket.on('stop typing', function (data) { removeChatTyping(data); }); socket.on('disconnect', function () { log('you have been disconnected'); }); socket.on('reconnect', function () { log('you have been reconnected'); if (username) { socket.emit('add user', username); } }); socket.on('reconnect_error', function () { log('attempt to reconnect has failed'); }); });
log
identifier_name
mainSocket.js
$(function() { var FADE_TIME = 150; // ms var TYPING_TIMER_LENGTH = 400; // ms var COLORS = [ '#e21400', '#91580f', '#f8a700', '#f78b00', '#58dc00', '#287b00', '#a8f07a', '#4ae8c4', '#3b88eb', '#3824aa', '#a700ff', '#d300e7' ]; // Initialize variables var $window = $(window); var $usernameInput = $('.usernameInput'); // Input for username var $messages = $('.messages'); // Messages area var $inputMessage = $('.inputMessage'); // Input message input box var $loginPage = $('.login.page'); // The login page var $chatPage = $('.chat.page'); // The chatroom page // Prompt for setting a username var username; var connected = false; var typing = false; var lastTypingTime; var $currentInput = $usernameInput.focus(); var socket = io(); function addParticipantsMessage (data) { var message = ''; if (data.numUsers === 1) { message += "there's 1 participant"; } else { message += "there are " + data.numUsers + " participants"; } log(message); } // Sets the client's username function setUsername () { username = cleanInput($usernameInput.val().trim()); // If the username is valid if (username) { $loginPage.fadeOut(); $chatPage.show(); $loginPage.off('click'); $currentInput = $inputMessage.focus(); // Tell the server your username socket.emit('add user', username); } } // Sends a chat message function sendMessage () { var message = $inputMessage.val(); // Prevent markup from being injected into the message message = cleanInput(message); // if there is a non-empty message and a socket connection if (message && connected) { $inputMessage.val(''); addChatMessage({ username: username, message: message }); // tell server to execute 'new message' and send along one parameter socket.emit('new message', message); } } // Log a message function log (message, options) { var $el = $('<li>').addClass('log').text(message); addMessageElement($el, options); } // Adds the visual chat message to the message list function addChatMessage (data, options) { // Don't fade the message in if there is an 'X was typing' var $typingMessages = getTypingMessages(data); options = options || {}; if ($typingMessages.length !== 0) { options.fade = false; $typingMessages.remove(); } var $usernameDiv = $('<span class="username"/>') .text(data.username) .css('color', getUsernameColor(data.username)); var $messageBodyDiv = $('<span class="messageBody">') .text(data.message); var typingClass = data.typing ? 
'typing' : ''; var $messageDiv = $('<li class="message"/>') .data('username', data.username) .addClass(typingClass) .append($usernameDiv, $messageBodyDiv); addMessageElement($messageDiv, options); } // Adds the visual chat typing message function addChatTyping (data) { data.typing = true; data.message = 'is typing'; addChatMessage(data); } // Removes the visual chat typing message function removeChatTyping (data) { getTypingMessages(data).fadeOut(function () { $(this).remove(); }); } // Adds a message element to the messages and scrolls to the bottom // el - The element to add as a message // options.fade - If the element should fade-in (default = true) // options.prepend - If the element should prepend // all other messages (default = false) function addMessageElement (el, options) { var $el = $(el); // Setup default options if (!options) { options = {}; } if (typeof options.fade === 'undefined') { options.fade = true; } if (typeof options.prepend === 'undefined') { options.prepend = false; } // Apply options if (options.fade) { $el.hide().fadeIn(FADE_TIME); } if (options.prepend) { $messages.prepend($el); } else { $messages.append($el); } $messages[0].scrollTop = $messages[0].scrollHeight; } // Prevents input from having injected markup function cleanInput (input) { return $('<div/>').text(input).text(); } // Updates the typing event function updateTyping () { if (connected) { if (!typing) { typing = true; socket.emit('typing'); } lastTypingTime = (new Date()).getTime(); setTimeout(function () { var typingTimer = (new Date()).getTime(); var timeDiff = typingTimer - lastTypingTime; if (timeDiff >= TYPING_TIMER_LENGTH && typing) { socket.emit('stop typing'); typing = false; } }, TYPING_TIMER_LENGTH); } } // Gets the 'X is typing' messages of a user function getTypingMessages (data) { return $('.typing.message').filter(function (i) { return $(this).data('username') === data.username; }); } // Gets the color of a username through our hash function function getUsernameColor (username) { // Compute hash code var hash = 7; for (var i = 0; i < username.length; i++) { hash = username.charCodeAt(i) + (hash << 5) - hash; } // Calculate color var index = Math.abs(hash % COLORS.length); return COLORS[index]; } // Keyboard events $window.keydown(function (event) { // Auto-focus the current input when a key is typed if (!(event.ctrlKey || event.metaKey || event.altKey)) { $currentInput.focus(); } // When the client hits ENTER on their keyboard if (event.which === 13) { if (username) { sendMessage(); socket.emit('stop typing'); typing = false; } else { setUsername(); } } }); $inputMessage.on('input', function() { updateTyping(); }); // Click events // Focus input when clicking anywhere on login page $loginPage.click(function () { $currentInput.focus(); }); // Focus input when clicking on the message input's border $inputMessage.click(function () { $inputMessage.focus(); }); // Socket events // Whenever the server emits 'login', log the login message socket.on('login', function (data) { connected = true; // Display the welcome message var message = "Welcome to Socket.IO Chat – "; log(message, { prepend: true }); addParticipantsMessage(data); }); // Whenever the server emits 'new message', update the chat body socket.on('new message', function (data) { addChatMessage(data); }); // Whenever the server emits 'user joined', log it in the chat body socket.on('user joined', function (data) { log(data.username + ' joined'); addParticipantsMessage(data); }); // Whenever the server emits 'user left', log it in
the chat body socket.on('user left', function (data) { log(data.username + ' left'); addParticipantsMessage(data); removeChatTyping(data); }); // Whenever the server emits 'typing', show the typing message socket.on('typing', function (data) { addChatTyping(data); }); // Whenever the server emits 'stop typing', kill the typing message socket.on('stop typing', function (data) { removeChatTyping(data); });
log('you have been disconnected'); }); socket.on('reconnect', function () { log('you have been reconnected'); if (username) { socket.emit('add user', username); } }); socket.on('reconnect_error', function () { log('attempt to reconnect has failed'); }); });
socket.on('disconnect', function () {
random_line_split
mainSocket.js
$(function() { var FADE_TIME = 150; // ms var TYPING_TIMER_LENGTH = 400; // ms var COLORS = [ '#e21400', '#91580f', '#f8a700', '#f78b00', '#58dc00', '#287b00', '#a8f07a', '#4ae8c4', '#3b88eb', '#3824aa', '#a700ff', '#d300e7' ]; // Initialize variables var $window = $(window); var $usernameInput = $('.usernameInput'); // Input for username var $messages = $('.messages'); // Messages area var $inputMessage = $('.inputMessage'); // Input message input box var $loginPage = $('.login.page'); // The login page var $chatPage = $('.chat.page'); // The chatroom page // Prompt for setting a username var username; var connected = false; var typing = false; var lastTypingTime; var $currentInput = $usernameInput.focus(); var socket = io(); function addParticipantsMessage (data) { var message = ''; if (data.numUsers === 1) { message += "there's 1 participant"; } else { message += "there are " + data.numUsers + " participants"; } log(message); } // Sets the client's username function setUsername () { username = cleanInput($usernameInput.val().trim()); // If the username is valid if (username)
} // Sends a chat message function sendMessage () { var message = $inputMessage.val(); // Prevent markup from being injected into the message message = cleanInput(message); // if there is a non-empty message and a socket connection if (message && connected) { $inputMessage.val(''); addChatMessage({ username: username, message: message }); // tell server to execute 'new message' and send along one parameter socket.emit('new message', message); } } // Log a message function log (message, options) { var $el = $('<li>').addClass('log').text(message); addMessageElement($el, options); } // Adds the visual chat message to the message list function addChatMessage (data, options) { // Don't fade the message in if there is an 'X was typing' var $typingMessages = getTypingMessages(data); options = options || {}; if ($typingMessages.length !== 0) { options.fade = false; $typingMessages.remove(); } var $usernameDiv = $('<span class="username"/>') .text(data.username) .css('color', getUsernameColor(data.username)); var $messageBodyDiv = $('<span class="messageBody">') .text(data.message); var typingClass = data.typing ? 'typing' : ''; var $messageDiv = $('<li class="message"/>') .data('username', data.username) .addClass(typingClass) .append($usernameDiv, $messageBodyDiv); addMessageElement($messageDiv, options); } // Adds the visual chat typing message function addChatTyping (data) { data.typing = true; data.message = 'is typing'; addChatMessage(data); } // Removes the visual chat typing message function removeChatTyping (data) { getTypingMessages(data).fadeOut(function () { $(this).remove(); }); } // Adds a message element to the messages and scrolls to the bottom // el - The element to add as a message // options.fade - If the element should fade-in (default = true) // options.prepend - If the element should prepend // all other messages (default = false) function addMessageElement (el, options) { var $el = $(el); // Setup default options if (!options) { options = {}; } if (typeof options.fade === 'undefined') { options.fade = true; } if (typeof options.prepend === 'undefined') { options.prepend = false; } // Apply options if (options.fade) { $el.hide().fadeIn(FADE_TIME); } if (options.prepend) { $messages.prepend($el); } else { $messages.append($el); } $messages[0].scrollTop = $messages[0].scrollHeight; } // Prevents input from having injected markup function cleanInput (input) { return $('<div/>').text(input).text(); } // Updates the typing event function updateTyping () { if (connected) { if (!typing) { typing = true; socket.emit('typing'); } lastTypingTime = (new Date()).getTime(); setTimeout(function () { var typingTimer = (new Date()).getTime(); var timeDiff = typingTimer - lastTypingTime; if (timeDiff >= TYPING_TIMER_LENGTH && typing) { socket.emit('stop typing'); typing = false; } }, TYPING_TIMER_LENGTH); } } // Gets the 'X is typing' messages of a user function getTypingMessages (data) { return $('.typing.message').filter(function (i) { return $(this).data('username') === data.username; }); } // Gets the color of a username through our hash function function getUsernameColor (username) { // Compute hash code var hash = 7; for (var i = 0; i < username.length; i++) { hash = username.charCodeAt(i) + (hash << 5) - hash; } // Calculate color var index = Math.abs(hash % COLORS.length); return COLORS[index]; } // Keyboard events $window.keydown(function (event) { // Auto-focus the current input when a key is typed if (!(event.ctrlKey || event.metaKey || event.altKey)) { $currentInput.focus(); } // 
When the client hits ENTER on their keyboard if (event.which === 13) { if (username) { sendMessage(); socket.emit('stop typing'); typing = false; } else { setUsername(); } } }); $inputMessage.on('input', function() { updateTyping(); }); // Click events // Focus input when clicking anywhere on login page $loginPage.click(function () { $currentInput.focus(); }); // Focus input when clicking on the message input's border $inputMessage.click(function () { $inputMessage.focus(); }); // Socket events // Whenever the server emits 'login', log the login message socket.on('login', function (data) { connected = true; // Display the welcome message var message = "Welcome to Socket.IO Chat – "; log(message, { prepend: true }); addParticipantsMessage(data); }); // Whenever the server emits 'new message', update the chat body socket.on('new message', function (data) { addChatMessage(data); }); // Whenever the server emits 'user joined', log it in the chat body socket.on('user joined', function (data) { log(data.username + ' joined'); addParticipantsMessage(data); }); // Whenever the server emits 'user left', log it in the chat body socket.on('user left', function (data) { log(data.username + ' left'); addParticipantsMessage(data); removeChatTyping(data); }); // Whenever the server emits 'typing', show the typing message socket.on('typing', function (data) { addChatTyping(data); }); // Whenever the server emits 'stop typing', kill the typing message socket.on('stop typing', function (data) { removeChatTyping(data); }); socket.on('disconnect', function () { log('you have been disconnected'); }); socket.on('reconnect', function () { log('you have been reconnected'); if (username) { socket.emit('add user', username); } }); socket.on('reconnect_error', function () { log('attempt to reconnect has failed'); }); });
{ $loginPage.fadeOut(); $chatPage.show(); $loginPage.off('click'); $currentInput = $inputMessage.focus(); // Tell the server your username socket.emit('add user', username); }
conditional_block
mainSocket.js
$(function() { var FADE_TIME = 150; // ms var TYPING_TIMER_LENGTH = 400; // ms var COLORS = [ '#e21400', '#91580f', '#f8a700', '#f78b00', '#58dc00', '#287b00', '#a8f07a', '#4ae8c4', '#3b88eb', '#3824aa', '#a700ff', '#d300e7' ]; // Initialize variables var $window = $(window); var $usernameInput = $('.usernameInput'); // Input for username var $messages = $('.messages'); // Messages area var $inputMessage = $('.inputMessage'); // Input message input box var $loginPage = $('.login.page'); // The login page var $chatPage = $('.chat.page'); // The chatroom page // Prompt for setting a username var username; var connected = false; var typing = false; var lastTypingTime; var $currentInput = $usernameInput.focus(); var socket = io(); function addParticipantsMessage (data) { var message = ''; if (data.numUsers === 1) { message += "there's 1 participant"; } else { message += "there are " + data.numUsers + " participants"; } log(message); } // Sets the client's username function setUsername () { username = cleanInput($usernameInput.val().trim()); // If the username is valid if (username) { $loginPage.fadeOut(); $chatPage.show(); $loginPage.off('click'); $currentInput = $inputMessage.focus(); // Tell the server your username socket.emit('add user', username); } } // Sends a chat message function sendMessage () { var message = $inputMessage.val(); // Prevent markup from being injected into the message message = cleanInput(message); // if there is a non-empty message and a socket connection if (message && connected) { $inputMessage.val(''); addChatMessage({ username: username, message: message }); // tell server to execute 'new message' and send along one parameter socket.emit('new message', message); } } // Log a message function log (message, options) { var $el = $('<li>').addClass('log').text(message); addMessageElement($el, options); } // Adds the visual chat message to the message list function addChatMessage (data, options) { // Don't fade the message in if there is an 'X was typing' var $typingMessages = getTypingMessages(data); options = options || {}; if ($typingMessages.length !== 0) { options.fade = false; $typingMessages.remove(); } var $usernameDiv = $('<span class="username"/>') .text(data.username) .css('color', getUsernameColor(data.username)); var $messageBodyDiv = $('<span class="messageBody">') .text(data.message); var typingClass = data.typing ? 
'typing' : ''; var $messageDiv = $('<li class="message"/>') .data('username', data.username) .addClass(typingClass) .append($usernameDiv, $messageBodyDiv); addMessageElement($messageDiv, options); } // Adds the visual chat typing message function addChatTyping (data) { data.typing = true; data.message = 'is typing'; addChatMessage(data); } // Removes the visual chat typing message function removeChatTyping (data) { getTypingMessages(data).fadeOut(function () { $(this).remove(); }); } // Adds a message element to the messages and scrolls to the bottom // el - The element to add as a message // options.fade - If the element should fade-in (default = true) // options.prepend - If the element should prepend // all other messages (default = false) function addMessageElement (el, options) { var $el = $(el); // Setup default options if (!options) { options = {}; } if (typeof options.fade === 'undefined') { options.fade = true; } if (typeof options.prepend === 'undefined') { options.prepend = false; } // Apply options if (options.fade) { $el.hide().fadeIn(FADE_TIME); } if (options.prepend) { $messages.prepend($el); } else { $messages.append($el); } $messages[0].scrollTop = $messages[0].scrollHeight; } // Prevents input from having injected markup function cleanInput (input) { return $('<div/>').text(input).text(); } // Updates the typing event function updateTyping () { if (connected) { if (!typing) { typing = true; socket.emit('typing'); } lastTypingTime = (new Date()).getTime(); setTimeout(function () { var typingTimer = (new Date()).getTime(); var timeDiff = typingTimer - lastTypingTime; if (timeDiff >= TYPING_TIMER_LENGTH && typing) { socket.emit('stop typing'); typing = false; } }, TYPING_TIMER_LENGTH); } } // Gets the 'X is typing' messages of a user function getTypingMessages (data) { return $('.typing.message').filter(function (i) { return $(this).data('username') === data.username; }); } // Gets the color of a username through our hash function function getUsernameColor (username)
// Keyboard events $window.keydown(function (event) { // Auto-focus the current input when a key is typed if (!(event.ctrlKey || event.metaKey || event.altKey)) { $currentInput.focus(); } // When the client hits ENTER on their keyboard if (event.which === 13) { if (username) { sendMessage(); socket.emit('stop typing'); typing = false; } else { setUsername(); } } }); $inputMessage.on('input', function() { updateTyping(); }); // Click events // Focus input when clicking anywhere on login page $loginPage.click(function () { $currentInput.focus(); }); // Focus input when clicking on the message input's border $inputMessage.click(function () { $inputMessage.focus(); }); // Socket events // Whenever the server emits 'login', log the login message socket.on('login', function (data) { connected = true; // Display the welcome message var message = "Welcome to Socket.IO Chat – "; log(message, { prepend: true }); addParticipantsMessage(data); }); // Whenever the server emits 'new message', update the chat body socket.on('new message', function (data) { addChatMessage(data); }); // Whenever the server emits 'user joined', log it in the chat body socket.on('user joined', function (data) { log(data.username + ' joined'); addParticipantsMessage(data); }); // Whenever the server emits 'user left', log it in the chat body socket.on('user left', function (data) { log(data.username + ' left'); addParticipantsMessage(data); removeChatTyping(data); }); // Whenever the server emits 'typing', show the typing message socket.on('typing', function (data) { addChatTyping(data); }); // Whenever the server emits 'stop typing', kill the typing message socket.on('stop typing', function (data) { removeChatTyping(data); }); socket.on('disconnect', function () { log('you have been disconnected'); }); socket.on('reconnect', function () { log('you have been reconnected'); if (username) { socket.emit('add user', username); } }); socket.on('reconnect_error', function () { log('attempt to reconnect has failed'); }); });
{ // Compute hash code var hash = 7; for (var i = 0; i < username.length; i++) { hash = username.charCodeAt(i) + (hash << 5) - hash; } // Calculate color var index = Math.abs(hash % COLORS.length); return COLORS[index]; }
identifier_body
hDBSessionMaker.py
# create a Session object by sessionmaker import os import sys import ConfigParser import sqlalchemy.orm # get path to taskmanager. it is assumed that this script is in the lib directory of # the taskmanager package. tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) ) etcpath = '%s/etc' % tmpath # for configuration files # library is in the same folder from hDatabase import Base class hDBSessionMaker( object ): def __init__( self, configFileName=None, createTables=False, echo=False ): if not configFileName: # use default config file
# read config file if os.path.exists( configFileName ): config = ConfigParser.ConfigParser() config.read( configFileName ) else: sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) ) sys.exit( -1 ) databaseDialect = config.get( 'DATABASE', 'database_dialect' ) databaseHost = config.get( 'DATABASE', 'database_host' ) databasePort = config.get( 'DATABASE', 'database_port' ) databaseName = config.get( 'DATABASE', 'database_name' ) databaseUsername = config.get( 'DATABASE', 'database_username' ) databasePassword = config.get( 'DATABASE', 'database_password' ) ## @var engine #The engine that is connected to the database #use "echo=True" for SQL printing statements to stdout self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect, user=databaseUsername, password=databasePassword, host=databaseHost, port=databasePort, name=databaseName), pool_size=50, # number of connections to keep open inside the connection pool max_overflow=100, # number of connections to allow in connection pool "overflow", that is connections that can be opened above and beyond the pool_size setting, which defaults to five. pool_recycle=3600, # this setting causes the pool to recycle connections after the given number of seconds has passed. echo=False ) # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all( self.engine ) ## @var DBsession # define a Session class which will serve as a factory for new Session objects # # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html: # Session is a regular Python class which can be directly instantiated. However, to standardize how sessions are # configured and acquired, the sessionmaker class is normally used to create a top level Session configuration # which can then be used throughout an application without the need to repeat the configurational arguments. # sessionmaker() is a Session factory. A factory is just something that produces a new object when called. # # Thread local factory for sessions. See http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions # SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine ) self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) ) # default config file for database connection configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath)
conditional_block
hDBSessionMaker.py
# create a Session object by sessionmaker import os import sys import ConfigParser import sqlalchemy.orm # get path to taskmanager. it is assumed that this script is in the lib directory of # the taskmanager package. tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) ) etcpath = '%s/etc' % tmpath # for configuration files # library is in the same folder from hDatabase import Base class hDBSessionMaker( object ): def __init__( self, configFileName=None, createTables=False, echo=False ): if not configFileName: # use default config file etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) ) # default config file for database connection configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath) # read config file if os.path.exists( configFileName ): config = ConfigParser.ConfigParser() config.read( configFileName ) else: sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) ) sys.exit( -1 ) databaseDialect = config.get( 'DATABASE', 'database_dialect' ) databaseHost = config.get( 'DATABASE', 'database_host' ) databasePort = config.get( 'DATABASE', 'database_port' ) databaseName = config.get( 'DATABASE', 'database_name' ) databaseUsername = config.get( 'DATABASE', 'database_username' ) databasePassword = config.get( 'DATABASE', 'database_password' ) ## @var engine #The engine that is connected to the database #use "echo=True" for SQL printing statements to stdout self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect, user=databaseUsername, password=databasePassword,
pool_size=50, # number of connections to keep open inside the connection pool max_overflow=100, # number of connections to allow in connection pool "overflow", that is connections that can be opened above and beyond the pool_size setting, which defaults to five. pool_recycle=3600, # this setting causes the pool to recycle connections after the given number of seconds has passed. echo=False ) # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all( self.engine ) ## @var DBsession # define a Session class which will serve as a factory for new Session objects # # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html: # Session is a regular Python class which can be directly instantiated. However, to standardize how sessions are # configured and acquired, the sessionmaker class is normally used to create a top level Session configuration # which can then be used throughout an application without the need to repeat the configurational arguments. # sessionmaker() is a Session factory. A factory is just something that produces a new object when called. # # Thread local factory for sessions. See http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions # SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine ) self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
host=databaseHost, port=databasePort, name=databaseName),
random_line_split
hDBSessionMaker.py
# create a Session object by sessionmaker import os import sys import ConfigParser import sqlalchemy.orm # get path to taskmanager. it is assumed that this script is in the lib directory of # the taskmanager package. tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) ) etcpath = '%s/etc' % tmpath # for configuration files # library is in the same folder from hDatabase import Base class hDBSessionMaker( object ): def
( self, configFileName=None, createTables=False, echo=False ): if not configFileName: # use default config file etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) ) # default config file for database connection configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath) # read config file if os.path.exists( configFileName ): config = ConfigParser.ConfigParser() config.read( configFileName ) else: sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) ) sys.exit( -1 ) databaseDialect = config.get( 'DATABASE', 'database_dialect' ) databaseHost = config.get( 'DATABASE', 'database_host' ) databasePort = config.get( 'DATABASE', 'database_port' ) databaseName = config.get( 'DATABASE', 'database_name' ) databaseUsername = config.get( 'DATABASE', 'database_username' ) databasePassword = config.get( 'DATABASE', 'database_password' ) ## @var engine #The engine that is connected to the database #use "echo=True" for SQL printing statements to stdout self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect, user=databaseUsername, password=databasePassword, host=databaseHost, port=databasePort, name=databaseName), pool_size=50, # number of connections to keep open inside the connection pool max_overflow=100, # number of connections to allow in connection pool "overflow", that is connections that can be opened above and beyond the pool_size setting, which defaults to five. pool_recycle=3600, # this setting causes the pool to recycle connections after the given number of seconds has passed. echo=False ) # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all( self.engine ) ## @var DBsession # define a Session class which will serve as a factory for new Session objects # # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html: # Session is a regular Python class which can be directly instantiated. However, to standardize how sessions are # configured and acquired, the sessionmaker class is normally used to create a top level Session configuration # which can then be used throughout an application without the need to repeat the configurational arguments. # sessionmaker() is a Session factory. A factory is just something that produces a new object when called. # # Thread local factory for sessions. See http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions # SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine ) self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
__init__
identifier_name
hDBSessionMaker.py
# create a Session object by sessionmaker import os import sys import ConfigParser import sqlalchemy.orm # get path to taskmanager. it is assumed that this script is in the lib directory of # the taskmanager package. tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) ) etcpath = '%s/etc' % tmpath # for configuration files # library is in the same folder from hDatabase import Base class hDBSessionMaker( object ): def __init__( self, configFileName=None, createTables=False, echo=False ):
databasePassword = config.get( 'DATABASE', 'database_password' ) ## @var engine #The engine that is connected to the database #use "echo=True" for SQL printing statements to stdout self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect, user=databaseUsername, password=databasePassword, host=databaseHost, port=databasePort, name=databaseName), pool_size=50, # number of connections to keep open inside the connection pool max_overflow=100, # number of connections to allow in connection pool "overflow", that is connections that can be opened above and beyond the pool_size setting, which defaults to five. pool_recycle=3600, # this setting causes the pool to recycle connections after the given number of seconds has passed. echo=False ) # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all( self.engine ) ## @var DBsession # define a Session class which will serve as a factory for new Session objects # # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html: # Session is a regular Python class which can be directly instantiated. However, to standardize how sessions are # configured and acquired, the sessionmaker class is normally used to create a top level Session configuration # which can then be used throughout an application without the need to repeat the configurational arguments. # sessionmaker() is a Session factory. A factory is just something that produces a new object when called. # # Thread local factory for sessions. See http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions # SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine ) self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
if not configFileName: # use default config file etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) ) # default config file for database connection configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath) # read config file if os.path.exists( configFileName ): config = ConfigParser.ConfigParser() config.read( configFileName ) else: sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) ) sys.exit( -1 ) databaseDialect = config.get( 'DATABASE', 'database_dialect' ) databaseHost = config.get( 'DATABASE', 'database_host' ) databasePort = config.get( 'DATABASE', 'database_port' ) databaseName = config.get( 'DATABASE', 'database_name' ) databaseUsername = config.get( 'DATABASE', 'database_username' )
identifier_body
index.js
import { createFilter, makeLegalIdentifier } from 'rollup-pluginutils'; export default function json(options = {})
if (Object.prototype.toString.call(data) !== '[object Object]') { code = `export default ${json};`; ast.body.push({ type: 'ExportDefaultDeclaration', start: 0, end: code.length, declaration: { type: 'Literal', start: 15, end: code.length - 1, value: null, raw: 'null' } }); } else { const indent = 'indent' in options ? options.indent : '\t'; const validKeys = []; const invalidKeys = []; Object.keys(data).forEach(key => { if (key === makeLegalIdentifier(key)) { validKeys.push(key); } else { invalidKeys.push(key); } }); let char = 0; validKeys.forEach(key => { const declarationType = options.preferConst ? 'const' : 'var'; const declaration = `export ${declarationType} ${key} = ${JSON.stringify(data[key])};`; const start = char; const end = start + declaration.length; // generate fake AST node while we're here ast.body.push({ type: 'ExportNamedDeclaration', start: char, end: char + declaration.length, declaration: { type: 'VariableDeclaration', start: start + 7, // 'export '.length end, declarations: [ { type: 'VariableDeclarator', start: start + 7 + declarationType.length + 1, // `export ${declarationType} `.length end: end - 1, id: { type: 'Identifier', start: start + 7 + declarationType.length + 1, // `export ${declarationType} `.length end: start + 7 + declarationType.length + 1 + key.length, // `export ${declarationType} ${key}`.length name: key }, init: { type: 'Literal', start: start + 7 + declarationType.length + 1 + key.length + 3, // `export ${declarationType} ${key} = `.length end: end - 1, value: null, raw: 'null' } } ], kind: declarationType }, specifiers: [], source: null }); char = end + 1; code += `${declaration}\n`; }); const defaultExportNode = { type: 'ExportDefaultDeclaration', start: char, end: null, declaration: { type: 'ObjectExpression', start: char + 15, end: null, properties: [] } }; char += 17 + indent.length; // 'export default {\n\t'.length' const defaultExportRows = validKeys .map(key => { const row = `${key}: ${key}`; const start = char; const end = start + row.length; defaultExportNode.declaration.properties.push({ type: 'Property', start, end, method: false, shorthand: false, computed: false, key: { type: 'Identifier', start, end: start + key.length, name: key }, value: { type: 'Identifier', start: start + key.length + 2, end, name: key }, kind: 'init' }); char += row.length + (2 + indent.length); // ',\n\t'.length return row; }) .concat( invalidKeys.map(key => `"${key}": ${JSON.stringify(data[key])}`) ); code += `export default {\n${indent}${defaultExportRows.join(`,\n${indent}`)}\n};`; ast.body.push(defaultExportNode); const end = code.length; defaultExportNode.declaration.end = end - 1; defaultExportNode.end = end; } ast.end = code.length; return { ast, code, map: { mappings: '' } }; } }; }
{ const filter = createFilter(options.include, options.exclude); return { name: 'json', transform(json, id) { if (id.slice(-5) !== '.json') return null; if (!filter(id)) return null; const data = JSON.parse(json); let code = ''; const ast = { type: 'Program', sourceType: 'module', start: 0, end: null, body: [] };
identifier_body
index.js
import { createFilter, makeLegalIdentifier } from 'rollup-pluginutils'; export default function
(options = {}) { const filter = createFilter(options.include, options.exclude); return { name: 'json', transform(json, id) { if (id.slice(-5) !== '.json') return null; if (!filter(id)) return null; const data = JSON.parse(json); let code = ''; const ast = { type: 'Program', sourceType: 'module', start: 0, end: null, body: [] }; if (Object.prototype.toString.call(data) !== '[object Object]') { code = `export default ${json};`; ast.body.push({ type: 'ExportDefaultDeclaration', start: 0, end: code.length, declaration: { type: 'Literal', start: 15, end: code.length - 1, value: null, raw: 'null' } }); } else { const indent = 'indent' in options ? options.indent : '\t'; const validKeys = []; const invalidKeys = []; Object.keys(data).forEach(key => { if (key === makeLegalIdentifier(key)) { validKeys.push(key); } else { invalidKeys.push(key); } }); let char = 0; validKeys.forEach(key => { const declarationType = options.preferConst ? 'const' : 'var'; const declaration = `export ${declarationType} ${key} = ${JSON.stringify(data[key])};`; const start = char; const end = start + declaration.length; // generate fake AST node while we're here ast.body.push({ type: 'ExportNamedDeclaration', start: char, end: char + declaration.length, declaration: { type: 'VariableDeclaration', start: start + 7, // 'export '.length end, declarations: [ { type: 'VariableDeclarator', start: start + 7 + declarationType.length + 1, // `export ${declarationType} `.length end: end - 1, id: { type: 'Identifier', start: start + 7 + declarationType.length + 1, // `export ${declarationType} `.length end: start + 7 + declarationType.length + 1 + key.length, // `export ${declarationType} ${key}`.length name: key }, init: { type: 'Literal', start: start + 7 + declarationType.length + 1 + key.length + 3, // `export ${declarationType} ${key} = `.length end: end - 1, value: null, raw: 'null' } } ], kind: declarationType }, specifiers: [], source: null }); char = end + 1; code += `${declaration}\n`; }); const defaultExportNode = { type: 'ExportDefaultDeclaration', start: char, end: null, declaration: { type: 'ObjectExpression', start: char + 15, end: null, properties: [] } }; char += 17 + indent.length; // 'export default {\n\t'.length' const defaultExportRows = validKeys .map(key => { const row = `${key}: ${key}`; const start = char; const end = start + row.length; defaultExportNode.declaration.properties.push({ type: 'Property', start, end, method: false, shorthand: false, computed: false, key: { type: 'Identifier', start, end: start + key.length, name: key }, value: { type: 'Identifier', start: start + key.length + 2, end, name: key }, kind: 'init' }); char += row.length + (2 + indent.length); // ',\n\t'.length return row; }) .concat( invalidKeys.map(key => `"${key}": ${JSON.stringify(data[key])}`) ); code += `export default {\n${indent}${defaultExportRows.join(`,\n${indent}`)}\n};`; ast.body.push(defaultExportNode); const end = code.length; defaultExportNode.declaration.end = end - 1; defaultExportNode.end = end; } ast.end = code.length; return { ast, code, map: { mappings: '' } }; } }; }
json
identifier_name