file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
12.1k
| suffix
large_stringlengths 0
12k
| middle
large_stringlengths 0
7.51k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn find_house_number_part2(presents : usize) {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 2: House {} received {} presents", i+1, e);
break;
}
}
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i; |
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
} | }
} | random_line_split |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn find_house_number_part2(presents : usize) | break;
}
}
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
}
| {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 2: House {} received {} presents", i+1, e); | identifier_body |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn | (presents : usize) {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 2: House {} received {} presents", i+1, e);
break;
}
}
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
}
| find_house_number_part2 | identifier_name |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn find_house_number_part2(presents : usize) {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents |
}
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
}
| {
println!("Part 2: House {} received {} presents", i+1, e);
break;
} | conditional_block |
server_usage.py | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
authorize = extensions.soft_extension_authorizer('compute', 'server_usage')
class ServerUsageController(wsgi.Controller):
| server = resp_obj.obj['server']
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show' method.
self._extend_server(server, db_instance)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsagesTemplate())
servers = list(resp_obj.obj['servers'])
for server in servers:
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'detail' method.
self._extend_server(server, db_instance)
class Server_usage(extensions.ExtensionDescriptor):
"""Adds launched_at and terminated_at on Servers."""
name = "ServerUsage"
alias = "OS-SRV-USG"
namespace = ("http://docs.openstack.org/compute/ext/"
"server_usage/api/v1.1")
updated = "2013-04-29T00:00:00Z"
def get_controller_extensions(self):
controller = ServerUsageController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
def make_server(elem):
elem.set('{%s}launched_at' % Server_usage.namespace,
'%s:launched_at' % Server_usage.alias)
elem.set('{%s}terminated_at' % Server_usage.namespace,
'%s:terminated_at' % Server_usage.alias)
class ServerUsageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('server', selector='server')
make_server(root)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
class ServerUsagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('servers')
elem = xmlutil.SubTemplateElement(root, 'server', selector='servers')
make_server(elem)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
| def __init__(self, *args, **kwargs):
super(ServerUsageController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
def _extend_server(self, server, instance):
for k in ['launched_at', 'terminated_at']:
key = "%s:%s" % (Server_usage.alias, k)
# NOTE(danms): Historically, this timestamp has been generated
# merely by grabbing str(datetime) of a TZ-naive object. The
# only way we can keep that with instance objects is to strip
# the tzinfo from the stamp and str() it.
server[key] = (instance[k].replace(tzinfo=None)
if instance[k] else None)
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsageTemplate()) | identifier_body |
server_usage.py | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
authorize = extensions.soft_extension_authorizer('compute', 'server_usage')
class ServerUsageController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(ServerUsageController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
def _extend_server(self, server, instance):
for k in ['launched_at', 'terminated_at']:
key = "%s:%s" % (Server_usage.alias, k)
# NOTE(danms): Historically, this timestamp has been generated
# merely by grabbing str(datetime) of a TZ-naive object. The
# only way we can keep that with instance objects is to strip
# the tzinfo from the stamp and str() it.
server[key] = (instance[k].replace(tzinfo=None)
if instance[k] else None)
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
|
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsagesTemplate())
servers = list(resp_obj.obj['servers'])
for server in servers:
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'detail' method.
self._extend_server(server, db_instance)
class Server_usage(extensions.ExtensionDescriptor):
"""Adds launched_at and terminated_at on Servers."""
name = "ServerUsage"
alias = "OS-SRV-USG"
namespace = ("http://docs.openstack.org/compute/ext/"
"server_usage/api/v1.1")
updated = "2013-04-29T00:00:00Z"
def get_controller_extensions(self):
controller = ServerUsageController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
def make_server(elem):
elem.set('{%s}launched_at' % Server_usage.namespace,
'%s:launched_at' % Server_usage.alias)
elem.set('{%s}terminated_at' % Server_usage.namespace,
'%s:terminated_at' % Server_usage.alias)
class ServerUsageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('server', selector='server')
make_server(root)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
class ServerUsagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('servers')
elem = xmlutil.SubTemplateElement(root, 'server', selector='servers')
make_server(elem)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
| resp_obj.attach(xml=ServerUsageTemplate())
server = resp_obj.obj['server']
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show' method.
self._extend_server(server, db_instance) | conditional_block |
server_usage.py | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
authorize = extensions.soft_extension_authorizer('compute', 'server_usage')
| self.compute_api = compute.API()
def _extend_server(self, server, instance):
for k in ['launched_at', 'terminated_at']:
key = "%s:%s" % (Server_usage.alias, k)
# NOTE(danms): Historically, this timestamp has been generated
# merely by grabbing str(datetime) of a TZ-naive object. The
# only way we can keep that with instance objects is to strip
# the tzinfo from the stamp and str() it.
server[key] = (instance[k].replace(tzinfo=None)
if instance[k] else None)
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsageTemplate())
server = resp_obj.obj['server']
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show' method.
self._extend_server(server, db_instance)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsagesTemplate())
servers = list(resp_obj.obj['servers'])
for server in servers:
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'detail' method.
self._extend_server(server, db_instance)
class Server_usage(extensions.ExtensionDescriptor):
"""Adds launched_at and terminated_at on Servers."""
name = "ServerUsage"
alias = "OS-SRV-USG"
namespace = ("http://docs.openstack.org/compute/ext/"
"server_usage/api/v1.1")
updated = "2013-04-29T00:00:00Z"
def get_controller_extensions(self):
controller = ServerUsageController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
def make_server(elem):
elem.set('{%s}launched_at' % Server_usage.namespace,
'%s:launched_at' % Server_usage.alias)
elem.set('{%s}terminated_at' % Server_usage.namespace,
'%s:terminated_at' % Server_usage.alias)
class ServerUsageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('server', selector='server')
make_server(root)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
class ServerUsagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('servers')
elem = xmlutil.SubTemplateElement(root, 'server', selector='servers')
make_server(elem)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace}) | class ServerUsageController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(ServerUsageController, self).__init__(*args, **kwargs) | random_line_split |
server_usage.py | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
authorize = extensions.soft_extension_authorizer('compute', 'server_usage')
class ServerUsageController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(ServerUsageController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
def _extend_server(self, server, instance):
for k in ['launched_at', 'terminated_at']:
key = "%s:%s" % (Server_usage.alias, k)
# NOTE(danms): Historically, this timestamp has been generated
# merely by grabbing str(datetime) of a TZ-naive object. The
# only way we can keep that with instance objects is to strip
# the tzinfo from the stamp and str() it.
server[key] = (instance[k].replace(tzinfo=None)
if instance[k] else None)
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsageTemplate())
server = resp_obj.obj['server']
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show' method.
self._extend_server(server, db_instance)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsagesTemplate())
servers = list(resp_obj.obj['servers'])
for server in servers:
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'detail' method.
self._extend_server(server, db_instance)
class Server_usage(extensions.ExtensionDescriptor):
"""Adds launched_at and terminated_at on Servers."""
name = "ServerUsage"
alias = "OS-SRV-USG"
namespace = ("http://docs.openstack.org/compute/ext/"
"server_usage/api/v1.1")
updated = "2013-04-29T00:00:00Z"
def get_controller_extensions(self):
controller = ServerUsageController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
def | (elem):
elem.set('{%s}launched_at' % Server_usage.namespace,
'%s:launched_at' % Server_usage.alias)
elem.set('{%s}terminated_at' % Server_usage.namespace,
'%s:terminated_at' % Server_usage.alias)
class ServerUsageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('server', selector='server')
make_server(root)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
class ServerUsagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('servers')
elem = xmlutil.SubTemplateElement(root, 'server', selector='servers')
make_server(elem)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
| make_server | identifier_name |
functions_66.js | var searchData=
[
['file',['File',['../classsol_1_1_file.html#adadcbaa4e64c50bf4416b77318c90b1f',1,'sol::File::File(const std::string &filename, const std::string &mode)'],['../classsol_1_1_file.html#a6ec6975155acae9a784ee39252ccd974',1,'sol::File::File()=default']]],
['fillbuffer',['fillBuffer',['../classsol_1_1_audio.html#aef8ec1c7845f21b9c99fbe5f5c91144b',1,'sol::Audio']]],
['filter',['Filter',['../classsol_1_1_filter.html#ae150f88a367fbac22875fdfe1473f5a9',1,'sol::Filter']]],
['flipflop',['flipflop',['../structsol_1_1_vec2.html#afb3995ae1b0be7c66e55556b796b68a0',1,'sol::Vec2']]], | ['framebuffer',['Framebuffer',['../classsol_1_1_framebuffer.html#a494d491b79efe568626b025624595da6',1,'sol::Framebuffer']]]
]; | random_line_split |
|
Matrix.page.ts | import { element, by, $, $$, ElementFinder, ElementArrayFinder, browser, ExpectedConditions as EC } from 'protractor';
import { FamilyImage } from './Components';
import { AbstractPage } from './Abstract.page';
export class MatrixPage {
static url = `${AbstractPage.url}/matrix`;
static filterByThing: ElementFinder = $('.things-filter-button-content');
static filterByCountry: ElementFinder = $('.countries-filter-button');
static thingLinkInSearch: ElementArrayFinder = $$('.thing-name');
static searchInFilterByThing: ElementFinder = $('input[placeholder*="things"]');
static thingNameOnFilter: ElementFinder = $$('.things-filter-button-content>span').first();
static familyLink: ElementArrayFinder = $$('matrix-images div[class*="image-content"]');
static familyImages: ElementArrayFinder = $$('matrix-images div[class*="image-content"] .cell-inner');
static placePagelink: ElementFinder = $('div[class*="mini-matrix-link"]');
static thingInFilter: ElementFinder = $('.thing-name');
static bigImageFromBigSection: ElementFinder = $('.view-image-container>img');
static homeLink: ElementFinder = $('.home-description-container>a[angulartics2on="click"]');
static getThingLinkInSearchInAllTopics: ElementFinder = $$('.other-things-content .thing-name').first();
static getFloatFooterText: ElementArrayFinder = $$('div[class*="float-footer"] span');
static getAngleUp: ElementFinder = $$('.back-to-top').first();
static hamburgerMenu: ElementFinder = $('span[class="menu-icon"]');
static maybeLaterBtnOnWelcomeHeader: ElementFinder = $$('div[class*="quick-guide"] button[type*="button"]').last();
static zoomIncrease: ElementFinder = $$('button .sign').first();
static zoomDecrease: ElementFinder = $$('button .sign').get(1);
static countryInFilter: ElementArrayFinder = $$('.name');
static okButtonInCountryFilter: ElementFinder = $('.ok-img');
static familyName: ElementFinder = $('.home-description-container>h3');
static previewCloseBtn: ElementFinder = $('.close-container');
static fancyPreview: ElementFinder = $('.fancyBox-image');
static spinner: ElementFinder = $('[class="load"]');
static imagesContainer: ElementFinder = $('.images-container .flex-container');
static visitThisHomeBtn: ElementFinder = $('.home-description-container > a:nth-child(4)'); // TODO add tests class
static allFamiliesInCountryBtn: ElementFinder = $('.home-description-container > a:nth-child(5)'); // TODO add tests class
static countryInImageDescription: ElementArrayFinder = $$('.place-image-box-country');
static minimap: ElementFinder = $('#map-content');
static photographerName: ElementFinder = $('.photographer-container a:nth-child(2)'); // TODO add test class
static familyIncomeOnImage: ElementArrayFinder = $$('.place-image-box-income');
static familyIncomeInPreview: ElementFinder = $('.matrix-view-block .header-container');
static getFamily(index = 0): FamilyImage {
this.waitForSpinner();
return new FamilyImage(this.url, index);
}
static async getAllImages(): Promise<FamilyImage[]> {
return (await this.familyLink.asElementFinders_()).map((family, i) => this.getFamily(i));
}
/**
* Embed feature
*/
static heartIconsOnImage: ElementArrayFinder = $$('matrix-images .heart-circle');
static pinHeader: ElementFinder = $('.pin-header');
static getThingLinkInSearch(thingNumber: number): ElementFinder {
return this.thingLinkInSearch.get(thingNumber);
}
static getLastThing(): ElementFinder {
return this.familyLink.last();
}
static getFilter(type: string): ElementFinder |
static getShareButtonInHamburgerMenu(social: string): ElementFinder {
return $(`main-menu div[class*="share-button ${social}"]`);
}
static async waitForSpinner(): Promise<{}> {
return browser.wait(EC.invisibilityOf(this.spinner), 10000);
}
static async getFamilyIncome(index: number): Promise<number> {
return this.familyIncomeOnImage
.get(index)
.getText()
.then(income => Number(income.replace(/\D/g, '')));
}
static async getFamilyIncomeFromPreviw(index: number): Promise<number> {
return this.familyIncomeInPreview
.get(index)
.getText()
.then(income => Number(income.replace(/\D/g, '')));
}
}
| {
return element(by.id(`${type}-filter`));
} | identifier_body |
Matrix.page.ts | import { element, by, $, $$, ElementFinder, ElementArrayFinder, browser, ExpectedConditions as EC } from 'protractor';
import { FamilyImage } from './Components';
import { AbstractPage } from './Abstract.page';
export class MatrixPage {
static url = `${AbstractPage.url}/matrix`;
static filterByThing: ElementFinder = $('.things-filter-button-content');
static filterByCountry: ElementFinder = $('.countries-filter-button');
static thingLinkInSearch: ElementArrayFinder = $$('.thing-name');
static searchInFilterByThing: ElementFinder = $('input[placeholder*="things"]');
static thingNameOnFilter: ElementFinder = $$('.things-filter-button-content>span').first();
static familyLink: ElementArrayFinder = $$('matrix-images div[class*="image-content"]');
static familyImages: ElementArrayFinder = $$('matrix-images div[class*="image-content"] .cell-inner');
static placePagelink: ElementFinder = $('div[class*="mini-matrix-link"]');
static thingInFilter: ElementFinder = $('.thing-name');
static bigImageFromBigSection: ElementFinder = $('.view-image-container>img');
static homeLink: ElementFinder = $('.home-description-container>a[angulartics2on="click"]');
static getThingLinkInSearchInAllTopics: ElementFinder = $$('.other-things-content .thing-name').first();
static getFloatFooterText: ElementArrayFinder = $$('div[class*="float-footer"] span');
static getAngleUp: ElementFinder = $$('.back-to-top').first();
static hamburgerMenu: ElementFinder = $('span[class="menu-icon"]');
static maybeLaterBtnOnWelcomeHeader: ElementFinder = $$('div[class*="quick-guide"] button[type*="button"]').last();
static zoomIncrease: ElementFinder = $$('button .sign').first();
static zoomDecrease: ElementFinder = $$('button .sign').get(1);
static countryInFilter: ElementArrayFinder = $$('.name');
static okButtonInCountryFilter: ElementFinder = $('.ok-img');
static familyName: ElementFinder = $('.home-description-container>h3');
static previewCloseBtn: ElementFinder = $('.close-container');
static fancyPreview: ElementFinder = $('.fancyBox-image');
static spinner: ElementFinder = $('[class="load"]');
static imagesContainer: ElementFinder = $('.images-container .flex-container');
static visitThisHomeBtn: ElementFinder = $('.home-description-container > a:nth-child(4)'); // TODO add tests class
static allFamiliesInCountryBtn: ElementFinder = $('.home-description-container > a:nth-child(5)'); // TODO add tests class
static countryInImageDescription: ElementArrayFinder = $$('.place-image-box-country');
static minimap: ElementFinder = $('#map-content');
static photographerName: ElementFinder = $('.photographer-container a:nth-child(2)'); // TODO add test class
static familyIncomeOnImage: ElementArrayFinder = $$('.place-image-box-income');
static familyIncomeInPreview: ElementFinder = $('.matrix-view-block .header-container');
static getFamily(index = 0): FamilyImage {
this.waitForSpinner();
return new FamilyImage(this.url, index);
}
static async getAllImages(): Promise<FamilyImage[]> {
return (await this.familyLink.asElementFinders_()).map((family, i) => this.getFamily(i));
}
/**
* Embed feature
*/
static heartIconsOnImage: ElementArrayFinder = $$('matrix-images .heart-circle');
static pinHeader: ElementFinder = $('.pin-header');
static getThingLinkInSearch(thingNumber: number): ElementFinder {
return this.thingLinkInSearch.get(thingNumber);
}
static getLastThing(): ElementFinder {
return this.familyLink.last();
}
static getFilter(type: string): ElementFinder {
return element(by.id(`${type}-filter`));
}
static getShareButtonInHamburgerMenu(social: string): ElementFinder {
return $(`main-menu div[class*="share-button ${social}"]`);
}
static async | (): Promise<{}> {
return browser.wait(EC.invisibilityOf(this.spinner), 10000);
}
static async getFamilyIncome(index: number): Promise<number> {
return this.familyIncomeOnImage
.get(index)
.getText()
.then(income => Number(income.replace(/\D/g, '')));
}
static async getFamilyIncomeFromPreviw(index: number): Promise<number> {
return this.familyIncomeInPreview
.get(index)
.getText()
.then(income => Number(income.replace(/\D/g, '')));
}
}
| waitForSpinner | identifier_name |
Matrix.page.ts | import { element, by, $, $$, ElementFinder, ElementArrayFinder, browser, ExpectedConditions as EC } from 'protractor';
import { FamilyImage } from './Components';
import { AbstractPage } from './Abstract.page';
export class MatrixPage {
static url = `${AbstractPage.url}/matrix`;
static filterByThing: ElementFinder = $('.things-filter-button-content');
static filterByCountry: ElementFinder = $('.countries-filter-button');
static thingLinkInSearch: ElementArrayFinder = $$('.thing-name');
static searchInFilterByThing: ElementFinder = $('input[placeholder*="things"]');
static thingNameOnFilter: ElementFinder = $$('.things-filter-button-content>span').first();
static familyLink: ElementArrayFinder = $$('matrix-images div[class*="image-content"]');
static familyImages: ElementArrayFinder = $$('matrix-images div[class*="image-content"] .cell-inner');
static placePagelink: ElementFinder = $('div[class*="mini-matrix-link"]');
static thingInFilter: ElementFinder = $('.thing-name');
static bigImageFromBigSection: ElementFinder = $('.view-image-container>img');
static homeLink: ElementFinder = $('.home-description-container>a[angulartics2on="click"]');
static getThingLinkInSearchInAllTopics: ElementFinder = $$('.other-things-content .thing-name').first();
static getFloatFooterText: ElementArrayFinder = $$('div[class*="float-footer"] span');
static getAngleUp: ElementFinder = $$('.back-to-top').first();
static hamburgerMenu: ElementFinder = $('span[class="menu-icon"]');
static maybeLaterBtnOnWelcomeHeader: ElementFinder = $$('div[class*="quick-guide"] button[type*="button"]').last();
static zoomIncrease: ElementFinder = $$('button .sign').first();
static zoomDecrease: ElementFinder = $$('button .sign').get(1);
static countryInFilter: ElementArrayFinder = $$('.name');
static okButtonInCountryFilter: ElementFinder = $('.ok-img');
static familyName: ElementFinder = $('.home-description-container>h3');
static previewCloseBtn: ElementFinder = $('.close-container');
static fancyPreview: ElementFinder = $('.fancyBox-image');
static spinner: ElementFinder = $('[class="load"]');
static imagesContainer: ElementFinder = $('.images-container .flex-container');
static visitThisHomeBtn: ElementFinder = $('.home-description-container > a:nth-child(4)'); // TODO add tests class
static allFamiliesInCountryBtn: ElementFinder = $('.home-description-container > a:nth-child(5)'); // TODO add tests class
static countryInImageDescription: ElementArrayFinder = $$('.place-image-box-country');
static minimap: ElementFinder = $('#map-content');
static photographerName: ElementFinder = $('.photographer-container a:nth-child(2)'); // TODO add test class
static familyIncomeOnImage: ElementArrayFinder = $$('.place-image-box-income');
static familyIncomeInPreview: ElementFinder = $('.matrix-view-block .header-container');
static getFamily(index = 0): FamilyImage {
this.waitForSpinner();
return new FamilyImage(this.url, index);
}
static async getAllImages(): Promise<FamilyImage[]> {
return (await this.familyLink.asElementFinders_()).map((family, i) => this.getFamily(i));
}
/**
* Embed feature
*/
static heartIconsOnImage: ElementArrayFinder = $$('matrix-images .heart-circle');
static pinHeader: ElementFinder = $('.pin-header');
static getThingLinkInSearch(thingNumber: number): ElementFinder {
return this.thingLinkInSearch.get(thingNumber);
}
static getLastThing(): ElementFinder {
return this.familyLink.last();
}
| }
static getShareButtonInHamburgerMenu(social: string): ElementFinder {
return $(`main-menu div[class*="share-button ${social}"]`);
}
static async waitForSpinner(): Promise<{}> {
return browser.wait(EC.invisibilityOf(this.spinner), 10000);
}
static async getFamilyIncome(index: number): Promise<number> {
return this.familyIncomeOnImage
.get(index)
.getText()
.then(income => Number(income.replace(/\D/g, '')));
}
static async getFamilyIncomeFromPreviw(index: number): Promise<number> {
return this.familyIncomeInPreview
.get(index)
.getText()
.then(income => Number(income.replace(/\D/g, '')));
}
} | static getFilter(type: string): ElementFinder {
return element(by.id(`${type}-filter`)); | random_line_split |
parser.rs | /* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use std;
fn | (input: &str) -> Result<u32, std::num::ParseIntError> {
input.parse::<u32>()
}
named!(pub parse_message<String>,
do_parse!(
len: map_res!(
map_res!(take_until!(":"), std::str::from_utf8), parse_len) >>
_sep: take!(1) >>
msg: take_str!(len) >>
(
msg.to_string()
)
));
#[cfg(test)]
mod tests {
use nom::*;
use super::*;
/// Simple test of some valid data.
#[test]
fn test_parse_valid() {
let buf = b"12:Hello World!4:Bye.";
let result = parse_message(buf);
match result {
Ok((remainder, message)) => {
// Check the first message.
assert_eq!(message, "Hello World!");
// And we should have 6 bytes left.
assert_eq!(remainder.len(), 6);
}
Err(Err::Incomplete(_)) => {
panic!("Result should not have been incomplete.");
}
Err(Err::Error(err)) |
Err(Err::Failure(err)) => {
panic!("Result should not be an error: {:?}.", err);
}
}
}
}
| parse_len | identifier_name |
parser.rs | /* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use std;
fn parse_len(input: &str) -> Result<u32, std::num::ParseIntError> {
input.parse::<u32>()
}
named!(pub parse_message<String>,
do_parse!(
len: map_res!(
map_res!(take_until!(":"), std::str::from_utf8), parse_len) >>
_sep: take!(1) >>
msg: take_str!(len) >>
(
msg.to_string()
)
));
#[cfg(test)]
mod tests {
use nom::*;
use super::*;
/// Simple test of some valid data.
#[test]
fn test_parse_valid() | }
}
| {
let buf = b"12:Hello World!4:Bye.";
let result = parse_message(buf);
match result {
Ok((remainder, message)) => {
// Check the first message.
assert_eq!(message, "Hello World!");
// And we should have 6 bytes left.
assert_eq!(remainder.len(), 6);
}
Err(Err::Incomplete(_)) => {
panic!("Result should not have been incomplete.");
}
Err(Err::Error(err)) |
Err(Err::Failure(err)) => {
panic!("Result should not be an error: {:?}.", err);
}
} | identifier_body |
parser.rs | /* Copyright (C) 2018 Open Information Security Foundation
* | * the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use std;
fn parse_len(input: &str) -> Result<u32, std::num::ParseIntError> {
input.parse::<u32>()
}
named!(pub parse_message<String>,
do_parse!(
len: map_res!(
map_res!(take_until!(":"), std::str::from_utf8), parse_len) >>
_sep: take!(1) >>
msg: take_str!(len) >>
(
msg.to_string()
)
));
#[cfg(test)]
mod tests {
use nom::*;
use super::*;
/// Simple test of some valid data.
#[test]
fn test_parse_valid() {
let buf = b"12:Hello World!4:Bye.";
let result = parse_message(buf);
match result {
Ok((remainder, message)) => {
// Check the first message.
assert_eq!(message, "Hello World!");
// And we should have 6 bytes left.
assert_eq!(remainder.len(), 6);
}
Err(Err::Incomplete(_)) => {
panic!("Result should not have been incomplete.");
}
Err(Err::Error(err)) |
Err(Err::Failure(err)) => {
panic!("Result should not be an error: {:?}.", err);
}
}
}
} | * You can copy, redistribute or modify this Program under the terms of | random_line_split |
mtphelper.js | "use strict";
//
// Copyright (c) 2017 Ricoh Company, Ltd. All Rights Reserved.
// See LICENSE for more information.
//
//console.log(exports, require, module, __filename, __dirname, process, global);
const MTPDevice = function(deviceId) {
this.deviceId = deviceId;
}
const MTPHelper = {
version: "1.0.0",
helper_version: "unknown",
child: null,
queue: null,
onDeviceRemoved: null, // (deviceId) => { ... }
onDeviceAdded: null, // (deviceId) => { ... }
stderr_proc: function(data0) {
for (var data of data0.split(/[\r\n]/)) {
console.log("mtphelper[stderr]: " + data);
if (data[0] === "{" && data[data.length-1] === "}") {
var e;
try {
e = JSON.parse(data);
} catch (x) {
console.log(x);
e = {}
}
switch (e.event) {
case "DeviceAdded":
if (MTPHelper.onDeviceAdded) MTPHelper.onDeviceAdded(e.deviceId);
break;
case "DeviceRemoved":
if (MTPHelper.onDeviceRemoved) MTPHelper.onDeviceRemoved(e.deviceId);
break;
}
}
}
},
stdout_proc: function(data) {
console.log("mtphelper[stdout]: " + data);
},
/* start MTP-helper exe
@IN require('queue') object
@return Promise-object
@Promise-resolve String of MTPHelper-version
*/
start: function(queue) {
if (queue) {
MTPHelper.queue = queue({concurrency: 1});
}
if (!MTPHelper.queue) {
console.log("specify `require('queue')` first.");
return Promise.reject();
}
return new Promise((resolve, reject)=>{
MTPHelper.queue.push((cb)=>{
if (MTPHelper.child === null) { | MTPHelper.child.readlineIn.on('line', MTPHelper.stdout_proc);
MTPHelper.child.readlineErr = readline.createInterface(MTPHelper.child.stderr, {});
MTPHelper.child.readlineErr.on('line', MTPHelper.stderr_proc);
child.execFile(MTPHelper.executable, ["-v"], (err, sout, serr)=>{
MTPHelper.helper_version = sout.split("\n")[0];
resolve(MTPHelper.helper_version);
});
}
cb();
});
MTPHelper.queue.start();
});
},
/* stop MTP-helper exe
@return Promise-object
@Promise-resolve (empty)
*/
stop: function() {
return new Promise((resolve, reject)=>{
MTPHelper.queue.push((cb)=>{
if (MTPHelper.child) {
MTPHelper.child.stdin.end();
MTPHelper.child = null;
}
cb()
resolve();
});
MTPHelper.queue.start();
});
},
/* request MTP-helper command
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
request: function(command) {
return new Promise((resolve, reject)=>{
MTPHelper.queue.push((cb)=>{
MTPHelper.child.readlineIn.once('line', (data)=>{
try {
resolve(JSON.parse(data));
} catch (e) {
reject(e);
}
finally {
cb()
}
});
MTPHelper.child.stdin.write(command+"\n");
});
MTPHelper.queue.start();
});
},
/* get deviceList and deviceInfo
@return Promise-object
@Promise-resolve Hash of devices or {deviceId: {status: != OK }}
@Promise-reject Exception
*/
deviceList: function() {
return new Promise((resolve, reject)=>{
MTPHelper.request("deviceList").then((data)=>{
var result = {};
if (data.status !== "OK") {
reject(data);
return;
}
Promise.all(data.devices.map((did)=>{
if (did === undefined) {
return undefined;
}
return new Promise((resolve2, reject2)=>{
var dev = new MTPDevice(did);
dev.deviceInfo().then((info)=>{
result[did] = info;
resolve2();
});
});
})).then(()=>{
resolve(result);
});
}).catch((e)=>{
reject(e);
});
});
},
Device: MTPDevice,
platform: process.platform,
dirname: __dirname,
process: process,
global: global,
executable: __dirname + "/MtpHelper" + ((process.platform=="win32")? ".exe": "")
};
/* get dveiceInfo
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.deviceInfo = function() {
return MTPHelper.request("deviceInfo " + this.deviceId);
}
/* get deviceProp desc
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.getPropDesc = function(propName) {
return MTPHelper.request("desc " + this.deviceId + " " + propName);
}
/* get deviceProp value
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.getPropValue = function(propName) {
return MTPHelper.request("get " + this.deviceId + " " + propName);
}
/* set deviceProp value
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.setPropValue = function(propName, propValue) {
return MTPHelper.request("set " + this.deviceId + " " + propName + " " + propValue);
}
/* send config object
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.sendConfigObject = function(filename) {
return MTPHelper.request("sendConfig " + this.deviceId + " " + filename);
}
/* get config object
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.getConfigObject = function(filename) {
return MTPHelper.request("getConfig " + this.deviceId + " " + filename);
}
/* update firmware
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.firmwareUpdate = function(filename) {
return MTPHelper.request("firmwareUpdate " + this.deviceId + " " + filename);
}
module.exports = MTPHelper; | console.log("mtphelper: spawn '" + MTPHelper.executable + "'");
const child = require('child_process');
const readline = require('readline');
MTPHelper.child = child.execFile(MTPHelper.executable);
MTPHelper.child.readlineIn = readline.createInterface(MTPHelper.child.stdout, {}); | random_line_split |
mtphelper.js | "use strict";
//
// Copyright (c) 2017 Ricoh Company, Ltd. All Rights Reserved.
// See LICENSE for more information.
//
//console.log(exports, require, module, __filename, __dirname, process, global);
const MTPDevice = function(deviceId) {
this.deviceId = deviceId;
}
const MTPHelper = {
version: "1.0.0",
helper_version: "unknown",
child: null,
queue: null,
onDeviceRemoved: null, // (deviceId) => { ... }
onDeviceAdded: null, // (deviceId) => { ... }
stderr_proc: function(data0) {
for (var data of data0.split(/[\r\n]/)) {
console.log("mtphelper[stderr]: " + data);
if (data[0] === "{" && data[data.length-1] === "}") {
var e;
try {
e = JSON.parse(data);
} catch (x) {
console.log(x);
e = {}
}
switch (e.event) {
case "DeviceAdded":
if (MTPHelper.onDeviceAdded) MTPHelper.onDeviceAdded(e.deviceId);
break;
case "DeviceRemoved":
if (MTPHelper.onDeviceRemoved) MTPHelper.onDeviceRemoved(e.deviceId);
break;
}
}
}
},
stdout_proc: function(data) {
console.log("mtphelper[stdout]: " + data);
},
/* start MTP-helper exe
@IN require('queue') object
@return Promise-object
@Promise-resolve String of MTPHelper-version
*/
start: function(queue) {
if (queue) {
MTPHelper.queue = queue({concurrency: 1});
}
if (!MTPHelper.queue) {
console.log("specify `require('queue')` first.");
return Promise.reject();
}
return new Promise((resolve, reject)=>{
MTPHelper.queue.push((cb)=>{
if (MTPHelper.child === null) {
console.log("mtphelper: spawn '" + MTPHelper.executable + "'");
const child = require('child_process');
const readline = require('readline');
MTPHelper.child = child.execFile(MTPHelper.executable);
MTPHelper.child.readlineIn = readline.createInterface(MTPHelper.child.stdout, {});
MTPHelper.child.readlineIn.on('line', MTPHelper.stdout_proc);
MTPHelper.child.readlineErr = readline.createInterface(MTPHelper.child.stderr, {});
MTPHelper.child.readlineErr.on('line', MTPHelper.stderr_proc);
child.execFile(MTPHelper.executable, ["-v"], (err, sout, serr)=>{
MTPHelper.helper_version = sout.split("\n")[0];
resolve(MTPHelper.helper_version);
});
}
cb();
});
MTPHelper.queue.start();
});
},
/* stop MTP-helper exe
@return Promise-object
@Promise-resolve (empty)
*/
stop: function() {
return new Promise((resolve, reject)=>{
MTPHelper.queue.push((cb)=>{
if (MTPHelper.child) {
MTPHelper.child.stdin.end();
MTPHelper.child = null;
}
cb()
resolve();
});
MTPHelper.queue.start();
});
},
/* request MTP-helper command
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
request: function(command) {
return new Promise((resolve, reject)=>{
MTPHelper.queue.push((cb)=>{
MTPHelper.child.readlineIn.once('line', (data)=>{
try {
resolve(JSON.parse(data));
} catch (e) {
reject(e);
}
finally {
cb()
}
});
MTPHelper.child.stdin.write(command+"\n");
});
MTPHelper.queue.start();
});
},
/* get deviceList and deviceInfo
@return Promise-object
@Promise-resolve Hash of devices or {deviceId: {status: != OK }}
@Promise-reject Exception
*/
deviceList: function() {
return new Promise((resolve, reject)=>{
MTPHelper.request("deviceList").then((data)=>{
var result = {};
if (data.status !== "OK") {
reject(data);
return;
}
Promise.all(data.devices.map((did)=>{
if (did === undefined) |
return new Promise((resolve2, reject2)=>{
var dev = new MTPDevice(did);
dev.deviceInfo().then((info)=>{
result[did] = info;
resolve2();
});
});
})).then(()=>{
resolve(result);
});
}).catch((e)=>{
reject(e);
});
});
},
Device: MTPDevice,
platform: process.platform,
dirname: __dirname,
process: process,
global: global,
executable: __dirname + "/MtpHelper" + ((process.platform=="win32")? ".exe": "")
};
/* get dveiceInfo
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.deviceInfo = function() {
return MTPHelper.request("deviceInfo " + this.deviceId);
}
/* get deviceProp desc
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.getPropDesc = function(propName) {
return MTPHelper.request("desc " + this.deviceId + " " + propName);
}
/* get deviceProp value
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.getPropValue = function(propName) {
return MTPHelper.request("get " + this.deviceId + " " + propName);
}
/* set deviceProp value
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.setPropValue = function(propName, propValue) {
return MTPHelper.request("set " + this.deviceId + " " + propName + " " + propValue);
}
/* send config object
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.sendConfigObject = function(filename) {
return MTPHelper.request("sendConfig " + this.deviceId + " " + filename);
}
/* get config object
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.getConfigObject = function(filename) {
return MTPHelper.request("getConfig " + this.deviceId + " " + filename);
}
/* update firmware
@return Promise-object
@Promise-resolve Hash of result
@Promise-reject Exception
*/
MTPDevice.prototype.firmwareUpdate = function(filename) {
return MTPHelper.request("firmwareUpdate " + this.deviceId + " " + filename);
}
module.exports = MTPHelper;
| {
return undefined;
} | conditional_block |
core.client.lodash.js | (function() {
'use strict';
angular
.module('core')
.run(run);
run.$inject = ['$rootScope'];
/* @ngInject */
function run($rootScope) | // //_.each(obj[opt], recurse);
// }
// recurse(obj);
//};
_.each(methods, function(method, methodName) {
_[methodName] = _.bind(method, _);
});
}
})();
| {
var _ = $rootScope,
methods = {};
methods.chunk = function(array, n) {
return _.transform(array, function(result, el, i, arr) {
return i % n === 0 ? result.push(arr.slice(i, i + n)) : null;
});
};
methods.stripBase64 = function(str) {
return str.replace(/^data:image\/png;base64,/, '');
};
//methods.recursive = function(obj, opt, iterator) {
// function recurse(obj, property) {
// iterator(obj, property);
// _.each(_.keys(obj), function(prop) {
// recurse(obj[prop]);
// }); | identifier_body |
core.client.lodash.js | (function() {
'use strict';
angular
.module('core')
.run(run);
| run.$inject = ['$rootScope'];
/* @ngInject */
function run($rootScope) {
var _ = $rootScope,
methods = {};
methods.chunk = function(array, n) {
return _.transform(array, function(result, el, i, arr) {
return i % n === 0 ? result.push(arr.slice(i, i + n)) : null;
});
};
methods.stripBase64 = function(str) {
return str.replace(/^data:image\/png;base64,/, '');
};
//methods.recursive = function(obj, opt, iterator) {
// function recurse(obj, property) {
// iterator(obj, property);
// _.each(_.keys(obj), function(prop) {
// recurse(obj[prop]);
// });
// //_.each(obj[opt], recurse);
// }
// recurse(obj);
//};
_.each(methods, function(method, methodName) {
_[methodName] = _.bind(method, _);
});
}
})(); | random_line_split |
|
core.client.lodash.js | (function() {
'use strict';
angular
.module('core')
.run(run);
run.$inject = ['$rootScope'];
/* @ngInject */
function | ($rootScope) {
var _ = $rootScope,
methods = {};
methods.chunk = function(array, n) {
return _.transform(array, function(result, el, i, arr) {
return i % n === 0 ? result.push(arr.slice(i, i + n)) : null;
});
};
methods.stripBase64 = function(str) {
return str.replace(/^data:image\/png;base64,/, '');
};
//methods.recursive = function(obj, opt, iterator) {
// function recurse(obj, property) {
// iterator(obj, property);
// _.each(_.keys(obj), function(prop) {
// recurse(obj[prop]);
// });
// //_.each(obj[opt], recurse);
// }
// recurse(obj);
//};
_.each(methods, function(method, methodName) {
_[methodName] = _.bind(method, _);
});
}
})();
| run | identifier_name |
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if !InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if !(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc) != 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER) != 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY) != 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs) != 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
}
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true | }
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
} | random_line_split |
|
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if !InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if !(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc) != 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER) != 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY) != 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs) != 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn | (cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
}
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
| delete | identifier_name |
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if !InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if !(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc) != 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER) != 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY) != 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs) != 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject |
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
| {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
} | identifier_body |
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if !InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if !(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc) != 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER) != 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY) != 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs) != 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
}
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly | else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
| { JSPROP_READONLY } | conditional_block |
console.py | """a readline console module (unix only).
[email protected]
the module starts a subprocess for the readline console and
communicates through pipes (prompt/cmd).
the console is polled through a timer, which depends on PySide.
"""
from select import select
import os
import sys
import signal
if __name__ == '__main__':
import readline
# prompt input stream
fd_in = int(sys.argv[1])
file_in = os.fdopen( fd_in )
# cmd output stream
fd_out = int(sys.argv[2])
file_out = os.fdopen( fd_out, 'w' )
# some helpers
def send(data):
file_out.write(data + '\n')
file_out.flush()
def recv():
while True:
res = file_in.readline().rstrip('\n')
read, _, _ = select([ file_in ], [], [], 0)
if not read: return res
class History:
"""readline history safe open/close"""
def __init__(self, filename):
self.filename = os.path.expanduser( filename )
def __enter__(self):
try:
readline.read_history_file(self.filename)
# print 'loaded console history from', self.filename
except IOError:
pass
return self
def __exit__(self, type, value, traceback):
readline.write_history_file( self.filename )
def cleanup(*args):
print('console cleanup')
os.system('stty sane')
for sig in [signal.SIGQUIT,
signal.SIGTERM,
signal.SIGILL,
signal.SIGSEGV]:
old = signal.getsignal(sig)
def new(*args):
cleanup()
signal.signal(sig, old)
os.kill(os.getpid(), sig)
signal.signal(sig, new)
# main loop
try:
with History( "~/.sofa-console" ):
print 'console started'
while True:
send( raw_input( recv() ) )
except KeyboardInterrupt:
print 'console exited (SIGINT)'
except EOFError:
ppid = os.getppid()
try:
os.kill(os.getppid(), signal.SIGTERM)
print 'console exited (EOF), terminating parent process'
except OSError:
pass
else:
import subprocess
import code
import atexit
_cleanup = None
def _register( c ):
global _cleanup
if _cleanup: |
_cleanup = c
class Console(code.InteractiveConsole):
def __init__(self, locals = None, timeout = 100):
"""
python interpreter taking input from console subprocess
scope is provided through 'locals' (usually: locals() or globals())
'timeout' (in milliseconds) sets how often is the console polled.
"""
code.InteractiveConsole.__init__(self, locals)
if timeout >= 0:
def callback():
self.poll()
from PySide import QtCore
self.timer = QtCore.QTimer()
self.timer.timeout.connect( callback )
self.timer.start( timeout )
_register( lambda: self.timer.stop() )
# execute next command, blocks on console input
def next(self):
line = recv()
data = '>>> '
if self.push( line ):
data = '... '
send( data )
# convenience
def poll(self):
if ready(): self.next()
# send prompt to indicate we are ready
def send(data):
prompt_out.write(data + '\n')
prompt_out.flush()
# receive command line
def recv():
res = cmd_in.readline()
if res: return res.rstrip('\n')
return res
# is there any available command ?
def ready():
read, _, _ = select([ cmd_in ], [], [], 0)
return read
# communication pipes
prompt = os.pipe()
cmd = os.pipe()
# subprocess with in/out fd, and forwarding stdin
sub = subprocess.Popen(['python', __file__,
str(prompt[0]), str(cmd[1])],
stdin = sys.stdin)
# open the tubes !
prompt_out = os.fdopen(prompt[1], 'w')
cmd_in = os.fdopen(cmd[0], 'r')
# we're ready
send('>>> ')
# def cleanup(*args):
# print('console cleanup')
# os.system('stty sane')
# def exit(*args):
# print 'exit'
# cleanup()
# sys.exit(0) forces cleanup *from python* before the gui
# closes. otherwise pyside causes segfault on python finalize.
def handler(*args):
sub.terminate()
sub.wait()
sys.exit(0)
from PySide import QtCore
app = QtCore.QCoreApplication.instance()
app.aboutToQuit.connect( handler )
# import atexit
# atexit.register( handler )
# import atexit
# atexit.register( exit )
# for sig in [signal.SIGSEGV, signal.SIGILL]:
# old = signal.getsignal(sig)
# def h(*args):
# print args
# sub.terminate()
# signal.signal(sig, old)
# os.kill(os.getpid(), sig)
# signal.signal(sig, h)
| _cleanup() | conditional_block |
console.py | """a readline console module (unix only).
[email protected]
the module starts a subprocess for the readline console and
communicates through pipes (prompt/cmd).
the console is polled through a timer, which depends on PySide.
"""
from select import select
import os
import sys
import signal
if __name__ == '__main__':
import readline
# prompt input stream
fd_in = int(sys.argv[1])
file_in = os.fdopen( fd_in )
# cmd output stream
fd_out = int(sys.argv[2])
file_out = os.fdopen( fd_out, 'w' )
# some helpers
def send(data):
file_out.write(data + '\n')
file_out.flush()
def recv():
while True:
res = file_in.readline().rstrip('\n')
read, _, _ = select([ file_in ], [], [], 0)
if not read: return res
class History:
"""readline history safe open/close"""
def __init__(self, filename):
self.filename = os.path.expanduser( filename )
def __enter__(self):
try:
readline.read_history_file(self.filename) | def __exit__(self, type, value, traceback):
readline.write_history_file( self.filename )
def cleanup(*args):
print('console cleanup')
os.system('stty sane')
for sig in [signal.SIGQUIT,
signal.SIGTERM,
signal.SIGILL,
signal.SIGSEGV]:
old = signal.getsignal(sig)
def new(*args):
cleanup()
signal.signal(sig, old)
os.kill(os.getpid(), sig)
signal.signal(sig, new)
# main loop
try:
with History( "~/.sofa-console" ):
print 'console started'
while True:
send( raw_input( recv() ) )
except KeyboardInterrupt:
print 'console exited (SIGINT)'
except EOFError:
ppid = os.getppid()
try:
os.kill(os.getppid(), signal.SIGTERM)
print 'console exited (EOF), terminating parent process'
except OSError:
pass
else:
import subprocess
import code
import atexit
_cleanup = None
def _register( c ):
global _cleanup
if _cleanup: _cleanup()
_cleanup = c
class Console(code.InteractiveConsole):
def __init__(self, locals = None, timeout = 100):
"""
python interpreter taking input from console subprocess
scope is provided through 'locals' (usually: locals() or globals())
'timeout' (in milliseconds) sets how often is the console polled.
"""
code.InteractiveConsole.__init__(self, locals)
if timeout >= 0:
def callback():
self.poll()
from PySide import QtCore
self.timer = QtCore.QTimer()
self.timer.timeout.connect( callback )
self.timer.start( timeout )
_register( lambda: self.timer.stop() )
# execute next command, blocks on console input
def next(self):
line = recv()
data = '>>> '
if self.push( line ):
data = '... '
send( data )
# convenience
def poll(self):
if ready(): self.next()
# send prompt to indicate we are ready
def send(data):
prompt_out.write(data + '\n')
prompt_out.flush()
# receive command line
def recv():
res = cmd_in.readline()
if res: return res.rstrip('\n')
return res
# is there any available command ?
def ready():
read, _, _ = select([ cmd_in ], [], [], 0)
return read
# communication pipes
prompt = os.pipe()
cmd = os.pipe()
# subprocess with in/out fd, and forwarding stdin
sub = subprocess.Popen(['python', __file__,
str(prompt[0]), str(cmd[1])],
stdin = sys.stdin)
# open the tubes !
prompt_out = os.fdopen(prompt[1], 'w')
cmd_in = os.fdopen(cmd[0], 'r')
# we're ready
send('>>> ')
# def cleanup(*args):
# print('console cleanup')
# os.system('stty sane')
# def exit(*args):
# print 'exit'
# cleanup()
# sys.exit(0) forces cleanup *from python* before the gui
# closes. otherwise pyside causes segfault on python finalize.
def handler(*args):
sub.terminate()
sub.wait()
sys.exit(0)
from PySide import QtCore
app = QtCore.QCoreApplication.instance()
app.aboutToQuit.connect( handler )
# import atexit
# atexit.register( handler )
# import atexit
# atexit.register( exit )
# for sig in [signal.SIGSEGV, signal.SIGILL]:
# old = signal.getsignal(sig)
# def h(*args):
# print args
# sub.terminate()
# signal.signal(sig, old)
# os.kill(os.getpid(), sig)
# signal.signal(sig, h) | # print 'loaded console history from', self.filename
except IOError:
pass
return self
| random_line_split |
console.py | """a readline console module (unix only).
[email protected]
the module starts a subprocess for the readline console and
communicates through pipes (prompt/cmd).
the console is polled through a timer, which depends on PySide.
"""
from select import select
import os
import sys
import signal
if __name__ == '__main__':
import readline
# prompt input stream
fd_in = int(sys.argv[1])
file_in = os.fdopen( fd_in )
# cmd output stream
fd_out = int(sys.argv[2])
file_out = os.fdopen( fd_out, 'w' )
# some helpers
def send(data):
|
def recv():
while True:
res = file_in.readline().rstrip('\n')
read, _, _ = select([ file_in ], [], [], 0)
if not read: return res
class History:
"""readline history safe open/close"""
def __init__(self, filename):
self.filename = os.path.expanduser( filename )
def __enter__(self):
try:
readline.read_history_file(self.filename)
# print 'loaded console history from', self.filename
except IOError:
pass
return self
def __exit__(self, type, value, traceback):
readline.write_history_file( self.filename )
def cleanup(*args):
print('console cleanup')
os.system('stty sane')
for sig in [signal.SIGQUIT,
signal.SIGTERM,
signal.SIGILL,
signal.SIGSEGV]:
old = signal.getsignal(sig)
def new(*args):
cleanup()
signal.signal(sig, old)
os.kill(os.getpid(), sig)
signal.signal(sig, new)
# main loop
try:
with History( "~/.sofa-console" ):
print 'console started'
while True:
send( raw_input( recv() ) )
except KeyboardInterrupt:
print 'console exited (SIGINT)'
except EOFError:
ppid = os.getppid()
try:
os.kill(os.getppid(), signal.SIGTERM)
print 'console exited (EOF), terminating parent process'
except OSError:
pass
else:
import subprocess
import code
import atexit
_cleanup = None
def _register( c ):
global _cleanup
if _cleanup: _cleanup()
_cleanup = c
class Console(code.InteractiveConsole):
def __init__(self, locals = None, timeout = 100):
"""
python interpreter taking input from console subprocess
scope is provided through 'locals' (usually: locals() or globals())
'timeout' (in milliseconds) sets how often is the console polled.
"""
code.InteractiveConsole.__init__(self, locals)
if timeout >= 0:
def callback():
self.poll()
from PySide import QtCore
self.timer = QtCore.QTimer()
self.timer.timeout.connect( callback )
self.timer.start( timeout )
_register( lambda: self.timer.stop() )
# execute next command, blocks on console input
def next(self):
line = recv()
data = '>>> '
if self.push( line ):
data = '... '
send( data )
# convenience
def poll(self):
if ready(): self.next()
# send prompt to indicate we are ready
def send(data):
prompt_out.write(data + '\n')
prompt_out.flush()
# receive command line
def recv():
res = cmd_in.readline()
if res: return res.rstrip('\n')
return res
# is there any available command ?
def ready():
read, _, _ = select([ cmd_in ], [], [], 0)
return read
# communication pipes
prompt = os.pipe()
cmd = os.pipe()
# subprocess with in/out fd, and forwarding stdin
sub = subprocess.Popen(['python', __file__,
str(prompt[0]), str(cmd[1])],
stdin = sys.stdin)
# open the tubes !
prompt_out = os.fdopen(prompt[1], 'w')
cmd_in = os.fdopen(cmd[0], 'r')
# we're ready
send('>>> ')
# def cleanup(*args):
# print('console cleanup')
# os.system('stty sane')
# def exit(*args):
# print 'exit'
# cleanup()
# sys.exit(0) forces cleanup *from python* before the gui
# closes. otherwise pyside causes segfault on python finalize.
def handler(*args):
sub.terminate()
sub.wait()
sys.exit(0)
from PySide import QtCore
app = QtCore.QCoreApplication.instance()
app.aboutToQuit.connect( handler )
# import atexit
# atexit.register( handler )
# import atexit
# atexit.register( exit )
# for sig in [signal.SIGSEGV, signal.SIGILL]:
# old = signal.getsignal(sig)
# def h(*args):
# print args
# sub.terminate()
# signal.signal(sig, old)
# os.kill(os.getpid(), sig)
# signal.signal(sig, h)
| file_out.write(data + '\n')
file_out.flush() | identifier_body |
console.py | """a readline console module (unix only).
[email protected]
the module starts a subprocess for the readline console and
communicates through pipes (prompt/cmd).
the console is polled through a timer, which depends on PySide.
"""
from select import select
import os
import sys
import signal
if __name__ == '__main__':
import readline
# prompt input stream
fd_in = int(sys.argv[1])
file_in = os.fdopen( fd_in )
# cmd output stream
fd_out = int(sys.argv[2])
file_out = os.fdopen( fd_out, 'w' )
# some helpers
def send(data):
file_out.write(data + '\n')
file_out.flush()
def recv():
while True:
res = file_in.readline().rstrip('\n')
read, _, _ = select([ file_in ], [], [], 0)
if not read: return res
class History:
"""readline history safe open/close"""
def __init__(self, filename):
self.filename = os.path.expanduser( filename )
def __enter__(self):
try:
readline.read_history_file(self.filename)
# print 'loaded console history from', self.filename
except IOError:
pass
return self
def __exit__(self, type, value, traceback):
readline.write_history_file( self.filename )
def cleanup(*args):
print('console cleanup')
os.system('stty sane')
for sig in [signal.SIGQUIT,
signal.SIGTERM,
signal.SIGILL,
signal.SIGSEGV]:
old = signal.getsignal(sig)
def new(*args):
cleanup()
signal.signal(sig, old)
os.kill(os.getpid(), sig)
signal.signal(sig, new)
# main loop
try:
with History( "~/.sofa-console" ):
print 'console started'
while True:
send( raw_input( recv() ) )
except KeyboardInterrupt:
print 'console exited (SIGINT)'
except EOFError:
ppid = os.getppid()
try:
os.kill(os.getppid(), signal.SIGTERM)
print 'console exited (EOF), terminating parent process'
except OSError:
pass
else:
import subprocess
import code
import atexit
_cleanup = None
def _register( c ):
global _cleanup
if _cleanup: _cleanup()
_cleanup = c
class | (code.InteractiveConsole):
def __init__(self, locals = None, timeout = 100):
"""
python interpreter taking input from console subprocess
scope is provided through 'locals' (usually: locals() or globals())
'timeout' (in milliseconds) sets how often is the console polled.
"""
code.InteractiveConsole.__init__(self, locals)
if timeout >= 0:
def callback():
self.poll()
from PySide import QtCore
self.timer = QtCore.QTimer()
self.timer.timeout.connect( callback )
self.timer.start( timeout )
_register( lambda: self.timer.stop() )
# execute next command, blocks on console input
def next(self):
line = recv()
data = '>>> '
if self.push( line ):
data = '... '
send( data )
# convenience
def poll(self):
if ready(): self.next()
# send prompt to indicate we are ready
def send(data):
prompt_out.write(data + '\n')
prompt_out.flush()
# receive command line
def recv():
res = cmd_in.readline()
if res: return res.rstrip('\n')
return res
# is there any available command ?
def ready():
read, _, _ = select([ cmd_in ], [], [], 0)
return read
# communication pipes
prompt = os.pipe()
cmd = os.pipe()
# subprocess with in/out fd, and forwarding stdin
sub = subprocess.Popen(['python', __file__,
str(prompt[0]), str(cmd[1])],
stdin = sys.stdin)
# open the tubes !
prompt_out = os.fdopen(prompt[1], 'w')
cmd_in = os.fdopen(cmd[0], 'r')
# we're ready
send('>>> ')
# def cleanup(*args):
# print('console cleanup')
# os.system('stty sane')
# def exit(*args):
# print 'exit'
# cleanup()
# sys.exit(0) forces cleanup *from python* before the gui
# closes. otherwise pyside causes segfault on python finalize.
def handler(*args):
sub.terminate()
sub.wait()
sys.exit(0)
from PySide import QtCore
app = QtCore.QCoreApplication.instance()
app.aboutToQuit.connect( handler )
# import atexit
# atexit.register( handler )
# import atexit
# atexit.register( exit )
# for sig in [signal.SIGSEGV, signal.SIGILL]:
# old = signal.getsignal(sig)
# def h(*args):
# print args
# sub.terminate()
# signal.signal(sig, old)
# os.kill(os.getpid(), sig)
# signal.signal(sig, h)
| Console | identifier_name |
sign.rs | Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
pubkey: Vec<u8>,
buf: Vec<u8>,
}
impl Verifier {
pub fn new(pubkey: &[u8]) -> Self {
Verifier {
pubkey: Vec::from(pubkey),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn verify(&self, expected_sig: &[u8]) -> bool {
let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
match pk.verify(&self.buf, expected_sig) {
Ok(_) => true,
_ => false,
}
}
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
key_pair: Ed25519KeyPair,
buf: Vec<u8>,
}
impl Default for Signer {
fn default() -> Self |
}
impl Signer {
pub fn new() -> Self {
let rng = rand::SystemRandom::new();
let mut seed = [0u8; 32];
rng.fill(&mut seed).unwrap();
Signer::from_seed(&seed)
}
pub fn from_seed(seed: &[u8]) -> Self {
Signer {
key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn sign(&mut self) -> Vec<u8> {
let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();
self.buf.clear();
signature
}
pub fn public_key_bytes(&self) -> &[u8] {
self.key_pair.public_key().as_ref()
}
}
impl fmt::Display for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", HEX.encode(self.public_key_bytes()))
}
}
impl fmt::Debug for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Signer({}, {:?})",
HEX.encode(self.public_key_bytes()),
self.buf
)
}
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)]
mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() {
let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn sign_ed25519_empty_message() {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, | {
Self::new()
} | identifier_body |
sign.rs | Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
pubkey: Vec<u8>,
buf: Vec<u8>,
}
impl Verifier {
pub fn new(pubkey: &[u8]) -> Self {
Verifier {
pubkey: Vec::from(pubkey),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn verify(&self, expected_sig: &[u8]) -> bool {
let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
match pk.verify(&self.buf, expected_sig) {
Ok(_) => true,
_ => false,
}
}
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
key_pair: Ed25519KeyPair,
buf: Vec<u8>,
}
impl Default for Signer {
fn default() -> Self {
Self::new()
}
}
impl Signer {
pub fn new() -> Self {
let rng = rand::SystemRandom::new();
let mut seed = [0u8; 32];
rng.fill(&mut seed).unwrap();
Signer::from_seed(&seed)
}
pub fn from_seed(seed: &[u8]) -> Self {
Signer {
key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn sign(&mut self) -> Vec<u8> {
let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();
self.buf.clear();
signature
}
pub fn public_key_bytes(&self) -> &[u8] {
self.key_pair.public_key().as_ref()
}
}
impl fmt::Display for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", HEX.encode(self.public_key_bytes()))
}
}
impl fmt::Debug for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Signer({}, {:?})",
HEX.encode(self.public_key_bytes()),
self.buf
)
}
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)] | let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn sign_ed25519_empty_message() {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
| mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() { | random_line_split |
sign.rs | Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
    // Raw Ed25519 public key bytes the signature is checked against.
    pubkey: Vec<u8>,
    // Accumulates message bytes passed to `update()` until `verify()` is called.
    buf: Vec<u8>,
}
impl Verifier {
    /// Creates a verifier for the given raw Ed25519 public key bytes.
    pub fn new(pubkey: &[u8]) -> Self {
        Verifier {
            pubkey: Vec::from(pubkey),
            buf: Vec::with_capacity(INITIAL_BUF_SIZE),
        }
    }

    /// Appends `data` to the message that will be checked by `verify()`.
    pub fn update(&mut self, data: &[u8]) {
        // extend_from_slice reserves as needed; an explicit reserve() first
        // was redundant.
        self.buf.extend_from_slice(data);
    }

    /// Returns `true` iff `expected_sig` is a valid Ed25519 signature over
    /// all data passed to `update()` so far. Takes `&self`, so the buffered
    /// message is left intact and can be checked against other signatures.
    pub fn verify(&self, expected_sig: &[u8]) -> bool {
        let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
        pk.verify(&self.buf, expected_sig).is_ok()
    }
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
    // Ed25519 key pair holding the private key; the manual `Debug` impl
    // below prints only the public key and the buffer, never the seed.
    key_pair: Ed25519KeyPair,
    // Accumulates message bytes passed to `update()`; cleared by `sign()`.
    buf: Vec<u8>,
}
// `Signer::default()` is a signer with a freshly generated random seed,
// identical to `Signer::new()`.
impl Default for Signer {
    fn default() -> Self {
        Self::new()
    }
}
impl Signer {
    /// Builds a signer from a freshly generated random 32-byte seed.
    pub fn new() -> Self {
        let mut seed = [0u8; 32];
        rand::SystemRandom::new().fill(&mut seed).unwrap();
        Self::from_seed(&seed)
    }

    /// Builds a signer from an existing 32-byte Ed25519 seed.
    pub fn from_seed(seed: &[u8]) -> Self {
        let key_pair = Ed25519KeyPair::from_seed_unchecked(seed).unwrap();
        Signer {
            key_pair: key_pair,
            buf: Vec::with_capacity(INITIAL_BUF_SIZE),
        }
    }

    /// Appends `data` to the message that will be signed by `sign()`.
    pub fn update(&mut self, data: &[u8]) {
        self.buf.reserve(data.len());
        self.buf.extend_from_slice(data);
    }

    /// Signs everything accumulated via `update()`, then resets the buffer
    /// so the signer can be reused for a new message.
    pub fn sign(&mut self) -> Vec<u8> {
        let sig_bytes = self.key_pair.sign(&self.buf).as_ref().to_vec();
        self.buf.clear();
        sig_bytes
    }

    /// Raw bytes of this signer's Ed25519 public key.
    pub fn public_key_bytes(&self) -> &[u8] {
        self.key_pair.public_key().as_ref()
    }
}
// `Display` prints only the hex-encoded public key; the private seed and
// any buffered message bytes are never shown.
impl fmt::Display for Signer {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", HEX.encode(self.public_key_bytes()))
    }
}
// `Debug` prints the hex public key plus the raw buffered message bytes.
// NOTE(review): unlike `Display`, this exposes unsigned message content --
// avoid logging it if the message may be sensitive.
impl fmt::Debug for Signer {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(
            f,
            "Signer({}, {:?})",
            HEX.encode(self.public_key_bytes()),
            self.buf
        )
    }
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)]
mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() {
let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn | () {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true | sign_ed25519_empty_message | identifier_name |
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
| // http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if !c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else {
Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() {
return;
}
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
} | random_line_split |
|
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
/// In-memory cookie jar implementing the RFC 6265 storage model.
pub struct CookieStorage {
    // Flat list of stored cookies; `cookies_for_url` sorts matches on the
    // fly, so no ordering invariant is maintained here.
    cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if !c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else {
Ok(None)
}
}
    // http://tools.ietf.org/html/rfc6265#section-5.3
    // Inserts `cookie`, replacing any stored cookie with the same
    // (domain, path, name). Aborts entirely if the existing cookie is
    // HttpOnly and `source` is not HTTP. An empty value effectively
    // deletes the cookie: the old one is removed and nothing is stored.
    pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
        let old_cookie = self.remove(&cookie, source);
        if old_cookie.is_err() {
            // This new cookie is not allowed to overwrite an existing one.
            return;
        }
        if cookie.cookie.value.is_empty() {
            // Empty value: the matching old cookie (if any) was already
            // removed above and no replacement is stored.
            return;
        }
        // Step 11
        if let Some(old_cookie) = old_cookie.unwrap() {
            // Step 11.3: a replaced cookie keeps its original creation time.
            cookie.creation_time = old_cookie.creation_time;
        }
        // Step 12
        self.cookies.push(cookie);
    }
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn | (&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
}
| cookies_for_url | identifier_name |
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if !c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else {
Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() |
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
}
| {
return;
} | conditional_block |
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> | Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() {
return;
}
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
}
| {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if !c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else { | identifier_body |
surface.rs | use {Scalar, TOLERANCE};
use maths::{CrossProduct, DotProduct, UnitVec3D, Vec3D};
/// Represents a `Surface` for a given set of points.
#[derive(Copy, Clone)]
pub struct Surface {
    /// The `Surface` normal (unit length; orientation fixed in `Surface::new`)
    pub normal: UnitVec3D,
    /// The node indices associated with the `Surface`; these index into the
    /// vertex list the surface was constructed from
    pub nodes: [usize; 3],
}
impl Surface {
/// Creates a new `Surface` from the point cloud and indices provided.
pub fn new(vertices: &Vec<Vec3D>, index_0: usize, index_1: usize, index_2: usize) -> Surface {
let reference_point = vertices.iter()
.fold(Vec3D::zero(), |total, &vector| {
total + vector
}) / (vertices.len() as Scalar);
let base = vertices[index_0];
let relative_to_reference = base - reference_point;
let edge_0 = vertices[index_1] - base;
let edge_1 = vertices[index_2] - base;
let mut normal = edge_0.cross(edge_1).normalize();
if normal.dot(relative_to_reference) < TOLERANCE |
return Surface {
normal: normal,
nodes: [index_0, index_1, index_2],
};
}
/// Computes the centroid of a `Surface` using the node indices in the
/// `Surface` and the point cloud provided.
pub fn compute_centroid(surface: &Surface, vertices: &Vec<Vec3D>) -> Vec3D {
return surface.nodes.iter()
.fold(Vec3D::zero(), |total, &index| {
total + vertices[index]
}) / 3.0;
}
}
| {
normal = -normal;
} | conditional_block |
surface.rs | use {Scalar, TOLERANCE};
use maths::{CrossProduct, DotProduct, UnitVec3D, Vec3D};
/// Represents a `Surface` for a given set of points.
#[derive(Copy, Clone)]
pub struct Surface {
/// The `Surface` normal
pub normal: UnitVec3D,
/// The node indices associated with the `Surface`
pub nodes: [usize; 3],
}
impl Surface {
/// Creates a new `Surface` from the point cloud and indices provided.
pub fn new(vertices: &Vec<Vec3D>, index_0: usize, index_1: usize, index_2: usize) -> Surface {
let reference_point = vertices.iter()
.fold(Vec3D::zero(), |total, &vector| {
total + vector
}) / (vertices.len() as Scalar);
let base = vertices[index_0];
let relative_to_reference = base - reference_point;
let edge_0 = vertices[index_1] - base;
let edge_1 = vertices[index_2] - base;
let mut normal = edge_0.cross(edge_1).normalize();
if normal.dot(relative_to_reference) < TOLERANCE {
normal = -normal;
}
return Surface {
normal: normal,
nodes: [index_0, index_1, index_2],
};
}
/// Computes the centroid of a `Surface` using the node indices in the
/// `Surface` and the point cloud provided.
pub fn | (surface: &Surface, vertices: &Vec<Vec3D>) -> Vec3D {
return surface.nodes.iter()
.fold(Vec3D::zero(), |total, &index| {
total + vertices[index]
}) / 3.0;
}
}
| compute_centroid | identifier_name |
surface.rs | use {Scalar, TOLERANCE}; | /// Represents a `Surface` for a given set of points.
#[derive(Copy, Clone)]
pub struct Surface {
/// The `Surface` normal
pub normal: UnitVec3D,
/// The node indices associated with the `Surface`
pub nodes: [usize; 3],
}
impl Surface {
/// Creates a new `Surface` from the point cloud and indices provided.
pub fn new(vertices: &Vec<Vec3D>, index_0: usize, index_1: usize, index_2: usize) -> Surface {
let reference_point = vertices.iter()
.fold(Vec3D::zero(), |total, &vector| {
total + vector
}) / (vertices.len() as Scalar);
let base = vertices[index_0];
let relative_to_reference = base - reference_point;
let edge_0 = vertices[index_1] - base;
let edge_1 = vertices[index_2] - base;
let mut normal = edge_0.cross(edge_1).normalize();
if normal.dot(relative_to_reference) < TOLERANCE {
normal = -normal;
}
return Surface {
normal: normal,
nodes: [index_0, index_1, index_2],
};
}
/// Computes the centroid of a `Surface` using the node indices in the
/// `Surface` and the point cloud provided.
pub fn compute_centroid(surface: &Surface, vertices: &Vec<Vec3D>) -> Vec3D {
return surface.nodes.iter()
.fold(Vec3D::zero(), |total, &index| {
total + vertices[index]
}) / 3.0;
}
} | use maths::{CrossProduct, DotProduct, UnitVec3D, Vec3D};
| random_line_split |
issue-21384.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn | <T : Clone>(arg: T) -> T {
arg.clone()
}
#[derive(PartialEq)]
struct Test(int);
// Regression test for issue #21384: range expressions must be `Clone`
// (checked via the `test` helper) without requiring `Clone` endpoints.
// Written for pre-1.0 Rust: `int` and `FullRange` (presumably the old name
// of today's `RangeFull`) no longer exist -- do not modernize; the archaic
// syntax is the point of the test.
fn main() {
    // Check that ranges implement clone
    assert!(test(1..5) == (1..5));
    assert!(test(..5) == (..5));
    assert!(test(1..) == (1..));
    assert!(test(FullRange) == (FullRange));
    // Check that ranges can still be used with non-clone limits
    assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
}
| test | identifier_name |
issue-21384.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | }
#[derive(PartialEq)]
struct Test(int);
fn main() {
// Check that ranges implement clone
assert!(test(1..5) == (1..5));
assert!(test(..5) == (..5));
assert!(test(1..) == (1..));
assert!(test(FullRange) == (FullRange));
// Check that ranges can still be used with non-clone limits
assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
} | // option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test<T : Clone>(arg: T) -> T {
arg.clone() | random_line_split |
issue-21384.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Identity helper that returns a clone of `arg`; it only compiles when the
// argument type implements `Clone`, which is what the test exercises.
fn test<T : Clone>(arg: T) -> T {
    arg.clone()
}
#[derive(PartialEq)]
struct Test(int);
fn main() | {
// Check that ranges implement clone
assert!(test(1..5) == (1..5));
assert!(test(..5) == (..5));
assert!(test(1..) == (1..));
assert!(test(FullRange) == (FullRange));
// Check that ranges can still be used with non-clone limits
assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
} | identifier_body |
|
gd.js | ich an eag",
"Header 4": "Bann-cinn 4",
"Paste is now in plain text mode. Contents will now be pasted as plain text until you toggle this option off.": "Ma chuireas tu rud ann a-nis, th\u00e8id an t-susbaint a chur ann mar theacsa lom gus an cuir thu dheth an roghainn seo a-rithist.",
"Underline": "Fo-loidhne",
"Cancel": "Sguir dheth",
"Justify": "Blocaich",
"Inline": "Taobh a-staigh na loidhne",
"Copy": "D\u00e8an lethbhreac",
"Align left": "Co-thaobhaich ris an l\u00e0imh chl\u00ec",
"Visual aids": "Taic l\u00e8irsinne",
"Lower Greek": "Litrichean Greugach beaga",
"Square": "Ce\u00e0rnag",
"Default": "Bun-roghainn",
"Lower Alpha": "Aibidileach is beag",
"Circle": "Cearcall",
"Disc": "Diosga",
"Upper Alpha": "Aibidileach is m\u00f2r",
"Upper Roman": "\u00c0ireamhan R\u00f2manach is m\u00f2r",
"Lower Roman": "\u00c0ireamhan R\u00f2manach is beag",
"Name": "Ainm",
"Anchor": "Acair",
"You have unsaved changes are you sure you want to navigate away?": "Tha atharraichean gun s\u00e0bhaladh agad, a bheil thu cinnteach gu bheil thu airson gluasad air falbh?",
"Restore last draft": "Aisig an dreach mu dheireadh",
"Special character": "Caractar s\u00f2nraichte",
"Source code": "An c\u00f2d t\u00f9sail",
"B": "B",
"R": "R",
"G": "G",
"Color": "Dath",
"Right to left": "Deas gu cl\u00ec",
"Left to right": "Cl\u00ec gu deas",
"Emoticons": "Samhlaidhean-gn\u00f9ise",
"Robots": "Robotairean",
"Document properties": "Roghainnean na sgr\u00ecobhainne",
"Title": "Tiotal",
"Keywords": "Faclan-luirg",
"Encoding": "C\u00f2dachadh",
"Description": "Tuairisgeul",
"Author": "\u00d9ghdar",
"Fullscreen": "L\u00e0n-sgr\u00ecn",
"Horizontal line": "Loidhne ch\u00f2mhnard",
"Horizontal space": "\u00c0ite c\u00f2mhnard",
"Insert\/edit image": "Cuir a-steach\/Deasaich an dealbh",
"General": "Coitcheann",
"Advanced": "Adhartach",
"Source": "T\u00f9s",
"Border": "Iomall",
"Constrain proportions": "Cuingich na co-r\u00e8irean",
"Vertical space": "\u00c0ite inghearach",
"Image description": "Tuairisgeul an deilbh",
"Style": "Stoidhle",
"Dimensions": "Meudachd",
"Insert image": "Cuir a-steach dealbh",
"Zoom in": "S\u00f9m a-steach",
"Contrast": "Iomsgaradh",
"Back": "Air ais",
"Gamma": "Gamma",
"Flip horizontally": "Thoir flip air a\u2019 ch\u00f2mhnard",
"Resize": "Atharraich am meud",
"Sharpen": "Geuraich",
"Zoom out": "S\u00f9m a-mach",
"Image options": "Roghainnean an deilbh",
"Apply": "Cuir an s\u00e0s",
"Brightness": "Soilleireachd",
"Rotate clockwise": "Cuairtich gu deiseil",
"Rotate counterclockwise": "Cuairtich gu tuathail",
"Edit image": "Deasaich an dealbh",
"Color levels": "\u00ccrean nan dathan",
"Crop": "Bearr",
"Orientation": "Comhair",
"Flip vertically": "Thoir flip gu inghearach",
"Invert": "Ais-thionndaidh",
"Insert date\/time": "Cuir a-steach ceann-l\u00e0\/\u00e0m",
"Remove link": "Thoir air falbh an ceangal",
"Url": "URL",
"Text to display": "An teacsa a th\u00e8id a shealltainn",
"Anchors": "Acraichean",
"Insert link": "Cuir a-steach ceangal",
"New window": "Uinneag \u00f9r",
"None": "Chan eil gin",
"The URL you entered seems to be an external link. Do you want to add the required http:\/\/ prefix?": "Tha coltas gu bheil an URL a chuir thu a-steach 'na cheangal ris an taobh a-muigh. A bheil thu airson an ro-leasachan http:\/\/ a chur ris? Tha feum air.",
"Target": "Targaid",
"The URL you entered seems to be an email address. Do you want to add the required mailto: prefix?": "Tha coltas gu bheil an URL a chuir thu a-steach 'na she\u00f2ladh puist-d. A bheil thu airson an ro-leasachan mailto: a chur ris? Tha feum air.",
"Insert\/edit link": "Cuir a-steach\/Deasaich an ceangal",
"Insert\/edit video": "Cuir a-steach\/Deasaich a' video",
"Poster": "P\u00f2stair",
"Alternative source": "Roghainn eile de th\u00f9s",
"Paste your embed code below:": "Cuir an c\u00f2d leabachaidh agad a-steach gu h-\u00ecosal:",
"Insert video": "Cuir a-steach video",
"Embed": "Leabaich",
"Nonbreaking space": "Be\u00e0rn neo-bhristidh",
"Page break": "Briseadh-duilleige",
"Paste as text": "Cuir ann mar theacsa",
"Preview": "Ro-shealladh",
"Print": "Cl\u00f2-bhuail",
"Save": "S\u00e0bhail",
"Could not find the specified string.": "Cha b' urrainn dhuinn na dh'iarr thu a lorg.",
"Replace": "Cuir 'na \u00e0ite",
"Next": "Air adhart",
"Whole words": "Faclan sl\u00e0na",
"Find and replace": "Lorg is cuir 'na \u00e0ite",
"Replace with": "Cuir na leanas 'na \u00e0ite",
"Find": "Lorg",
"Replace all": "Cuir an \u00e0ite na h-uile",
"Match case": "Maids litrichean m\u00f2ra 's beaga",
"Prev": "Air ais",
"Spellcheck": "Dearbhaich an litreachadh",
"Finish": "Cr\u00ecochnaich",
"Ignore all": "Leig seachad na h-uile",
"Ignore": "Leig seachad",
"Add to Dictionary": "Cuir ris an fhaclair",
"Insert row before": "Cuir a-steach r\u00e0gh roimhe",
"Rows": "R\u00e0ghan",
"Height": "\u00c0irde",
"Paste row after": "Cuir ann r\u00e0gh 'na dh\u00e8idh",
"Alignment": "Co-thaobhadh",
"Border color": "Dath an iomaill",
"Column group": "Buidheann cholbhan",
"Row": "R\u00e0gh",
"Insert column before": "Cuir a-steach colbh roimhe",
"Split cell": "Sgoilt an cealla",
"Cell padding": "Padadh nan ceallan", | "Cell spacing": "Be\u00e0rnadh nan ceallan",
"Row type": "Se\u00f2rsa an r\u00e0igh",
"Insert table": "Cuir a-steach cl\u00e0r",
"Body": "Bodhaig",
"Caption": "Caipsean", | random_line_split |
|
SearchCriteria.ts | import { Injectable } from 'angular2/core';
import {YoutubeService} from './youtube.service';
import {LatLng} from './LatLng';
import {YoutubeVideos} from './Youtube';
// One entry of a <select> control: `value` is the machine value submitted
// to the API, `displayText` is the human-readable label.
export class SelectOption{
    value:string;
    displayText:string;
}
@Injectable()
export class SearchCriteria{
sortOptions:Array<SelectOption> = [
{value:'relevance', displayText:'Relevance'},
{value:'date', displayText:'Date'},
{value:'rating', displayText:'Rating'},
{value:'title', displayText:'Title'},
];
searchSortBy:string;
searchText:string;
addressText:string;
searchRange:string;
searchRanges:Array<SelectOption> = [
{value:'100m', displayText:'100 Miles'},
{value:'250m', displayText:'250 Miles'},
{value:'500m', displayText:'500 Miles'},
{value:'1000m', displayText:'1000 Miles'},
];
latLng:LatLng;
pageToken:string;
    // YoutubeService (geocoding + search) is injected by Angular; the
    // location starts as a fresh, un-geocoded LatLng.
    constructor(private _youtubeService:YoutubeService){
        this.latLng = new LatLng();
    }
public buildSearchCriteria(){
return new Promise((resolve) => {
if(this.addressText){
this._youtubeService.runGeoLocator(this.addressText).then((_latLng:LatLng)=>{
this.latLng = _latLng;
resolve(this.compileCriteria());
})
}else{
resolve(this.compileCriteria());
}
});
}
private | (){
var returnObject = {
part: 'snippet',
q: this.searchText,
order: 'relevance',
type: 'video',
};
if(this.searchSortBy){
returnObject.order = this.searchSortBy;
}
if(this.pageToken){
returnObject['pageToken'] = this.pageToken;
}
if(this.addressText && this.latLng.status && this.searchRange){
returnObject['location'] = this.latLng.lat + ', ' + this.latLng.lng;
returnObject['locationRadius'] = this.searchRange;
}
return returnObject;
}
    // Handler for the range <select>: records the chosen search radius.
    public onSearchRangeChange(newValue) {
        this.searchRange = newValue;
    }
    // Records the chosen result ordering (see `sortOptions`).
    public setSearchSort(newValue) {
        this.searchSortBy = newValue;
    }
    // Records the pagination token used by `compileCriteria`.
    public setPageToken(newValue) {
        this.pageToken = newValue;
    }
    /**
     * Runs a YouTube search with the current criteria and resolves with the
     * results.
     *
     * NOTE(review): when `searchText` is empty the returned promise never
     * settles (only an alert fires), and rejections from the underlying
     * service calls are not propagated -- confirm callers tolerate a
     * forever-pending promise before relying on this.
     */
    public youtubeSearch(){
        return new Promise((resolve) => {
            if(this.searchText){
                this.buildSearchCriteria().then((searchCriteria) => {
                    this._youtubeService.runYoutubeSearch(searchCriteria).then((searchResults:YoutubeVideos) => {
                        resolve(searchResults);
                    });
                });
            }else{
                alert('Please Enter Search Text');
            }
        })
    }
}
| compileCriteria | identifier_name |
SearchCriteria.ts | import { Injectable } from 'angular2/core';
import {YoutubeService} from './youtube.service';
import {LatLng} from './LatLng';
import {YoutubeVideos} from './Youtube';
export class SelectOption{
value:string;
displayText:string;
}
@Injectable()
export class SearchCriteria{
sortOptions:Array<SelectOption> = [
{value:'relevance', displayText:'Relevance'},
{value:'date', displayText:'Date'},
{value:'rating', displayText:'Rating'},
{value:'title', displayText:'Title'},
];
searchSortBy:string;
searchText:string;
addressText:string;
searchRange:string;
searchRanges:Array<SelectOption> = [
{value:'100m', displayText:'100 Miles'},
{value:'250m', displayText:'250 Miles'},
{value:'500m', displayText:'500 Miles'},
{value:'1000m', displayText:'1000 Miles'},
];
latLng:LatLng;
pageToken:string;
constructor(private _youtubeService:YoutubeService){
this.latLng = new LatLng();
}
public buildSearchCriteria(){
return new Promise((resolve) => {
if(this.addressText){
this._youtubeService.runGeoLocator(this.addressText).then((_latLng:LatLng)=>{
this.latLng = _latLng;
resolve(this.compileCriteria());
})
}else{
resolve(this.compileCriteria());
}
});
}
private compileCriteria() | return returnObject;
}
public onSearchRangeChange(newValue) {
this.searchRange = newValue;
}
public setSearchSort(newValue) {
this.searchSortBy = newValue;
}
public setPageToken(newValue) {
this.pageToken = newValue;
}
public youtubeSearch(){
return new Promise((resolve) => {
if(this.searchText){
this.buildSearchCriteria().then((searchCriteria) => {
this._youtubeService.runYoutubeSearch(searchCriteria).then((searchResults:YoutubeVideos) => {
resolve(searchResults);
});
});
}else{
alert('Please Enter Search Text');
}
})
}
}
| {
var returnObject = {
part: 'snippet',
q: this.searchText,
order: 'relevance',
type: 'video',
};
if(this.searchSortBy){
returnObject.order = this.searchSortBy;
}
if(this.pageToken){
returnObject['pageToken'] = this.pageToken;
}
if(this.addressText && this.latLng.status && this.searchRange){
returnObject['location'] = this.latLng.lat + ', ' + this.latLng.lng;
returnObject['locationRadius'] = this.searchRange;
} | identifier_body |
SearchCriteria.ts | import { Injectable } from 'angular2/core';
import {YoutubeService} from './youtube.service';
import {LatLng} from './LatLng';
import {YoutubeVideos} from './Youtube';
export class SelectOption{
value:string;
displayText:string;
}
@Injectable()
export class SearchCriteria{
sortOptions:Array<SelectOption> = [
{value:'relevance', displayText:'Relevance'},
{value:'date', displayText:'Date'},
{value:'rating', displayText:'Rating'},
{value:'title', displayText:'Title'},
];
searchSortBy:string;
searchText:string;
addressText:string;
searchRange:string;
searchRanges:Array<SelectOption> = [
{value:'100m', displayText:'100 Miles'},
{value:'250m', displayText:'250 Miles'},
{value:'500m', displayText:'500 Miles'},
{value:'1000m', displayText:'1000 Miles'},
];
latLng:LatLng;
pageToken:string;
constructor(private _youtubeService:YoutubeService){
this.latLng = new LatLng();
}
public buildSearchCriteria(){
return new Promise((resolve) => {
if(this.addressText){
this._youtubeService.runGeoLocator(this.addressText).then((_latLng:LatLng)=>{
this.latLng = _latLng;
resolve(this.compileCriteria());
})
}else{
resolve(this.compileCriteria());
}
});
}
private compileCriteria(){
var returnObject = { |
if(this.searchSortBy){
returnObject.order = this.searchSortBy;
}
if(this.pageToken){
returnObject['pageToken'] = this.pageToken;
}
if(this.addressText && this.latLng.status && this.searchRange){
returnObject['location'] = this.latLng.lat + ', ' + this.latLng.lng;
returnObject['locationRadius'] = this.searchRange;
}
return returnObject;
}
public onSearchRangeChange(newValue) {
this.searchRange = newValue;
}
public setSearchSort(newValue) {
this.searchSortBy = newValue;
}
public setPageToken(newValue) {
this.pageToken = newValue;
}
public youtubeSearch(){
return new Promise((resolve) => {
if(this.searchText){
this.buildSearchCriteria().then((searchCriteria) => {
this._youtubeService.runYoutubeSearch(searchCriteria).then((searchResults:YoutubeVideos) => {
resolve(searchResults);
});
});
}else{
alert('Please Enter Search Text');
}
})
}
} | part: 'snippet',
q: this.searchText,
order: 'relevance',
type: 'video',
}; | random_line_split |
SearchCriteria.ts | import { Injectable } from 'angular2/core';
import {YoutubeService} from './youtube.service';
import {LatLng} from './LatLng';
import {YoutubeVideos} from './Youtube';
export class SelectOption{
value:string;
displayText:string;
}
@Injectable()
export class SearchCriteria{
sortOptions:Array<SelectOption> = [
{value:'relevance', displayText:'Relevance'},
{value:'date', displayText:'Date'},
{value:'rating', displayText:'Rating'},
{value:'title', displayText:'Title'},
];
searchSortBy:string;
searchText:string;
addressText:string;
searchRange:string;
searchRanges:Array<SelectOption> = [
{value:'100m', displayText:'100 Miles'},
{value:'250m', displayText:'250 Miles'},
{value:'500m', displayText:'500 Miles'},
{value:'1000m', displayText:'1000 Miles'},
];
latLng:LatLng;
pageToken:string;
constructor(private _youtubeService:YoutubeService){
this.latLng = new LatLng();
}
public buildSearchCriteria(){
return new Promise((resolve) => {
if(this.addressText){
this._youtubeService.runGeoLocator(this.addressText).then((_latLng:LatLng)=>{
this.latLng = _latLng;
resolve(this.compileCriteria());
})
}else{
resolve(this.compileCriteria());
}
});
}
private compileCriteria(){
var returnObject = {
part: 'snippet',
q: this.searchText,
order: 'relevance',
type: 'video',
};
if(this.searchSortBy) |
if(this.pageToken){
returnObject['pageToken'] = this.pageToken;
}
if(this.addressText && this.latLng.status && this.searchRange){
returnObject['location'] = this.latLng.lat + ', ' + this.latLng.lng;
returnObject['locationRadius'] = this.searchRange;
}
return returnObject;
}
public onSearchRangeChange(newValue) {
this.searchRange = newValue;
}
public setSearchSort(newValue) {
this.searchSortBy = newValue;
}
public setPageToken(newValue) {
this.pageToken = newValue;
}
public youtubeSearch(){
return new Promise((resolve) => {
if(this.searchText){
this.buildSearchCriteria().then((searchCriteria) => {
this._youtubeService.runYoutubeSearch(searchCriteria).then((searchResults:YoutubeVideos) => {
resolve(searchResults);
});
});
}else{
alert('Please Enter Search Text');
}
})
}
}
| {
returnObject.order = this.searchSortBy;
} | conditional_block |
ContactsPage.tsx | import * as React from 'react';
import {Page, Navbar, List, ListGroup, ListItem} from 'framework7-react';
const contacts = {
'A': [
'Aaron',
'Abbie',
'Adam',
'Adele',
'Agatha',
'Agnes',
'Albert',
'Alexander'
],
'B': [
'Bailey',
'Barclay',
'Bartolo',
'Bellamy',
'Belle',
'Benjamin'
],
'C': [
'Caiden',
'Calvin',
'Candy',
'Carl',
'Cherilyn',
'Chester',
'Chloe' | };
export const ContactsPage = () => {
return (
<Page>
<Navbar backLink="Back" title="Contacts" sliding />
<List contacts>
{Object.keys(contacts).reduce((listGroups, nextGroupName) => {
return [
...listGroups,
<ListGroup key={nextGroupName}>
<ListItem title={nextGroupName} groupTitle />
{contacts[nextGroupName].map(contactName => {
return <ListItem key={contactName} title={contactName} />;
})}
</ListGroup>
];
}, [])}
</List>
</Page>
);
}; | ],
'V': [
'Vladimir'
] | random_line_split |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { RouterModule } from '@angular/router';
import { HashLocationStrategy, LocationStrategy } from '@angular/common';
import { AppComponent } from './app.component';
import { GameService } from './game.service';
import { GameComponent } from './game/game.component';
import { GameActionDialog } from './game/game.action.dialog';
import { GameChatDialog } from './game/game.chat.dialog';
import { GameConfirmDialog } from './game/game.confirm.dialog';
import {
MatAutocompleteModule,
MatButtonModule,
MatButtonToggleModule,
MatCardModule,
MatCheckboxModule,
MatChipsModule,
MatDatepickerModule,
MatDialogModule,
MatExpansionModule,
MatGridListModule,
MatIconModule,
MatInputModule,
MatListModule,
MatMenuModule,
MatNativeDateModule,
MatPaginatorModule,
MatProgressBarModule,
MatProgressSpinnerModule,
MatRadioModule,
MatRippleModule,
MatSelectModule,
MatSidenavModule,
MatSliderModule,
MatSlideToggleModule,
MatSnackBarModule,
MatSortModule,
MatTableModule,
MatTabsModule,
MatToolbarModule,
MatTooltipModule,
MatStepperModule,
} from '@angular/material';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {CdkTableModule} from '@angular/cdk/table';
import { StatComponent } from './stat/stat.component';
import { Num2ArrayPipe, HasNonZeroPropertyPipe, SumOfPropertyPipe } from './game/game.pipe';
const ROUTES = [
{ path: '', redirectTo: 'games', pathMatch: 'full' },
{ path: 'games', component: GameComponent },
{ path: 'stat', component: StatComponent }
];
@NgModule({
exports: [
CdkTableModule,
MatAutocompleteModule,
MatButtonModule,
MatButtonToggleModule,
MatCardModule,
MatCheckboxModule,
MatChipsModule,
MatStepperModule,
MatDatepickerModule,
MatDialogModule,
MatExpansionModule,
MatGridListModule,
MatIconModule,
MatInputModule,
MatListModule,
MatMenuModule,
MatNativeDateModule,
MatPaginatorModule,
MatProgressBarModule, | MatRippleModule,
MatSelectModule,
MatSidenavModule,
MatSliderModule,
MatSlideToggleModule,
MatSnackBarModule,
MatSortModule,
MatTableModule,
MatTabsModule,
MatToolbarModule,
MatTooltipModule,
]
})
export class MaterialModule {}
@NgModule({
imports: [
BrowserModule,
BrowserAnimationsModule,
FormsModule,
HttpModule,
MaterialModule,
RouterModule.forRoot(ROUTES)
],
declarations: [
AppComponent,
GameComponent,
GameActionDialog,
GameChatDialog,
GameConfirmDialog,
StatComponent,
Num2ArrayPipe,
HasNonZeroPropertyPipe,
SumOfPropertyPipe
],
providers: [
GameService,
{provide: LocationStrategy, useClass: HashLocationStrategy}
],
entryComponents: [
GameActionDialog,
GameChatDialog,
GameConfirmDialog
],
bootstrap: [AppComponent]
})
export class AppModule { } | MatProgressSpinnerModule,
MatRadioModule, | random_line_split |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { RouterModule } from '@angular/router';
import { HashLocationStrategy, LocationStrategy } from '@angular/common';
import { AppComponent } from './app.component';
import { GameService } from './game.service';
import { GameComponent } from './game/game.component';
import { GameActionDialog } from './game/game.action.dialog';
import { GameChatDialog } from './game/game.chat.dialog';
import { GameConfirmDialog } from './game/game.confirm.dialog';
import {
MatAutocompleteModule,
MatButtonModule,
MatButtonToggleModule,
MatCardModule,
MatCheckboxModule,
MatChipsModule,
MatDatepickerModule,
MatDialogModule,
MatExpansionModule,
MatGridListModule,
MatIconModule,
MatInputModule,
MatListModule,
MatMenuModule,
MatNativeDateModule,
MatPaginatorModule,
MatProgressBarModule,
MatProgressSpinnerModule,
MatRadioModule,
MatRippleModule,
MatSelectModule,
MatSidenavModule,
MatSliderModule,
MatSlideToggleModule,
MatSnackBarModule,
MatSortModule,
MatTableModule,
MatTabsModule,
MatToolbarModule,
MatTooltipModule,
MatStepperModule,
} from '@angular/material';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {CdkTableModule} from '@angular/cdk/table';
import { StatComponent } from './stat/stat.component';
import { Num2ArrayPipe, HasNonZeroPropertyPipe, SumOfPropertyPipe } from './game/game.pipe';
const ROUTES = [
{ path: '', redirectTo: 'games', pathMatch: 'full' },
{ path: 'games', component: GameComponent },
{ path: 'stat', component: StatComponent }
];
@NgModule({
exports: [
CdkTableModule,
MatAutocompleteModule,
MatButtonModule,
MatButtonToggleModule,
MatCardModule,
MatCheckboxModule,
MatChipsModule,
MatStepperModule,
MatDatepickerModule,
MatDialogModule,
MatExpansionModule,
MatGridListModule,
MatIconModule,
MatInputModule,
MatListModule,
MatMenuModule,
MatNativeDateModule,
MatPaginatorModule,
MatProgressBarModule,
MatProgressSpinnerModule,
MatRadioModule,
MatRippleModule,
MatSelectModule,
MatSidenavModule,
MatSliderModule,
MatSlideToggleModule,
MatSnackBarModule,
MatSortModule,
MatTableModule,
MatTabsModule,
MatToolbarModule,
MatTooltipModule,
]
})
export class | {}
@NgModule({
imports: [
BrowserModule,
BrowserAnimationsModule,
FormsModule,
HttpModule,
MaterialModule,
RouterModule.forRoot(ROUTES)
],
declarations: [
AppComponent,
GameComponent,
GameActionDialog,
GameChatDialog,
GameConfirmDialog,
StatComponent,
Num2ArrayPipe,
HasNonZeroPropertyPipe,
SumOfPropertyPipe
],
providers: [
GameService,
{provide: LocationStrategy, useClass: HashLocationStrategy}
],
entryComponents: [
GameActionDialog,
GameChatDialog,
GameConfirmDialog
],
bootstrap: [AppComponent]
})
export class AppModule { }
| MaterialModule | identifier_name |
portfolio.py | # encoding: utf-8
# (c) 2019 Open Risk (https://www.openriskmanagement.com)
#
# portfolioAnalytics is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of correlationMatrix. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
""" This module provides simple functionality for holding portfolio data for calculation purposes.
| * Portfolio_ implements a simple portfolio data container
"""
import numpy as np
class Portfolio(object):
""" The _`Portfolio` object implements a simple portfolio data structure. See `loan tape <https://www.openriskmanual.org/wiki/Loan_Tape>`_ for more general structures.
"""
def __init__(self, psize=0, rating=[], exposure=[], factor=[]):
"""Initialize portfolio.
:param psize: initialization values
:param rating: list of default probabilities
:param exposure: list of exposures (numerical values, e.g. `Exposure At Default <https://www.openriskmanual.org/wiki/Exposure_At_Default>`_
:param factor: list of factor indices (those should match the factors used e.g. in a correlation matrix
:type psize: int
:type rating: list of floats
:type exposure: list of floats
:type factor: list of int
:returns: returns a Portfolio object
:rtype: object
.. note:: The initialization in itself does not validate if the provided values form indeed valid portfolio data
"""
self.psize = psize
self.exposure = exposure
self.rating = rating
self.factor = factor
def loadjson(self, data):
"""Load portfolio data from JSON object.
The data format for the input json object is a list of dictionaries as follows
[{"ID":"1","PD":"0.015","EAD":"40","FACTOR":0},
...
{"ID":"2","PD":"0.286","EAD":"20","FACTOR":0}]
"""
self.psize = len(data)
for x in data:
self.exposure.append(float(x['EAD']))
self.rating.append(float(x['PD']))
self.factor.append(x['FACTOR'])
def preprocess_portfolio(self):
"""
Produce some portfolio statistics like total number of entities and exposure weighted average probability of default
:return:
"""
N = self.psize
Total_Exposure = np.sum(self.exposure)
p = np.inner(self.rating, self.exposure) / Total_Exposure
return N, p | random_line_split |
|
portfolio.py | # encoding: utf-8
# (c) 2019 Open Risk (https://www.openriskmanagement.com)
#
# portfolioAnalytics is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of correlationMatrix. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
""" This module provides simple functionality for holding portfolio data for calculation purposes.
* Portfolio_ implements a simple portfolio data container
"""
import numpy as np
class Portfolio(object):
""" The _`Portfolio` object implements a simple portfolio data structure. See `loan tape <https://www.openriskmanual.org/wiki/Loan_Tape>`_ for more general structures.
"""
def __init__(self, psize=0, rating=[], exposure=[], factor=[]):
|
def loadjson(self, data):
"""Load portfolio data from JSON object.
The data format for the input json object is a list of dictionaries as follows
[{"ID":"1","PD":"0.015","EAD":"40","FACTOR":0},
...
{"ID":"2","PD":"0.286","EAD":"20","FACTOR":0}]
"""
self.psize = len(data)
for x in data:
self.exposure.append(float(x['EAD']))
self.rating.append(float(x['PD']))
self.factor.append(x['FACTOR'])
def preprocess_portfolio(self):
"""
Produce some portfolio statistics like total number of entities and exposure weighted average probability of default
:return:
"""
N = self.psize
Total_Exposure = np.sum(self.exposure)
p = np.inner(self.rating, self.exposure) / Total_Exposure
return N, p
| """Initialize portfolio.
:param psize: initialization values
:param rating: list of default probabilities
:param exposure: list of exposures (numerical values, e.g. `Exposure At Default <https://www.openriskmanual.org/wiki/Exposure_At_Default>`_
:param factor: list of factor indices (those should match the factors used e.g. in a correlation matrix
:type psize: int
:type rating: list of floats
:type exposure: list of floats
:type factor: list of int
:returns: returns a Portfolio object
:rtype: object
.. note:: The initialization in itself does not validate if the provided values form indeed valid portfolio data
"""
self.psize = psize
self.exposure = exposure
self.rating = rating
self.factor = factor | identifier_body |
portfolio.py | # encoding: utf-8
# (c) 2019 Open Risk (https://www.openriskmanagement.com)
#
# portfolioAnalytics is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of correlationMatrix. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
""" This module provides simple functionality for holding portfolio data for calculation purposes.
* Portfolio_ implements a simple portfolio data container
"""
import numpy as np
class Portfolio(object):
""" The _`Portfolio` object implements a simple portfolio data structure. See `loan tape <https://www.openriskmanual.org/wiki/Loan_Tape>`_ for more general structures.
"""
def __init__(self, psize=0, rating=[], exposure=[], factor=[]):
"""Initialize portfolio.
:param psize: initialization values
:param rating: list of default probabilities
:param exposure: list of exposures (numerical values, e.g. `Exposure At Default <https://www.openriskmanual.org/wiki/Exposure_At_Default>`_
:param factor: list of factor indices (those should match the factors used e.g. in a correlation matrix
:type psize: int
:type rating: list of floats
:type exposure: list of floats
:type factor: list of int
:returns: returns a Portfolio object
:rtype: object
.. note:: The initialization in itself does not validate if the provided values form indeed valid portfolio data
"""
self.psize = psize
self.exposure = exposure
self.rating = rating
self.factor = factor
def loadjson(self, data):
"""Load portfolio data from JSON object.
The data format for the input json object is a list of dictionaries as follows
[{"ID":"1","PD":"0.015","EAD":"40","FACTOR":0},
...
{"ID":"2","PD":"0.286","EAD":"20","FACTOR":0}]
"""
self.psize = len(data)
for x in data:
self.exposure.append(float(x['EAD']))
self.rating.append(float(x['PD']))
self.factor.append(x['FACTOR'])
def | (self):
"""
Produce some portfolio statistics like total number of entities and exposure weighted average probability of default
:return:
"""
N = self.psize
Total_Exposure = np.sum(self.exposure)
p = np.inner(self.rating, self.exposure) / Total_Exposure
return N, p
| preprocess_portfolio | identifier_name |
portfolio.py | # encoding: utf-8
# (c) 2019 Open Risk (https://www.openriskmanagement.com)
#
# portfolioAnalytics is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of correlationMatrix. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
""" This module provides simple functionality for holding portfolio data for calculation purposes.
* Portfolio_ implements a simple portfolio data container
"""
import numpy as np
class Portfolio(object):
""" The _`Portfolio` object implements a simple portfolio data structure. See `loan tape <https://www.openriskmanual.org/wiki/Loan_Tape>`_ for more general structures.
"""
def __init__(self, psize=0, rating=[], exposure=[], factor=[]):
"""Initialize portfolio.
:param psize: initialization values
:param rating: list of default probabilities
:param exposure: list of exposures (numerical values, e.g. `Exposure At Default <https://www.openriskmanual.org/wiki/Exposure_At_Default>`_
:param factor: list of factor indices (those should match the factors used e.g. in a correlation matrix
:type psize: int
:type rating: list of floats
:type exposure: list of floats
:type factor: list of int
:returns: returns a Portfolio object
:rtype: object
.. note:: The initialization in itself does not validate if the provided values form indeed valid portfolio data
"""
self.psize = psize
self.exposure = exposure
self.rating = rating
self.factor = factor
def loadjson(self, data):
"""Load portfolio data from JSON object.
The data format for the input json object is a list of dictionaries as follows
[{"ID":"1","PD":"0.015","EAD":"40","FACTOR":0},
...
{"ID":"2","PD":"0.286","EAD":"20","FACTOR":0}]
"""
self.psize = len(data)
for x in data:
|
def preprocess_portfolio(self):
"""
Produce some portfolio statistics like total number of entities and exposure weighted average probability of default
:return:
"""
N = self.psize
Total_Exposure = np.sum(self.exposure)
p = np.inner(self.rating, self.exposure) / Total_Exposure
return N, p
| self.exposure.append(float(x['EAD']))
self.rating.append(float(x['PD']))
self.factor.append(x['FACTOR']) | conditional_block |
SoftKeyboard.js | SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
* OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
*****************************************************************************/
/* @flow */
"use strict";
/*globals cordova*/
import Emitter from "yasmf-emitter";
// private properties -- we need some symbols
let _selectors = Symbol();
let _useSmoothScrolling = Symbol();
let _smoothScrollDuration = Symbol();
let _showElementUnderKeyboard = Symbol();
/**
* given a selector string, return matching elements in an Array
* @param {string} selectorString
* @return Array<Node>
*/
function getScrollContainers(selectorString/*: string*/)/*: Array<Node>*/ {
return Array.from(document.querySelectorAll(selectorString));
}
/**
* Given an element, returns an array representing the start and end of
* the current selection.
* @param {Node} focusedElement
* @return Array<number>
*/
function getElementSelection(focusedElement/*: Node*/)/*: Array<number>*/ {
return [focusedElement.selectionStart, focusedElement.selectionEnd];
}
/**
* Given an element and a tuple representing the start and end of the selection,
* set the selection on the element.
* @param {Node} focusedElement
* @param {number} selectionStart the start of the selection
* @param {number} selectionEnd the end of the selection
*/
function setElementSelection(focusedElement/*: Node*/, [selectionStart/*: number*/, selectionEnd/*: number*/] = [0, 0])/*: void*/ {
focusedElement.selectionStart = selectionStart;
focusedElement.selectionEnd = selectionEnd;
}
function | (keyboardHeight/*: number*/) {
let e = document.createElement("div");
e.className = "sk-element-under-keyboard";
e.style.position = "absolute";
e.style.bottom = "0";
e.style.left = "0";
e.style.right = "0";
e.style.zIndex = "9999999999999";
e.style.height = `${keyboardHeight}px`;
document.body.appendChild(e);
}
function hideElementUnderKeyboard()/*: void*/ {
let els = Array.from(document.querySelectorAll(".sk-element-under-keyboard"));
els.forEach((el) => document.body.removeChild(el));
}
/**
* Saves the element's current text selection, then resets it to 0. After a tick, it
* restores the element's saved text selection.
* This is to fix iOS's buggy behavior regarding cursor positioning after the scrolling
* of an element.
*
* @param {Node} focusedElement
*/
function handleTextSelection(focusedElement/*: Node*/)/*: void*/ {
setTimeout(() => {
// save the selection
let selection = getElementSelection(focusedElement);
// reset the selection to 0,0
setElementSelection(focusedElement);
// after a short delay, restore the selection
setTimeout(() => setElementSelection(focusedElement, selection), 33);
}, 0);
}
/**
* Provides methods for avoiding the soft keyboard. This tries to be automatic,
* but you will need to supply scrollable selectors.
*/
export default class SoftKeyboard extends Emitter {
/**
* Construct an instance of the SoftKeyboard
*
* @return SoftKeyboard
*/
constructor(options) {
super(options);
}
/**
* Initialize a SoftKeyboard. Will be called automatically during construction
* @param {Array<string>} [selectors] defaults to an empty array
* @param {boolean} [useSmoothScrolling] defaults to true
* @param {number} [smoothScrollDuration] defaults to 100
* @param {boolean} [showElementUnderKeyboard] defaults to false
*/
init({selectors=[], useSmoothScrolling = true, smoothScrollDuration = 100,
showElementUnderKeyboard = false} = {}) {
// selectors: Array, useSmoothScrolling: boolean, smoothScrollDuration: number
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.disableScroll(true);
window.addEventListener("native.keyboardshow", this.keyboardShown.bind(this));
window.addEventListener("native.keyboardhide", this.keyboardHidden.bind(this));
}
}
this[_selectors] = new Set();
selectors.forEach(sel => this.addSelector(sel));
this[_useSmoothScrolling] = useSmoothScrolling;
this[_smoothScrollDuration] = smoothScrollDuration;
this[_showElementUnderKeyboard] = showElementUnderKeyboard;
}
/**
* Adds a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
addSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].add(selector);
return this;
}
/**
* Removes a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
removeSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].delete(selector);
return this;
}
get selectors()/*: Array*/ {
return Array.from(this[_selectors]);
}
get selectorString()/*: string*/ {
return this.selectors.join(", ");
}
get useSmoothScrolling()/*: boolean*/ {
return this[_useSmoothScrolling];
}
set useSmoothScrolling(b/*: boolean*/)/*: void*/ {
this[_useSmoothScrolling] = b;
}
get smoothScrollDuration()/*: number*/ {
return this[_smoothScrollDuration];
}
set smoothScrollDuration(d/*: number*/)/*: void*/ {
this[_smoothScrollDuration] = d;
}
get showElementUnderKeyboard()/*: boolean*/ {
return this[_showElementUnderKeyboard];
}
set showElementUnderKeyboard(b/*: boolean*/)/*: void*/ {
this[_showElementUnderKeyboard] = b;
}
/**
* Shows the keyboard, if possible
*/
showKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.show();
return;
}
}
if (force) {
this.keyboardShown({keyboardHeight: 240});
}
}
/**
* Hide the keyboard, if possible
*/
hideKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.hide();
return;
}
}
if (force) {
this.keyboardHidden({});
}
}
/**
* Triggered when the soft keyboard is displayed
* @param {{keyboardHeight: number}} e the event triggered from the keyboard plugin
*/
keyboardShown(e)/*: void*/ {
this.emit("keyboardShown", e);
this.emit("willResize", e);
console.log ("keyboard shown", e.keyboardHeight);
setTimeout(() => {
let screenHeight = window.innerHeight; //(document.body.clientWidth === window.screen.height ? window.screen.width : window.screen.height);
let scrollContainers = getScrollContainers(this.selectorString);
let keyboardHeight = 0; //e.keyboardHeight;
if (this.showElementUnderKeyboard) {
showElementUnderKeyboard(keyboardHeight);
}
// for each scroll container in the DOM, we need to calculate the
// the height it should be to fit in the reduced view
scrollContainers.forEach((sc) => {
let scTop = sc.getBoundingClientRect().top;
// now that we know the top of the scroll container, the height of the
// the screen, and the height of the keyboard, we can calculate the
// appropriate max-height for the container.
let maxHeight = "" + (screenHeight - keyboardHeight - scTop);
console.log("New height", maxHeight);
if (maxHeight > 100) {
sc.style.maxHeight = maxHeight + "px";
}
});
this.emit("didResize", e);
// changing the height isn't sufficient: we need to scroll any focused
// element into view.
setTimeout(() => {
let focusedElement = document.querySelector(":focus");
if (focusedElement) {
if (!this.useSmoothScrolling || !window.requestAnimationFrame) {
// scroll the element into view, but only if we have to
if (focusedElement.scrollIntoViewIfNeeded) {
focusedElement.scrollIntoViewIfNeeded();
} else {
// aim for the bottom of the viewport
focusedElement.scrollIntoView(false);
}
// iOS doesn't always position the cursor correctly after
// a scroll operation. Clear the selection so that iOS is
// forced to recompute where the cursor should appear.
handleTextSelection(focusedElement);
} else {
// to scroll the element smoothly into view, things become a little
// more difficult.
let fElTop = focusedElement.getBoundingClientRect().top,
sc = focusedElement, scTop, scBottom | showElementUnderKeyboard | identifier_name |
SoftKeyboard.js | /*: string*/)/*: Array<Node>*/ {
return Array.from(document.querySelectorAll(selectorString));
}
/**
* Given an element, returns an array representing the start and end of
* the current selection.
* @param {Node} focusedElement
* @return Array<number>
*/
function getElementSelection(focusedElement/*: Node*/)/*: Array<number>*/ {
return [focusedElement.selectionStart, focusedElement.selectionEnd];
}
/**
* Given an element and a tuple representing the start and end of the selection,
* set the selection on the element.
* @param {Node} focusedElement
* @param {number} selectionStart the start of the selection
* @param {number} selectionEnd the end of the selection
*/
function setElementSelection(focusedElement/*: Node*/, [selectionStart/*: number*/, selectionEnd/*: number*/] = [0, 0])/*: void*/ {
focusedElement.selectionStart = selectionStart;
focusedElement.selectionEnd = selectionEnd;
}
function showElementUnderKeyboard(keyboardHeight/*: number*/) {
let e = document.createElement("div");
e.className = "sk-element-under-keyboard";
e.style.position = "absolute";
e.style.bottom = "0";
e.style.left = "0";
e.style.right = "0";
e.style.zIndex = "9999999999999";
e.style.height = `${keyboardHeight}px`;
document.body.appendChild(e);
}
function hideElementUnderKeyboard()/*: void*/ {
let els = Array.from(document.querySelectorAll(".sk-element-under-keyboard"));
els.forEach((el) => document.body.removeChild(el));
}
/**
* Saves the element's current text selection, then resets it to 0. After a tick, it
* restores the element's saved text selection.
* This is to fix iOS's buggy behavior regarding cursor positioning after the scrolling
* of an element.
*
* @param {Node} focusedElement
*/
function handleTextSelection(focusedElement/*: Node*/)/*: void*/ {
setTimeout(() => {
// save the selection
let selection = getElementSelection(focusedElement);
// reset the selection to 0,0
setElementSelection(focusedElement);
// after a short delay, restore the selection
setTimeout(() => setElementSelection(focusedElement, selection), 33);
}, 0);
}
/**
* Provides methods for avoiding the soft keyboard. This tries to be automatic,
* but you will need to supply scrollable selectors.
*/
export default class SoftKeyboard extends Emitter {
/**
* Construct an instance of the SoftKeyboard
*
* @return SoftKeyboard
*/
constructor(options) {
super(options);
}
/**
* Initialize a SoftKeyboard. Will be called automatically during construction
* @param {Array<string>} [selectors] defaults to an empty array
* @param {boolean} [useSmoothScrolling] defaults to true
* @param {number} [smoothScrollDuration] defaults to 100
* @param {boolean} [showElementUnderKeyboard] defaults to false
*/
init({selectors=[], useSmoothScrolling = true, smoothScrollDuration = 100,
showElementUnderKeyboard = false} = {}) {
// selectors: Array, useSmoothScrolling: boolean, smoothScrollDuration: number
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.disableScroll(true);
window.addEventListener("native.keyboardshow", this.keyboardShown.bind(this));
window.addEventListener("native.keyboardhide", this.keyboardHidden.bind(this));
}
}
this[_selectors] = new Set();
selectors.forEach(sel => this.addSelector(sel));
this[_useSmoothScrolling] = useSmoothScrolling;
this[_smoothScrollDuration] = smoothScrollDuration;
this[_showElementUnderKeyboard] = showElementUnderKeyboard;
}
/**
* Adds a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
addSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].add(selector);
return this;
}
/**
* Removes a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
removeSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].delete(selector);
return this;
}
get selectors()/*: Array*/ {
return Array.from(this[_selectors]);
}
get selectorString()/*: string*/ {
return this.selectors.join(", ");
}
get useSmoothScrolling()/*: boolean*/ {
return this[_useSmoothScrolling];
}
set useSmoothScrolling(b/*: boolean*/)/*: void*/ {
this[_useSmoothScrolling] = b;
}
get smoothScrollDuration()/*: number*/ {
return this[_smoothScrollDuration];
}
set smoothScrollDuration(d/*: number*/)/*: void*/ {
this[_smoothScrollDuration] = d;
}
get showElementUnderKeyboard()/*: boolean*/ {
return this[_showElementUnderKeyboard];
}
set showElementUnderKeyboard(b/*: boolean*/)/*: void*/ {
this[_showElementUnderKeyboard] = b;
}
/**
* Shows the keyboard, if possible
*/
showKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.show();
return;
}
}
if (force) {
this.keyboardShown({keyboardHeight: 240});
}
}
/**
* Hide the keyboard, if possible
*/
hideKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.hide();
return;
}
}
if (force) {
this.keyboardHidden({});
}
}
/**
* Triggered when the soft keyboard is displayed
* @param {{keyboardHeight: number}} e the event triggered from the keyboard plugin
*/
keyboardShown(e)/*: void*/ {
this.emit("keyboardShown", e);
this.emit("willResize", e);
console.log ("keyboard shown", e.keyboardHeight);
setTimeout(() => {
let screenHeight = window.innerHeight; //(document.body.clientWidth === window.screen.height ? window.screen.width : window.screen.height);
let scrollContainers = getScrollContainers(this.selectorString);
let keyboardHeight = 0; //e.keyboardHeight;
if (this.showElementUnderKeyboard) {
showElementUnderKeyboard(keyboardHeight);
}
// for each scroll container in the DOM, we need to calculate the
// the height it should be to fit in the reduced view
scrollContainers.forEach((sc) => {
let scTop = sc.getBoundingClientRect().top;
// now that we know the top of the scroll container, the height of the
// the screen, and the height of the keyboard, we can calculate the
// appropriate max-height for the container.
let maxHeight = "" + (screenHeight - keyboardHeight - scTop);
console.log("New height", maxHeight);
if (maxHeight > 100) {
sc.style.maxHeight = maxHeight + "px";
}
});
this.emit("didResize", e);
// changing the height isn't sufficient: we need to scroll any focused
// element into view.
setTimeout(() => {
let focusedElement = document.querySelector(":focus");
if (focusedElement) {
if (!this.useSmoothScrolling || !window.requestAnimationFrame) {
// scroll the element into view, but only if we have to
if (focusedElement.scrollIntoViewIfNeeded) {
focusedElement.scrollIntoViewIfNeeded();
} else {
// aim for the bottom of the viewport
focusedElement.scrollIntoView(false);
}
// iOS doesn't always position the cursor correctly after
// a scroll operation. Clear the selection so that iOS is
// forced to recompute where the cursor should appear.
handleTextSelection(focusedElement);
} else {
// to scroll the element smoothly into view, things become a little
// more difficult.
let fElTop = focusedElement.getBoundingClientRect().top,
sc = focusedElement, scTop, scBottom,
selectorString = this.selectorString;
// find the containing scroll container if we can
while (sc && !sc.matches(selectorString)) {
sc = sc.parentElement;
}
if (sc) {
scTop = sc.getBoundingClientRect().top;
scBottom = sc.getBoundingClientRect().bottom;
if (fElTop < scTop || fElTop > (((scBottom - scTop) / 2) + scTop)) {
// the element isn't above the keyboard (or is too far above),
// scroll it into view smoothly
let targetTop = ((scBottom - scTop) / 2) + scTop,
deltaTop = fElTop - targetTop,
origScrollTop = sc.scrollTop,
startTimestamp = null;
// animate our scroll
let scrollStep;
window.requestAnimationFrame(scrollStep = (timestamp) => {
if (!startTimestamp) | {
startTimestamp = timestamp;
} | conditional_block |
|
SoftKeyboard.js | SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
* OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
*****************************************************************************/
/* @flow */
"use strict";
/*globals cordova*/
import Emitter from "yasmf-emitter";
// private properties -- we need some symbols
let _selectors = Symbol();
let _useSmoothScrolling = Symbol();
let _smoothScrollDuration = Symbol();
let _showElementUnderKeyboard = Symbol();
/**
* given a selector string, return matching elements in an Array
* @param {string} selectorString
* @return Array<Node>
*/
function getScrollContainers(selectorString/*: string*/)/*: Array<Node>*/ {
return Array.from(document.querySelectorAll(selectorString));
}
/**
* Given an element, returns an array representing the start and end of
* the current selection.
* @param {Node} focusedElement
* @return Array<number>
*/
function getElementSelection(focusedElement/*: Node*/)/*: Array<number>*/ {
return [focusedElement.selectionStart, focusedElement.selectionEnd];
}
/**
* Given an element and a tuple representing the start and end of the selection,
* set the selection on the element.
* @param {Node} focusedElement
* @param {number} selectionStart the start of the selection
* @param {number} selectionEnd the end of the selection
*/
function setElementSelection(focusedElement/*: Node*/, [selectionStart/*: number*/, selectionEnd/*: number*/] = [0, 0])/*: void*/ {
focusedElement.selectionStart = selectionStart;
focusedElement.selectionEnd = selectionEnd;
}
function showElementUnderKeyboard(keyboardHeight/*: number*/) {
let e = document.createElement("div");
e.className = "sk-element-under-keyboard";
e.style.position = "absolute";
e.style.bottom = "0";
e.style.left = "0";
e.style.right = "0";
e.style.zIndex = "9999999999999";
e.style.height = `${keyboardHeight}px`;
document.body.appendChild(e);
}
function hideElementUnderKeyboard()/*: void*/ {
let els = Array.from(document.querySelectorAll(".sk-element-under-keyboard"));
els.forEach((el) => document.body.removeChild(el));
}
/**
* Saves the element's current text selection, then resets it to 0. After a tick, it
* restores the element's saved text selection.
* This is to fix iOS's buggy behavior regarding cursor positioning after the scrolling
* of an element.
*
* @param {Node} focusedElement
*/
function handleTextSelection(focusedElement/*: Node*/)/*: void*/ {
setTimeout(() => {
// save the selection
let selection = getElementSelection(focusedElement);
// reset the selection to 0,0
setElementSelection(focusedElement);
// after a short delay, restore the selection
setTimeout(() => setElementSelection(focusedElement, selection), 33);
}, 0);
}
/**
* Provides methods for avoiding the soft keyboard. This tries to be automatic,
* but you will need to supply scrollable selectors.
*/
export default class SoftKeyboard extends Emitter {
/**
* Construct an instance of the SoftKeyboard
*
* @return SoftKeyboard
*/
constructor(options) {
super(options);
}
/**
* Initialize a SoftKeyboard. Will be called automatically during construction
* @param {Array<string>} [selectors] defaults to an empty array
* @param {boolean} [useSmoothScrolling] defaults to true
* @param {number} [smoothScrollDuration] defaults to 100
* @param {boolean} [showElementUnderKeyboard] defaults to false
*/
init({selectors=[], useSmoothScrolling = true, smoothScrollDuration = 100,
showElementUnderKeyboard = false} = {}) {
// selectors: Array, useSmoothScrolling: boolean, smoothScrollDuration: number
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.disableScroll(true);
window.addEventListener("native.keyboardshow", this.keyboardShown.bind(this));
window.addEventListener("native.keyboardhide", this.keyboardHidden.bind(this));
}
}
this[_selectors] = new Set();
selectors.forEach(sel => this.addSelector(sel));
this[_useSmoothScrolling] = useSmoothScrolling;
this[_smoothScrollDuration] = smoothScrollDuration;
this[_showElementUnderKeyboard] = showElementUnderKeyboard;
}
/**
* Adds a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
addSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].add(selector);
return this;
}
/**
* Removes a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
removeSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].delete(selector);
return this;
}
get selectors()/*: Array*/ {
return Array.from(this[_selectors]);
}
get selectorString()/*: string*/ {
return this.selectors.join(", ");
}
get useSmoothScrolling()/*: boolean*/ {
return this[_useSmoothScrolling];
}
set useSmoothScrolling(b/*: boolean*/)/*: void*/ {
this[_useSmoothScrolling] = b;
}
get smoothScrollDuration()/*: number*/ {
return this[_smoothScrollDuration];
}
set smoothScrollDuration(d/*: number*/)/*: void*/ {
this[_smoothScrollDuration] = d;
}
get showElementUnderKeyboard()/*: boolean*/ {
return this[_showElementUnderKeyboard];
}
set showElementUnderKeyboard(b/*: boolean*/)/*: void*/ |
/**
* Shows the keyboard, if possible
*/
showKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.show();
return;
}
}
if (force) {
this.keyboardShown({keyboardHeight: 240});
}
}
/**
* Hide the keyboard, if possible
*/
hideKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.hide();
return;
}
}
if (force) {
this.keyboardHidden({});
}
}
/**
* Triggered when the soft keyboard is displayed
* @param {{keyboardHeight: number}} e the event triggered from the keyboard plugin
*/
keyboardShown(e)/*: void*/ {
this.emit("keyboardShown", e);
this.emit("willResize", e);
console.log ("keyboard shown", e.keyboardHeight);
setTimeout(() => {
let screenHeight = window.innerHeight; //(document.body.clientWidth === window.screen.height ? window.screen.width : window.screen.height);
let scrollContainers = getScrollContainers(this.selectorString);
let keyboardHeight = 0; //e.keyboardHeight;
if (this.showElementUnderKeyboard) {
showElementUnderKeyboard(keyboardHeight);
}
// for each scroll container in the DOM, we need to calculate the
// the height it should be to fit in the reduced view
scrollContainers.forEach((sc) => {
let scTop = sc.getBoundingClientRect().top;
// now that we know the top of the scroll container, the height of the
// the screen, and the height of the keyboard, we can calculate the
// appropriate max-height for the container.
let maxHeight = "" + (screenHeight - keyboardHeight - scTop);
console.log("New height", maxHeight);
if (maxHeight > 100) {
sc.style.maxHeight = maxHeight + "px";
}
});
this.emit("didResize", e);
// changing the height isn't sufficient: we need to scroll any focused
// element into view.
setTimeout(() => {
let focusedElement = document.querySelector(":focus");
if (focusedElement) {
if (!this.useSmoothScrolling || !window.requestAnimationFrame) {
// scroll the element into view, but only if we have to
if (focusedElement.scrollIntoViewIfNeeded) {
focusedElement.scrollIntoViewIfNeeded();
} else {
// aim for the bottom of the viewport
focusedElement.scrollIntoView(false);
}
// iOS doesn't always position the cursor correctly after
// a scroll operation. Clear the selection so that iOS is
// forced to recompute where the cursor should appear.
handleTextSelection(focusedElement);
} else {
// to scroll the element smoothly into view, things become a little
// more difficult.
let fElTop = focusedElement.getBoundingClientRect().top,
sc = focusedElement, scTop, sc | {
this[_showElementUnderKeyboard] = b;
} | identifier_body |
SoftKeyboard.js | SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
* OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
*****************************************************************************/
/* @flow */
"use strict";
/*globals cordova*/
import Emitter from "yasmf-emitter";
// private properties -- we need some symbols
let _selectors = Symbol();
let _useSmoothScrolling = Symbol();
let _smoothScrollDuration = Symbol();
let _showElementUnderKeyboard = Symbol();
/**
* given a selector string, return matching elements in an Array
* @param {string} selectorString
* @return Array<Node>
*/
function getScrollContainers(selectorString/*: string*/)/*: Array<Node>*/ {
return Array.from(document.querySelectorAll(selectorString));
}
/**
* Given an element, returns an array representing the start and end of
* the current selection.
* @param {Node} focusedElement
* @return Array<number>
*/
function getElementSelection(focusedElement/*: Node*/)/*: Array<number>*/ {
return [focusedElement.selectionStart, focusedElement.selectionEnd];
}
/**
* Given an element and a tuple representing the start and end of the selection,
* set the selection on the element.
* @param {Node} focusedElement
* @param {number} selectionStart the start of the selection
* @param {number} selectionEnd the end of the selection
*/
function setElementSelection(focusedElement/*: Node*/, [selectionStart/*: number*/, selectionEnd/*: number*/] = [0, 0])/*: void*/ {
focusedElement.selectionStart = selectionStart;
focusedElement.selectionEnd = selectionEnd;
}
function showElementUnderKeyboard(keyboardHeight/*: number*/) {
let e = document.createElement("div"); | e.style.zIndex = "9999999999999";
e.style.height = `${keyboardHeight}px`;
document.body.appendChild(e);
}
function hideElementUnderKeyboard()/*: void*/ {
let els = Array.from(document.querySelectorAll(".sk-element-under-keyboard"));
els.forEach((el) => document.body.removeChild(el));
}
/**
* Saves the element's current text selection, then resets it to 0. After a tick, it
* restores the element's saved text selection.
* This is to fix iOS's buggy behavior regarding cursor positioning after the scrolling
* of an element.
*
* @param {Node} focusedElement
*/
function handleTextSelection(focusedElement/*: Node*/)/*: void*/ {
setTimeout(() => {
// save the selection
let selection = getElementSelection(focusedElement);
// reset the selection to 0,0
setElementSelection(focusedElement);
// after a short delay, restore the selection
setTimeout(() => setElementSelection(focusedElement, selection), 33);
}, 0);
}
/**
* Provides methods for avoiding the soft keyboard. This tries to be automatic,
* but you will need to supply scrollable selectors.
*/
export default class SoftKeyboard extends Emitter {
/**
* Construct an instance of the SoftKeyboard
*
* @return SoftKeyboard
*/
constructor(options) {
super(options);
}
/**
* Initialize a SoftKeyboard. Will be called automatically during construction
* @param {Array<string>} [selectors] defaults to an empty array
* @param {boolean} [useSmoothScrolling] defaults to true
* @param {number} [smoothScrollDuration] defaults to 100
* @param {boolean} [showElementUnderKeyboard] defaults to false
*/
init({selectors=[], useSmoothScrolling = true, smoothScrollDuration = 100,
showElementUnderKeyboard = false} = {}) {
// selectors: Array, useSmoothScrolling: boolean, smoothScrollDuration: number
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.disableScroll(true);
window.addEventListener("native.keyboardshow", this.keyboardShown.bind(this));
window.addEventListener("native.keyboardhide", this.keyboardHidden.bind(this));
}
}
this[_selectors] = new Set();
selectors.forEach(sel => this.addSelector(sel));
this[_useSmoothScrolling] = useSmoothScrolling;
this[_smoothScrollDuration] = smoothScrollDuration;
this[_showElementUnderKeyboard] = showElementUnderKeyboard;
}
/**
* Adds a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
addSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].add(selector);
return this;
}
/**
* Removes a selector
* @param {string} selector A CSS selector that identifies a scrolling container
* @return {SoftKeyboard}
*/
removeSelector(selector/*: string*/)/*: SoftKeyboard*/ {
this[_selectors].delete(selector);
return this;
}
get selectors()/*: Array*/ {
return Array.from(this[_selectors]);
}
get selectorString()/*: string*/ {
return this.selectors.join(", ");
}
get useSmoothScrolling()/*: boolean*/ {
return this[_useSmoothScrolling];
}
set useSmoothScrolling(b/*: boolean*/)/*: void*/ {
this[_useSmoothScrolling] = b;
}
get smoothScrollDuration()/*: number*/ {
return this[_smoothScrollDuration];
}
set smoothScrollDuration(d/*: number*/)/*: void*/ {
this[_smoothScrollDuration] = d;
}
get showElementUnderKeyboard()/*: boolean*/ {
return this[_showElementUnderKeyboard];
}
set showElementUnderKeyboard(b/*: boolean*/)/*: void*/ {
this[_showElementUnderKeyboard] = b;
}
/**
* Shows the keyboard, if possible
*/
showKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.show();
return;
}
}
if (force) {
this.keyboardShown({keyboardHeight: 240});
}
}
/**
* Hide the keyboard, if possible
*/
hideKeyboard(force/*: boolean*/ = false)/*: void*/ {
if (typeof cordova !== "undefined") {
if (cordova.plugins && cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.hide();
return;
}
}
if (force) {
this.keyboardHidden({});
}
}
/**
* Triggered when the soft keyboard is displayed
* @param {{keyboardHeight: number}} e the event triggered from the keyboard plugin
*/
keyboardShown(e)/*: void*/ {
this.emit("keyboardShown", e);
this.emit("willResize", e);
console.log ("keyboard shown", e.keyboardHeight);
setTimeout(() => {
let screenHeight = window.innerHeight; //(document.body.clientWidth === window.screen.height ? window.screen.width : window.screen.height);
let scrollContainers = getScrollContainers(this.selectorString);
let keyboardHeight = 0; //e.keyboardHeight;
if (this.showElementUnderKeyboard) {
showElementUnderKeyboard(keyboardHeight);
}
// for each scroll container in the DOM, we need to calculate the
// the height it should be to fit in the reduced view
scrollContainers.forEach((sc) => {
let scTop = sc.getBoundingClientRect().top;
// now that we know the top of the scroll container, the height of the
// the screen, and the height of the keyboard, we can calculate the
// appropriate max-height for the container.
let maxHeight = "" + (screenHeight - keyboardHeight - scTop);
console.log("New height", maxHeight);
if (maxHeight > 100) {
sc.style.maxHeight = maxHeight + "px";
}
});
this.emit("didResize", e);
// changing the height isn't sufficient: we need to scroll any focused
// element into view.
setTimeout(() => {
let focusedElement = document.querySelector(":focus");
if (focusedElement) {
if (!this.useSmoothScrolling || !window.requestAnimationFrame) {
// scroll the element into view, but only if we have to
if (focusedElement.scrollIntoViewIfNeeded) {
focusedElement.scrollIntoViewIfNeeded();
} else {
// aim for the bottom of the viewport
focusedElement.scrollIntoView(false);
}
// iOS doesn't always position the cursor correctly after
// a scroll operation. Clear the selection so that iOS is
// forced to recompute where the cursor should appear.
handleTextSelection(focusedElement);
} else {
// to scroll the element smoothly into view, things become a little
// more difficult.
let fElTop = focusedElement.getBoundingClientRect().top,
sc = focusedElement, scTop, scBottom,
| e.className = "sk-element-under-keyboard";
e.style.position = "absolute";
e.style.bottom = "0";
e.style.left = "0";
e.style.right = "0"; | random_line_split |
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
}
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
}
pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else {
weight
};
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn | <R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
}
| gaussian | identifier_name |
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
}
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
} | pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else {
weight
};
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn gaussian<R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
} | random_line_split |
|
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
}
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
}
pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else | ;
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn gaussian<R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
}
| {
weight
} | conditional_block |
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange |
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
}
pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else {
weight
};
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn gaussian<R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
}
| {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
} | identifier_body |
_helpers.py | # Copyright 2021 Google LLC.
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import os
import pytest
import websockets
_command_counter = 1
def get_next_command_id():
global _command_counter
_command_counter += 1
return _command_counter
@pytest.fixture
async def websocket():
port = os.getenv('PORT', 8080)
url = f'ws://localhost:{port}'
async with websockets.connect(url) as connection:
yield connection
# noinspection PyUnusedFunction
@pytest.fixture
async def context_id(websocket):
# Note: there can be a race condition between initially created context's
# events and following subscription commands. Sometimes subscribe is called
# before the initial context emitted `browsingContext.contextCreated`,
# `browsingContext.domContentLoaded`, or `browsingContext.load` events,
# which makes events verification way harder. Navigation command guarantees
# there will be no follow-up events, as it uses `interactive` flag.
# TODO: find a way to avoid mentioned race condition properly.
open_context_id = await get_open_context_id(websocket)
await goto_url(websocket, open_context_id, "about:blank")
return open_context_id
@pytest.fixture(autouse=True)
async def before_each_test(websocket):
# This method can be used for browser state preparation.
assert True
async def subscribe(websocket, event_names, context_ids=None):
if isinstance(event_names, str):
event_names = [event_names]
command = {
"method": "session.subscribe",
"params": {
"events": event_names}}
if context_ids is not None:
command["params"]["contexts"] = context_ids
await execute_command(websocket, command)
# Compares 2 objects recursively ignoring values of specific attributes.
def recursiveCompare(expected, actual, ignore_attributes=[]):
assert type(expected) == type(actual)
if type(expected) is list:
assert len(expected) == len(actual)
for index, val in enumerate(expected):
recursiveCompare(expected[index], actual[index], ignore_attributes)
return
if type(expected) is dict:
assert expected.keys() == actual.keys(), \
f"Key sets should be the same: " \
f"\nNot present: {set(expected.keys()) - set(actual.keys())}" \
f"\nUnexpected: {set(actual.keys()) - set(expected.keys())}"
for index, val in enumerate(expected):
if val not in ignore_attributes:
recursiveCompare(expected[val], actual[val], ignore_attributes)
return
assert expected == actual
# Returns an id of an open context.
async def get_open_context_id(websocket):
result = await execute_command(websocket, {
"method": "browsingContext.getTree",
"params": {}})
return result['contexts'][0]['context']
async def send_JSON_command(websocket, command):
if 'id' not in command:
command_id = get_next_command_id()
command['id'] = command_id
await websocket.send(json.dumps(command))
async def read_JSON_message(websocket):
return json.loads(await websocket.recv())
# Open given URL in the given context.
async def goto_url(websocket, context_id, url):
await execute_command(websocket, {
"method": "browsingContext.navigate", | "url": url,
"context": context_id,
"wait": "interactive"}})
# noinspection PySameParameterValue
async def execute_command(websocket, command, result_field='result'):
command_id = get_next_command_id()
command['id'] = command_id
await send_JSON_command(websocket, command)
while True:
# Wait for the command to be finished.
resp = await read_JSON_message(websocket)
if 'id' in resp and resp['id'] == command_id:
assert result_field in resp, \
f"Field `{result_field}` should be in the result object:" \
f"\n {resp}"
return resp[result_field]
# Wait and return a specific event from Bidi server
async def wait_for_event(websocket, event_method):
while True:
event_response = await read_JSON_message(websocket)
if 'method' in event_response and event_response['method'] == event_method:
return event_response | "params": { | random_line_split |
_helpers.py | # Copyright 2021 Google LLC.
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import os
import pytest
import websockets
_command_counter = 1
def get_next_command_id():
global _command_counter
_command_counter += 1
return _command_counter
@pytest.fixture
async def websocket():
port = os.getenv('PORT', 8080)
url = f'ws://localhost:{port}'
async with websockets.connect(url) as connection:
yield connection
# noinspection PyUnusedFunction
@pytest.fixture
async def context_id(websocket):
# Note: there can be a race condition between initially created context's
# events and following subscription commands. Sometimes subscribe is called
# before the initial context emitted `browsingContext.contextCreated`,
# `browsingContext.domContentLoaded`, or `browsingContext.load` events,
# which makes events verification way harder. Navigation command guarantees
# there will be no follow-up events, as it uses `interactive` flag.
# TODO: find a way to avoid mentioned race condition properly.
open_context_id = await get_open_context_id(websocket)
await goto_url(websocket, open_context_id, "about:blank")
return open_context_id
@pytest.fixture(autouse=True)
async def before_each_test(websocket):
# This method can be used for browser state preparation.
assert True
async def subscribe(websocket, event_names, context_ids=None):
if isinstance(event_names, str):
event_names = [event_names]
command = {
"method": "session.subscribe",
"params": {
"events": event_names}}
if context_ids is not None:
command["params"]["contexts"] = context_ids
await execute_command(websocket, command)
# Compares 2 objects recursively ignoring values of specific attributes.
def recursiveCompare(expected, actual, ignore_attributes=[]):
assert type(expected) == type(actual)
if type(expected) is list:
|
if type(expected) is dict:
assert expected.keys() == actual.keys(), \
f"Key sets should be the same: " \
f"\nNot present: {set(expected.keys()) - set(actual.keys())}" \
f"\nUnexpected: {set(actual.keys()) - set(expected.keys())}"
for index, val in enumerate(expected):
if val not in ignore_attributes:
recursiveCompare(expected[val], actual[val], ignore_attributes)
return
assert expected == actual
# Returns an id of an open context.
async def get_open_context_id(websocket):
result = await execute_command(websocket, {
"method": "browsingContext.getTree",
"params": {}})
return result['contexts'][0]['context']
async def send_JSON_command(websocket, command):
if 'id' not in command:
command_id = get_next_command_id()
command['id'] = command_id
await websocket.send(json.dumps(command))
async def read_JSON_message(websocket):
return json.loads(await websocket.recv())
# Open given URL in the given context.
async def goto_url(websocket, context_id, url):
await execute_command(websocket, {
"method": "browsingContext.navigate",
"params": {
"url": url,
"context": context_id,
"wait": "interactive"}})
# noinspection PySameParameterValue
async def execute_command(websocket, command, result_field='result'):
command_id = get_next_command_id()
command['id'] = command_id
await send_JSON_command(websocket, command)
while True:
# Wait for the command to be finished.
resp = await read_JSON_message(websocket)
if 'id' in resp and resp['id'] == command_id:
assert result_field in resp, \
f"Field `{result_field}` should be in the result object:" \
f"\n {resp}"
return resp[result_field]
# Wait and return a specific event from Bidi server
async def wait_for_event(websocket, event_method):
while True:
event_response = await read_JSON_message(websocket)
if 'method' in event_response and event_response['method'] == event_method:
return event_response
| assert len(expected) == len(actual)
for index, val in enumerate(expected):
recursiveCompare(expected[index], actual[index], ignore_attributes)
return | conditional_block |
_helpers.py | # Copyright 2021 Google LLC.
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import os
import pytest
import websockets
_command_counter = 1
def get_next_command_id():
global _command_counter
_command_counter += 1
return _command_counter
@pytest.fixture
async def websocket():
port = os.getenv('PORT', 8080)
url = f'ws://localhost:{port}'
async with websockets.connect(url) as connection:
yield connection
# noinspection PyUnusedFunction
@pytest.fixture
async def | (websocket):
# Note: there can be a race condition between initially created context's
# events and following subscription commands. Sometimes subscribe is called
# before the initial context emitted `browsingContext.contextCreated`,
# `browsingContext.domContentLoaded`, or `browsingContext.load` events,
# which makes events verification way harder. Navigation command guarantees
# there will be no follow-up events, as it uses `interactive` flag.
# TODO: find a way to avoid mentioned race condition properly.
open_context_id = await get_open_context_id(websocket)
await goto_url(websocket, open_context_id, "about:blank")
return open_context_id
@pytest.fixture(autouse=True)
async def before_each_test(websocket):
# This method can be used for browser state preparation.
assert True
async def subscribe(websocket, event_names, context_ids=None):
if isinstance(event_names, str):
event_names = [event_names]
command = {
"method": "session.subscribe",
"params": {
"events": event_names}}
if context_ids is not None:
command["params"]["contexts"] = context_ids
await execute_command(websocket, command)
# Compares 2 objects recursively ignoring values of specific attributes.
def recursiveCompare(expected, actual, ignore_attributes=[]):
assert type(expected) == type(actual)
if type(expected) is list:
assert len(expected) == len(actual)
for index, val in enumerate(expected):
recursiveCompare(expected[index], actual[index], ignore_attributes)
return
if type(expected) is dict:
assert expected.keys() == actual.keys(), \
f"Key sets should be the same: " \
f"\nNot present: {set(expected.keys()) - set(actual.keys())}" \
f"\nUnexpected: {set(actual.keys()) - set(expected.keys())}"
for index, val in enumerate(expected):
if val not in ignore_attributes:
recursiveCompare(expected[val], actual[val], ignore_attributes)
return
assert expected == actual
# Returns an id of an open context.
async def get_open_context_id(websocket):
result = await execute_command(websocket, {
"method": "browsingContext.getTree",
"params": {}})
return result['contexts'][0]['context']
async def send_JSON_command(websocket, command):
if 'id' not in command:
command_id = get_next_command_id()
command['id'] = command_id
await websocket.send(json.dumps(command))
async def read_JSON_message(websocket):
return json.loads(await websocket.recv())
# Open given URL in the given context.
async def goto_url(websocket, context_id, url):
await execute_command(websocket, {
"method": "browsingContext.navigate",
"params": {
"url": url,
"context": context_id,
"wait": "interactive"}})
# noinspection PySameParameterValue
async def execute_command(websocket, command, result_field='result'):
command_id = get_next_command_id()
command['id'] = command_id
await send_JSON_command(websocket, command)
while True:
# Wait for the command to be finished.
resp = await read_JSON_message(websocket)
if 'id' in resp and resp['id'] == command_id:
assert result_field in resp, \
f"Field `{result_field}` should be in the result object:" \
f"\n {resp}"
return resp[result_field]
# Wait and return a specific event from Bidi server
async def wait_for_event(websocket, event_method):
while True:
event_response = await read_JSON_message(websocket)
if 'method' in event_response and event_response['method'] == event_method:
return event_response
| context_id | identifier_name |
_helpers.py | # Copyright 2021 Google LLC.
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import os
import pytest
import websockets
_command_counter = 1
def get_next_command_id():
global _command_counter
_command_counter += 1
return _command_counter
@pytest.fixture
async def websocket():
port = os.getenv('PORT', 8080)
url = f'ws://localhost:{port}'
async with websockets.connect(url) as connection:
yield connection
# noinspection PyUnusedFunction
@pytest.fixture
async def context_id(websocket):
# Note: there can be a race condition between initially created context's
# events and following subscription commands. Sometimes subscribe is called
# before the initial context emitted `browsingContext.contextCreated`,
# `browsingContext.domContentLoaded`, or `browsingContext.load` events,
# which makes events verification way harder. Navigation command guarantees
# there will be no follow-up events, as it uses `interactive` flag.
# TODO: find a way to avoid mentioned race condition properly.
open_context_id = await get_open_context_id(websocket)
await goto_url(websocket, open_context_id, "about:blank")
return open_context_id
@pytest.fixture(autouse=True)
async def before_each_test(websocket):
# This method can be used for browser state preparation.
assert True
async def subscribe(websocket, event_names, context_ids=None):
if isinstance(event_names, str):
event_names = [event_names]
command = {
"method": "session.subscribe",
"params": {
"events": event_names}}
if context_ids is not None:
command["params"]["contexts"] = context_ids
await execute_command(websocket, command)
# Compares 2 objects recursively ignoring values of specific attributes.
def recursiveCompare(expected, actual, ignore_attributes=[]):
assert type(expected) == type(actual)
if type(expected) is list:
assert len(expected) == len(actual)
for index, val in enumerate(expected):
recursiveCompare(expected[index], actual[index], ignore_attributes)
return
if type(expected) is dict:
assert expected.keys() == actual.keys(), \
f"Key sets should be the same: " \
f"\nNot present: {set(expected.keys()) - set(actual.keys())}" \
f"\nUnexpected: {set(actual.keys()) - set(expected.keys())}"
for index, val in enumerate(expected):
if val not in ignore_attributes:
recursiveCompare(expected[val], actual[val], ignore_attributes)
return
assert expected == actual
# Returns an id of an open context.
async def get_open_context_id(websocket):
result = await execute_command(websocket, {
"method": "browsingContext.getTree",
"params": {}})
return result['contexts'][0]['context']
async def send_JSON_command(websocket, command):
if 'id' not in command:
command_id = get_next_command_id()
command['id'] = command_id
await websocket.send(json.dumps(command))
async def read_JSON_message(websocket):
return json.loads(await websocket.recv())
# Open given URL in the given context.
async def goto_url(websocket, context_id, url):
await execute_command(websocket, {
"method": "browsingContext.navigate",
"params": {
"url": url,
"context": context_id,
"wait": "interactive"}})
# noinspection PySameParameterValue
async def execute_command(websocket, command, result_field='result'):
|
# Wait and return a specific event from Bidi server
async def wait_for_event(websocket, event_method):
while True:
event_response = await read_JSON_message(websocket)
if 'method' in event_response and event_response['method'] == event_method:
return event_response
| command_id = get_next_command_id()
command['id'] = command_id
await send_JSON_command(websocket, command)
while True:
# Wait for the command to be finished.
resp = await read_JSON_message(websocket)
if 'id' in resp and resp['id'] == command_id:
assert result_field in resp, \
f"Field `{result_field}` should be in the result object:" \
f"\n {resp}"
return resp[result_field] | identifier_body |
lib.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
// TODO: Falsely triggers for async/await:
// see https://github.com/rust-lang/rust-clippy/issues/5360
// clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use std::collections::VecDeque;
use std::future::Future;
use std::sync::Arc;
use parking_lot::Mutex;
use tokio::sync::{Semaphore, SemaphorePermit};
struct Inner {
sema: Semaphore,
available_ids: Mutex<VecDeque<usize>>,
}
#[derive(Clone)]
pub struct AsyncSemaphore {
inner: Arc<Inner>,
}
impl AsyncSemaphore {
pub fn new(permits: usize) -> AsyncSemaphore {
let mut available_ids = VecDeque::new();
for id in 1..=permits {
available_ids.push_back(id);
}
AsyncSemaphore {
inner: Arc::new(Inner {
sema: Semaphore::new(permits),
available_ids: Mutex::new(available_ids),
}),
}
}
pub fn available_permits(&self) -> usize {
self.inner.sema.available_permits()
}
///
/// Runs the given Future-creating function (and the Future it returns) under the semaphore.
///
pub async fn with_acquired<F, B, O>(self, f: F) -> O
where
F: FnOnce(usize) -> B + Send + 'static,
B: Future<Output = O> + Send + 'static,
{ | let res = f(permit.id).await;
drop(permit);
res
}
async fn acquire(&self) -> Permit<'_> {
let permit = self.inner.sema.acquire().await.expect("semaphore closed");
let id = {
let mut available_ids = self.inner.available_ids.lock();
available_ids
.pop_front()
.expect("More permits were distributed than ids exist.")
};
Permit {
inner: self.inner.clone(),
_permit: permit,
id,
}
}
}
pub struct Permit<'a> {
inner: Arc<Inner>,
// NB: Kept for its `Drop` impl.
_permit: SemaphorePermit<'a>,
id: usize,
}
impl<'a> Drop for Permit<'a> {
fn drop(&mut self) {
let mut available_ids = self.inner.available_ids.lock();
available_ids.push_back(self.id);
}
}
#[cfg(test)]
mod tests; | let permit = self.acquire().await; | random_line_split |
lib.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
// TODO: Falsely triggers for async/await:
// see https://github.com/rust-lang/rust-clippy/issues/5360
// clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use std::collections::VecDeque;
use std::future::Future;
use std::sync::Arc;
use parking_lot::Mutex;
use tokio::sync::{Semaphore, SemaphorePermit};
struct Inner {
sema: Semaphore,
available_ids: Mutex<VecDeque<usize>>,
}
#[derive(Clone)]
pub struct AsyncSemaphore {
inner: Arc<Inner>,
}
impl AsyncSemaphore {
pub fn new(permits: usize) -> AsyncSemaphore {
let mut available_ids = VecDeque::new();
for id in 1..=permits {
available_ids.push_back(id);
}
AsyncSemaphore {
inner: Arc::new(Inner {
sema: Semaphore::new(permits),
available_ids: Mutex::new(available_ids),
}),
}
}
pub fn | (&self) -> usize {
self.inner.sema.available_permits()
}
///
/// Runs the given Future-creating function (and the Future it returns) under the semaphore.
///
pub async fn with_acquired<F, B, O>(self, f: F) -> O
where
F: FnOnce(usize) -> B + Send + 'static,
B: Future<Output = O> + Send + 'static,
{
let permit = self.acquire().await;
let res = f(permit.id).await;
drop(permit);
res
}
async fn acquire(&self) -> Permit<'_> {
let permit = self.inner.sema.acquire().await.expect("semaphore closed");
let id = {
let mut available_ids = self.inner.available_ids.lock();
available_ids
.pop_front()
.expect("More permits were distributed than ids exist.")
};
Permit {
inner: self.inner.clone(),
_permit: permit,
id,
}
}
}
pub struct Permit<'a> {
inner: Arc<Inner>,
// NB: Kept for its `Drop` impl.
_permit: SemaphorePermit<'a>,
id: usize,
}
impl<'a> Drop for Permit<'a> {
fn drop(&mut self) {
let mut available_ids = self.inner.available_ids.lock();
available_ids.push_back(self.id);
}
}
#[cfg(test)]
mod tests;
| available_permits | identifier_name |
error.rs | use std::error;
use std::fmt;
use provider;
use provider::service::inline::systemd;
#[derive(Debug)]
pub enum | {
DBus(systemd::dbus::Error),
DBusArgTypeMismatch(systemd::dbus::arg::TypeMismatchError),
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::DBus(ref err) => err.description(),
Error::DBusArgTypeMismatch(ref err) => err.description(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::DBus(ref err) => err.fmt(f),
Error::DBusArgTypeMismatch(ref err) => err.fmt(f),
}
}
}
impl From<systemd::dbus::Error> for Error {
fn from(err: systemd::dbus::Error) -> Error {
Error::DBus(err)
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> Error {
Error::DBusArgTypeMismatch(err)
}
}
impl From<systemd::dbus::Error> for provider::error::Error {
fn from(err: systemd::dbus::Error) -> provider::error::Error {
Error::DBus(err).into()
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for provider::error::Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> provider::error::Error {
Error::DBusArgTypeMismatch(err).into()
}
}
| Error | identifier_name |
error.rs | use std::error;
use std::fmt; | pub enum Error {
DBus(systemd::dbus::Error),
DBusArgTypeMismatch(systemd::dbus::arg::TypeMismatchError),
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::DBus(ref err) => err.description(),
Error::DBusArgTypeMismatch(ref err) => err.description(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::DBus(ref err) => err.fmt(f),
Error::DBusArgTypeMismatch(ref err) => err.fmt(f),
}
}
}
impl From<systemd::dbus::Error> for Error {
fn from(err: systemd::dbus::Error) -> Error {
Error::DBus(err)
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> Error {
Error::DBusArgTypeMismatch(err)
}
}
impl From<systemd::dbus::Error> for provider::error::Error {
fn from(err: systemd::dbus::Error) -> provider::error::Error {
Error::DBus(err).into()
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for provider::error::Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> provider::error::Error {
Error::DBusArgTypeMismatch(err).into()
}
} |
use provider;
use provider::service::inline::systemd;
#[derive(Debug)] | random_line_split |
error.rs | use std::error;
use std::fmt;
use provider;
use provider::service::inline::systemd;
#[derive(Debug)]
pub enum Error {
DBus(systemd::dbus::Error),
DBusArgTypeMismatch(systemd::dbus::arg::TypeMismatchError),
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::DBus(ref err) => err.description(),
Error::DBusArgTypeMismatch(ref err) => err.description(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::DBus(ref err) => err.fmt(f),
Error::DBusArgTypeMismatch(ref err) => err.fmt(f),
}
}
}
impl From<systemd::dbus::Error> for Error {
fn from(err: systemd::dbus::Error) -> Error {
Error::DBus(err)
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> Error {
Error::DBusArgTypeMismatch(err)
}
}
impl From<systemd::dbus::Error> for provider::error::Error {
fn from(err: systemd::dbus::Error) -> provider::error::Error |
}
impl From<systemd::dbus::arg::TypeMismatchError> for provider::error::Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> provider::error::Error {
Error::DBusArgTypeMismatch(err).into()
}
}
| {
Error::DBus(err).into()
} | identifier_body |
server.js |
const http = require('http');
const metrics = require('./metrics');
const profiling = require('./profiling');
const DEFAULT_HOSTNAME = '0.0.0.0';
const DEFAULT_PORT = 9142;
const DEFAULT_PATH = '/metrics';
function slash_metrics_handler(req, res) | ;
function slash_handler(req, res) {
res.writeHead(301, {
'Location': '/metrics'
});
res.end(`<html>
<head><title>Node profiling exporter</title></head>
<body>
<h1>Node profiling exporter</h1>
<p><a href="/metrics">Metrics</a></p>
</body>
</html>`);
}
function startServer(opts) {
if (!opts)
opts = {};
const hostname = !!opts.hostname ? opts.hostname : DEFAULT_HOSTNAME;
const port = !!opts.port ? parseInt(opts.port) : DEFAULT_PORT;
const path = !!opts.path ? opts.path : DEFAULT_PATH;
return new Promise( (resolve, reject) => {
const server = http.createServer((req, res) => {
const { method, url } = req;
if (method === "GET" && url === path)
return slash_metrics_handler(req, res);
else if (method === "GET" && url === "/")
return slash_handler(req, res);
res.statusCode = 404;
res.end();
return;
});
server.on('clientError', (err, socket) => {
socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
});
server.listen(port, hostname, () => {
resolve('http://' + hostname + ':' + port + path);
});
});
}
module.exports = { startServer };
| {
res.statusCode = 200;
res.end(metrics.getMetrics());
} | identifier_body |
server.js | const http = require('http');
const metrics = require('./metrics');
const profiling = require('./profiling');
const DEFAULT_HOSTNAME = '0.0.0.0';
const DEFAULT_PORT = 9142;
const DEFAULT_PATH = '/metrics';
function slash_metrics_handler(req, res) { | res.end(metrics.getMetrics());
};
function slash_handler(req, res) {
res.writeHead(301, {
'Location': '/metrics'
});
res.end(`<html>
<head><title>Node profiling exporter</title></head>
<body>
<h1>Node profiling exporter</h1>
<p><a href="/metrics">Metrics</a></p>
</body>
</html>`);
}
function startServer(opts) {
if (!opts)
opts = {};
const hostname = !!opts.hostname ? opts.hostname : DEFAULT_HOSTNAME;
const port = !!opts.port ? parseInt(opts.port) : DEFAULT_PORT;
const path = !!opts.path ? opts.path : DEFAULT_PATH;
return new Promise( (resolve, reject) => {
const server = http.createServer((req, res) => {
const { method, url } = req;
if (method === "GET" && url === path)
return slash_metrics_handler(req, res);
else if (method === "GET" && url === "/")
return slash_handler(req, res);
res.statusCode = 404;
res.end();
return;
});
server.on('clientError', (err, socket) => {
socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
});
server.listen(port, hostname, () => {
resolve('http://' + hostname + ':' + port + path);
});
});
}
module.exports = { startServer }; | res.statusCode = 200; | random_line_split |
server.js |
const http = require('http');
const metrics = require('./metrics');
const profiling = require('./profiling');
const DEFAULT_HOSTNAME = '0.0.0.0';
const DEFAULT_PORT = 9142;
const DEFAULT_PATH = '/metrics';
function slash_metrics_handler(req, res) {
res.statusCode = 200;
res.end(metrics.getMetrics());
};
function slash_handler(req, res) {
res.writeHead(301, {
'Location': '/metrics'
});
res.end(`<html>
<head><title>Node profiling exporter</title></head>
<body>
<h1>Node profiling exporter</h1>
<p><a href="/metrics">Metrics</a></p>
</body>
</html>`);
}
function | (opts) {
if (!opts)
opts = {};
const hostname = !!opts.hostname ? opts.hostname : DEFAULT_HOSTNAME;
const port = !!opts.port ? parseInt(opts.port) : DEFAULT_PORT;
const path = !!opts.path ? opts.path : DEFAULT_PATH;
return new Promise( (resolve, reject) => {
const server = http.createServer((req, res) => {
const { method, url } = req;
if (method === "GET" && url === path)
return slash_metrics_handler(req, res);
else if (method === "GET" && url === "/")
return slash_handler(req, res);
res.statusCode = 404;
res.end();
return;
});
server.on('clientError', (err, socket) => {
socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
});
server.listen(port, hostname, () => {
resolve('http://' + hostname + ':' + port + path);
});
});
}
module.exports = { startServer };
| startServer | identifier_name |
unpack_pak_test.py | #!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unpack_pak
import unittest
class | (unittest.TestCase):
def testMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH}'))
def testGzippedMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, false}'))
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, true}'))
def testGetFileAndDirName(self):
(f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo', 'a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d)
def testGetFileAndDirNameForGeneratedResource(self):
(f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo',
'@out_folder@/out/build/gen/foo/a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d)
if __name__ == '__main__':
unittest.main()
| UnpackPakTest | identifier_name |
unpack_pak_test.py | #!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unpack_pak
import unittest
class UnpackPakTest(unittest.TestCase):
def testMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH}'))
def testGzippedMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, false}'))
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, true}'))
def testGetFileAndDirName(self):
(f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo', 'a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d)
def testGetFileAndDirNameForGeneratedResource(self):
|
if __name__ == '__main__':
unittest.main()
| (f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo',
'@out_folder@/out/build/gen/foo/a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d) | identifier_body |
unpack_pak_test.py | #!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unpack_pak
import unittest
class UnpackPakTest(unittest.TestCase):
def testMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH}'))
def testGzippedMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, false}'))
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, true}'))
def testGetFileAndDirName(self):
(f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo', 'a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d)
def testGetFileAndDirNameForGeneratedResource(self):
(f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo',
'@out_folder@/out/build/gen/foo/a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d)
if __name__ == '__main__':
| unittest.main() | conditional_block |
|
unpack_pak_test.py | #!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unpack_pak
import unittest
class UnpackPakTest(unittest.TestCase):
def testMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH}'))
def testGzippedMapFileLine(self):
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, false}'))
self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, true}'))
def testGetFileAndDirName(self):
(f, d) = unpack_pak.GetFileAndDirName(
'out/build/gen/foo/foo.unpak', 'out/build/gen/foo', 'a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d)
def testGetFileAndDirNameForGeneratedResource(self):
(f, d) = unpack_pak.GetFileAndDirName( |
if __name__ == '__main__':
unittest.main() | 'out/build/gen/foo/foo.unpak', 'out/build/gen/foo',
'@out_folder@/out/build/gen/foo/a/b.js')
self.assertEquals('b.js', f)
self.assertEquals('out/build/gen/foo/foo.unpak/a', d) | random_line_split |
lib_2015.rs | // Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate proc_macro;
use proc_macro::TokenStream;
/// This macro is a no-op; it is exceedingly simple as a result
/// of avoiding dependencies on both the syn and quote crates.
#[proc_macro_derive(HelloWorld)]
pub fn hello_world(_input: TokenStream) -> TokenStream | {
TokenStream::new()
} | identifier_body |
|
lib_2015.rs | // Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate proc_macro;
use proc_macro::TokenStream;
/// This macro is a no-op; it is exceedingly simple as a result
/// of avoiding dependencies on both the syn and quote crates.
#[proc_macro_derive(HelloWorld)]
pub fn | (_input: TokenStream) -> TokenStream {
TokenStream::new()
}
| hello_world | identifier_name |
lib_2015.rs | // Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
| use proc_macro::TokenStream;
/// This macro is a no-op; it is exceedingly simple as a result
/// of avoiding dependencies on both the syn and quote crates.
#[proc_macro_derive(HelloWorld)]
pub fn hello_world(_input: TokenStream) -> TokenStream {
TokenStream::new()
} | extern crate proc_macro;
| random_line_split |
noop_method_call.rs | use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T: !Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
}
_ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty != expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}
let expr_span = expr.span; | let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
} | random_line_split |
|
noop_method_call.rs | use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T: !Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) | _ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty != expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}
let expr_span = expr.span;
let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
}
| {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
} | identifier_body |
noop_method_call.rs | use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T: !Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn | (&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
}
_ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty != expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}
let expr_span = expr.span;
let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
}
| check_expr | identifier_name |
issue-3743.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Vec2 {
x: f64,
y: f64
}
// methods we want to export as methods as well as operators
impl Vec2 {
#[inline(always)]
fn | (self, other: f64) -> Vec2 {
Vec2 { x: self.x * other, y: self.y * other }
}
}
// Right-hand-side operator visitor pattern
trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; }
// Vec2's implementation of Mul "from the other side" using the above trait
impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 {
fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) }
}
// Implementation of 'f64 as right-hand-side of Vec2::Mul'
impl RhsOfVec2Mul<Vec2> for f64 {
fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) }
}
// Usage with failing inference
pub fn main() {
let a = Vec2 { x: 3.0, y: 4.0 };
// the following compiles and works properly
let v1: Vec2 = a * 3.0;
println!("{} {}", v1.x, v1.y);
// the following compiles but v2 will not be Vec2 yet and
// using it later will cause an error that the type of v2
// must be known
let v2 = a * 3.0;
println!("{} {}", v2.x, v2.y); // error regarding v2's type
}
| vmul | identifier_name |
issue-3743.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Vec2 {
x: f64,
y: f64
}
// methods we want to export as methods as well as operators
impl Vec2 {
#[inline(always)]
fn vmul(self, other: f64) -> Vec2 |
}
// Right-hand-side operator visitor pattern
trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; }
// Vec2's implementation of Mul "from the other side" using the above trait
impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 {
fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) }
}
// Implementation of 'f64 as right-hand-side of Vec2::Mul'
impl RhsOfVec2Mul<Vec2> for f64 {
fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) }
}
// Usage with failing inference
pub fn main() {
let a = Vec2 { x: 3.0, y: 4.0 };
// the following compiles and works properly
let v1: Vec2 = a * 3.0;
println!("{} {}", v1.x, v1.y);
// the following compiles but v2 will not be Vec2 yet and
// using it later will cause an error that the type of v2
// must be known
let v2 = a * 3.0;
println!("{} {}", v2.x, v2.y); // error regarding v2's type
}
| {
Vec2 { x: self.x * other, y: self.y * other }
} | identifier_body |
issue-3743.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Vec2 {
x: f64,
y: f64
}
// methods we want to export as methods as well as operators
impl Vec2 {
#[inline(always)]
fn vmul(self, other: f64) -> Vec2 {
Vec2 { x: self.x * other, y: self.y * other }
}
} |
// Right-hand-side operator visitor pattern
trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; }
// Vec2's implementation of Mul "from the other side" using the above trait
impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 {
fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) }
}
// Implementation of 'f64 as right-hand-side of Vec2::Mul'
impl RhsOfVec2Mul<Vec2> for f64 {
fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) }
}
// Usage with failing inference
pub fn main() {
let a = Vec2 { x: 3.0, y: 4.0 };
// the following compiles and works properly
let v1: Vec2 = a * 3.0;
println!("{} {}", v1.x, v1.y);
// the following compiles but v2 will not be Vec2 yet and
// using it later will cause an error that the type of v2
// must be known
let v2 = a * 3.0;
println!("{} {}", v2.x, v2.y); // error regarding v2's type
} | random_line_split |
|
24.js | var app = angular.module('commentsApp', []);
var pageNumber = window.location.pathname
app.controller('CommentController', function($scope, $http) {
var updateCommentSection = function() { | url: '/svc/comments/get?page_number=' + pageNumber
}).then(function successCallback(response) {
$scope.comments = response.data;
}, function errorCallback(response) {
console.log("Service not found.")
});
};
updateCommentSection();
$scope.addComment = function() {
if ($scope.comment)
{
$http({
method: 'POST',
url: '/svc/comments/add',
data: {
username: $scope.username,
page_number: pageNumber,
comment_text: $scope.comment
}
}).then(function successCallback(response) {
$scope.comment = "";
updateCommentSection();
}, function errorCallback(response) {
console.log("Service not found.")
});
}
};
});
var s = new WebSocket("ws://dev.ivoirians.me/ws/echo");
s.onmessage = function(evt) {console.log(evt) };
s.onerror = function(evt) {console.log(evt) };
s.onclose = function(evt) {console.log(evt) };
s.send("hi");
s.send("hello");
s.send("what's up"); | $scope.comments = [];
$http({
method: 'GET', | random_line_split |
24.js | var app = angular.module('commentsApp', []);
var pageNumber = window.location.pathname
app.controller('CommentController', function($scope, $http) {
var updateCommentSection = function() {
$scope.comments = [];
$http({
method: 'GET',
url: '/svc/comments/get?page_number=' + pageNumber
}).then(function successCallback(response) {
$scope.comments = response.data;
}, function errorCallback(response) {
console.log("Service not found.")
});
};
updateCommentSection();
$scope.addComment = function() {
if ($scope.comment)
|
};
});
var s = new WebSocket("ws://dev.ivoirians.me/ws/echo");
s.onmessage = function(evt) {console.log(evt) };
s.onerror = function(evt) {console.log(evt) };
s.onclose = function(evt) {console.log(evt) };
s.send("hi");
s.send("hello");
s.send("what's up"); | {
$http({
method: 'POST',
url: '/svc/comments/add',
data: {
username: $scope.username,
page_number: pageNumber,
comment_text: $scope.comment
}
}).then(function successCallback(response) {
$scope.comment = "";
updateCommentSection();
}, function errorCallback(response) {
console.log("Service not found.")
});
} | conditional_block |
monitors_util.py | (Exception):
pass
class InvalidTimestampFormat(Error):
pass
def prepend_timestamp(msg, format):
"""Prepend timestamp to a message in a standard way.
Args:
msg: str; Message to prepend timestamp to.
format: str or callable; Either format string that
can be passed to time.strftime or a callable
that will generate the timestamp string.
Returns: str; 'timestamp\tmsg'
"""
if type(format) is str:
timestamp = time.strftime(format, time.localtime())
elif callable(format):
timestamp = str(format())
else:
raise InvalidTimestampFormat
return '%s\t%s' % (timestamp, msg)
def write_logline(logfile, msg, timestamp_format=None):
"""Write msg, possibly prepended with a timestamp, as a terminated line.
Args:
logfile: file; File object to .write() msg to.
msg: str; Message to write.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
"""
msg = msg.rstrip('\n')
if timestamp_format:
msg = prepend_timestamp(msg, timestamp_format)
logfile.write(msg + '\n')
def make_alert(warnfile, msg_type, msg_template, timestamp_format=None):
"""Create an alert generation function that writes to warnfile.
Args:
warnfile: file; File object to write msg's to.
msg_type: str; String describing the message type
msg_template: str; String template that function params
are passed through.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
Returns: function with a signature of (*params);
The format for a warning used here is:
%(timestamp)d\t%(msg_type)s\t%(status)s\n
"""
if timestamp_format is None:
timestamp_format = lambda: int(time.time())
def alert(*params):
formatted_msg = msg_type + "\t" + msg_template % params
timestamped_msg = prepend_timestamp(formatted_msg, timestamp_format)
print >> warnfile, timestamped_msg
return alert
def build_alert_hooks(patterns_file, warnfile):
"""Parse data in patterns file and transform into alert_hook list.
Args:
patterns_file: file; File to read alert pattern definitions from.
warnfile: file; File to configure alert function to write warning to.
Returns:
list; Regex to alert function mapping.
[(regex, alert_function), ...]
"""
pattern_lines = patterns_file.readlines()
# expected pattern format:
# <msgtype> <newline> <regex> <newline> <alert> <newline> <newline>
# msgtype = a string categorizing the type of the message - used for
# enabling/disabling specific categories of warnings
# regex = a python regular expression
# alert = a string describing the alert message
# if the regex matches the line, this displayed warning will
# be the result of (alert % match.groups())
patterns = zip(pattern_lines[0::4], pattern_lines[1::4],
pattern_lines[2::4])
# assert that the patterns are separated by empty lines
if sum(len(line.strip()) for line in pattern_lines[3::4]) > 0:
raise ValueError('warning patterns are not separated by blank lines')
hooks = []
for msgtype, regex, alert in patterns:
regex = re.compile(regex.rstrip('\n'))
alert_function = make_alert(warnfile, msgtype.rstrip('\n'),
alert.rstrip('\n'))
hooks.append((regex, alert_function))
return hooks
def process_input(
input, logfile, log_timestamp_format=None, alert_hooks=()):
"""Continuously read lines from input stream and:
- Write them to log, possibly prefixed by timestamp.
- Watch for alert patterns.
Args:
input: file; Stream to read from.
logfile: file; Log file to write to
log_timestamp_format: str; Format to use for timestamping entries.
No timestamp is added if None.
alert_hooks: list; Generated from build_alert_hooks.
[(regex, alert_function), ...]
"""
while True:
line = input.readline()
if len(line) == 0:
# this should only happen if the remote console unexpectedly
# goes away. terminate this process so that we don't spin
# forever doing 0-length reads off of input
write_logline(logfile, TERM_MSG, log_timestamp_format)
break
if line == '\n':
# If it's just an empty line we discard and continue.
continue
write_logline(logfile, line, log_timestamp_format)
for regex, callback in alert_hooks:
match = re.match(regex, line.strip())
if match:
callback(*match.groups())
def lookup_lastlines(lastlines_dirpath, path):
"""Retrieve last lines seen for path.
Open corresponding lastline file for path
If there isn't one or isn't a match return None
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that lastlines came from.
Returns:
str; Last lines seen if they exist
- Or -
None; Otherwise
"""
underscored = path.replace('/', '_')
try:
lastlines_file = open(os.path.join(lastlines_dirpath, underscored))
except (OSError, IOError):
return
lastlines = lastlines_file.read()
lastlines_file.close()
os.remove(lastlines_file.name)
if not lastlines:
return
try:
target_file = open(path)
except (OSError, IOError):
return
# Load it all in for now
target_data = target_file.read()
target_file.close()
# Get start loc in the target_data string, scanning from right
loc = target_data.rfind(lastlines)
if loc == -1:
return
# Then translate this into a reverse line number
# (count newlines that occur afterward)
reverse_lineno = target_data.count('\n', loc + len(lastlines))
return reverse_lineno
def write_lastlines_file(lastlines_dirpath, path, data):
"""Write data to lastlines file for path.
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that data comes from.
data: str;
Returns:
str; Filepath that lastline data was written to.
"""
underscored = path.replace('/', '_')
dest_path = os.path.join(lastlines_dirpath, underscored)
open(dest_path, 'w').write(data)
return dest_path
def nonblocking(pipe):
"""Set python file object to nonblocking mode.
This allows us to take advantage of pipe.read()
where we don't have to specify a buflen.
Cuts down on a few lines we'd have to maintain.
Args:
pipe: file; File object to modify
Returns: pipe
"""
flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
fcntl.fcntl(pipe, fcntl.F_SETFL, flags| os.O_NONBLOCK)
return pipe
def launch_tails(follow_paths, lastlines_dirpath=None):
"""Launch a tail process for each follow_path.
Args:
follow_paths: list;
lastlines_dirpath: str;
Returns:
tuple; (procs, pipes) or
({path: subprocess.Popen, ...}, {file: path, ...})
"""
if lastlines_dirpath and not os.path.exists(lastlines_dirpath):
os.makedirs(lastlines_dirpath)
tail_cmd = ('/usr/bin/tail', '--retry', '--follow=name')
procs = {} # path -> tail_proc
pipes = {} # tail_proc.stdout -> path
for path in follow_paths:
cmd = list(tail_cmd)
if lastlines_dirpath:
reverse_lineno = lookup_lastlines(lastlines_dirpath, path)
if reverse_lineno is None:
reverse_lineno = 1
cmd.append('--lines=%d' % reverse_lineno)
cmd.append(path)
tail_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
procs[path] = tail_proc
pipes[nonblocking(tail_proc.stdout)] = path
return procs, pipes
def poll_tail_pipes(pipes, lastlines_dirpath=None, waitsecs=5):
"""Wait on tail pipes for new data for waitsecs, return any new lines.
Args:
pipes: dict; {subprocess.Popen: follow_path, ...}
lastlines_dirpath: str; Path to write lastlines to.
waitsecs: int; Timeout to pass to select
Returns:
tuple; (lines, bad_pipes) or ([line, ...], [subprocess.Popen, ...])
"""
lines = []
bad_pipes = []
# Block until at least one is ready to read or waitsecs elapses
ready, _, _ = select.select(pipes.keys(), (), (), waitsecs)
for fi in ready:
path = pipes[fi]
data = fi.read()
if len(data) == 0:
# If no data, process is probably dead, add to bad_pipes
bad_pipes.append(fi)
continue
if lastlines_dirpath:
# Overwrite the lastlines | Error | identifier_name |
|
monitors_util.py | that will generate the timestamp string.
Returns: str; 'timestamp\tmsg'
"""
if type(format) is str:
timestamp = time.strftime(format, time.localtime())
elif callable(format):
timestamp = str(format())
else:
raise InvalidTimestampFormat
return '%s\t%s' % (timestamp, msg)
def write_logline(logfile, msg, timestamp_format=None):
"""Write msg, possibly prepended with a timestamp, as a terminated line.
Args:
logfile: file; File object to .write() msg to.
msg: str; Message to write.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
"""
msg = msg.rstrip('\n')
if timestamp_format:
msg = prepend_timestamp(msg, timestamp_format)
logfile.write(msg + '\n')
def make_alert(warnfile, msg_type, msg_template, timestamp_format=None):
"""Create an alert generation function that writes to warnfile.
Args:
warnfile: file; File object to write msg's to.
msg_type: str; String describing the message type
msg_template: str; String template that function params
are passed through.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
Returns: function with a signature of (*params);
The format for a warning used here is:
%(timestamp)d\t%(msg_type)s\t%(status)s\n
"""
if timestamp_format is None:
timestamp_format = lambda: int(time.time())
def alert(*params):
formatted_msg = msg_type + "\t" + msg_template % params
timestamped_msg = prepend_timestamp(formatted_msg, timestamp_format)
print >> warnfile, timestamped_msg
return alert
def build_alert_hooks(patterns_file, warnfile):
| pattern_lines[2::4])
# assert that the patterns are separated by empty lines
if sum(len(line.strip()) for line in pattern_lines[3::4]) > 0:
raise ValueError('warning patterns are not separated by blank lines')
hooks = []
for msgtype, regex, alert in patterns:
regex = re.compile(regex.rstrip('\n'))
alert_function = make_alert(warnfile, msgtype.rstrip('\n'),
alert.rstrip('\n'))
hooks.append((regex, alert_function))
return hooks
def process_input(
input, logfile, log_timestamp_format=None, alert_hooks=()):
"""Continuously read lines from input stream and:
- Write them to log, possibly prefixed by timestamp.
- Watch for alert patterns.
Args:
input: file; Stream to read from.
logfile: file; Log file to write to
log_timestamp_format: str; Format to use for timestamping entries.
No timestamp is added if None.
alert_hooks: list; Generated from build_alert_hooks.
[(regex, alert_function), ...]
"""
while True:
line = input.readline()
if len(line) == 0:
# this should only happen if the remote console unexpectedly
# goes away. terminate this process so that we don't spin
# forever doing 0-length reads off of input
write_logline(logfile, TERM_MSG, log_timestamp_format)
break
if line == '\n':
# If it's just an empty line we discard and continue.
continue
write_logline(logfile, line, log_timestamp_format)
for regex, callback in alert_hooks:
match = re.match(regex, line.strip())
if match:
callback(*match.groups())
def lookup_lastlines(lastlines_dirpath, path):
"""Retrieve last lines seen for path.
Open corresponding lastline file for path
If there isn't one or isn't a match return None
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that lastlines came from.
Returns:
str; Last lines seen if they exist
- Or -
None; Otherwise
"""
underscored = path.replace('/', '_')
try:
lastlines_file = open(os.path.join(lastlines_dirpath, underscored))
except (OSError, IOError):
return
lastlines = lastlines_file.read()
lastlines_file.close()
os.remove(lastlines_file.name)
if not lastlines:
return
try:
target_file = open(path)
except (OSError, IOError):
return
# Load it all in for now
target_data = target_file.read()
target_file.close()
# Get start loc in the target_data string, scanning from right
loc = target_data.rfind(lastlines)
if loc == -1:
return
# Then translate this into a reverse line number
# (count newlines that occur afterward)
reverse_lineno = target_data.count('\n', loc + len(lastlines))
return reverse_lineno
def write_lastlines_file(lastlines_dirpath, path, data):
"""Write data to lastlines file for path.
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that data comes from.
data: str;
Returns:
str; Filepath that lastline data was written to.
"""
underscored = path.replace('/', '_')
dest_path = os.path.join(lastlines_dirpath, underscored)
open(dest_path, 'w').write(data)
return dest_path
def nonblocking(pipe):
"""Set python file object to nonblocking mode.
This allows us to take advantage of pipe.read()
where we don't have to specify a buflen.
Cuts down on a few lines we'd have to maintain.
Args:
pipe: file; File object to modify
Returns: pipe
"""
flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
fcntl.fcntl(pipe, fcntl.F_SETFL, flags| os.O_NONBLOCK)
return pipe
def launch_tails(follow_paths, lastlines_dirpath=None):
"""Launch a tail process for each follow_path.
Args:
follow_paths: list;
lastlines_dirpath: str;
Returns:
tuple; (procs, pipes) or
({path: subprocess.Popen, ...}, {file: path, ...})
"""
if lastlines_dirpath and not os.path.exists(lastlines_dirpath):
os.makedirs(lastlines_dirpath)
tail_cmd = ('/usr/bin/tail', '--retry', '--follow=name')
procs = {} # path -> tail_proc
pipes = {} # tail_proc.stdout -> path
for path in follow_paths:
cmd = list(tail_cmd)
if lastlines_dirpath:
reverse_lineno = lookup_lastlines(lastlines_dirpath, path)
if reverse_lineno is None:
reverse_lineno = 1
cmd.append('--lines=%d' % reverse_lineno)
cmd.append(path)
tail_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
procs[path] = tail_proc
pipes[nonblocking(tail_proc.stdout)] = path
return procs, pipes
def poll_tail_pipes(pipes, lastlines_dirpath=None, waitsecs=5):
"""Wait on tail pipes for new data for waitsecs, return any new lines.
Args:
pipes: dict; {subprocess.Popen: follow_path, ...}
lastlines_dirpath: str; Path to write lastlines to.
waitsecs: int; Timeout to pass to select
Returns:
tuple; (lines, bad_pipes) or ([line, ...], [subprocess.Popen, ...])
"""
lines = []
bad_pipes = []
# Block until at least one is ready to read or waitsecs elapses
ready, _, _ = select.select(pipes.keys(), (), (), waitsecs)
for fi in ready:
path = pipes[fi]
data = fi.read()
if len(data) == 0:
# If no data, process is probably dead, add to bad_pipes
bad_pipes.append(fi)
continue
if lastlines_dirpath:
# Overwrite the lastlines file for this source path
# Probably just want to write the last 1-3 lines.
write_lastlines_file(lastlines_dirpath, path, data)
for line in data.splitlines():
lines.append('[%s]\t%s\n' % (path, line))
return lines, bad_pipes
def snuff(subprocs | """Parse data in patterns file and transform into alert_hook list.
Args:
patterns_file: file; File to read alert pattern definitions from.
warnfile: file; File to configure alert function to write warning to.
Returns:
list; Regex to alert function mapping.
[(regex, alert_function), ...]
"""
pattern_lines = patterns_file.readlines()
# expected pattern format:
# <msgtype> <newline> <regex> <newline> <alert> <newline> <newline>
# msgtype = a string categorizing the type of the message - used for
# enabling/disabling specific categories of warnings
# regex = a python regular expression
# alert = a string describing the alert message
# if the regex matches the line, this displayed warning will
# be the result of (alert % match.groups())
patterns = zip(pattern_lines[0::4], pattern_lines[1::4], | identifier_body |
monitors_util.py | that will generate the timestamp string.
Returns: str; 'timestamp\tmsg'
"""
if type(format) is str:
timestamp = time.strftime(format, time.localtime())
elif callable(format):
timestamp = str(format())
else:
raise InvalidTimestampFormat
return '%s\t%s' % (timestamp, msg)
def write_logline(logfile, msg, timestamp_format=None):
"""Write msg, possibly prepended with a timestamp, as a terminated line.
Args:
logfile: file; File object to .write() msg to.
msg: str; Message to write.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
"""
msg = msg.rstrip('\n')
if timestamp_format:
msg = prepend_timestamp(msg, timestamp_format)
logfile.write(msg + '\n')
def make_alert(warnfile, msg_type, msg_template, timestamp_format=None):
"""Create an alert generation function that writes to warnfile.
Args:
warnfile: file; File object to write msg's to.
msg_type: str; String describing the message type
msg_template: str; String template that function params
are passed through.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
Returns: function with a signature of (*params);
The format for a warning used here is:
%(timestamp)d\t%(msg_type)s\t%(status)s\n
"""
if timestamp_format is None:
timestamp_format = lambda: int(time.time())
def alert(*params):
formatted_msg = msg_type + "\t" + msg_template % params
timestamped_msg = prepend_timestamp(formatted_msg, timestamp_format)
print >> warnfile, timestamped_msg
return alert
def build_alert_hooks(patterns_file, warnfile):
"""Parse data in patterns file and transform into alert_hook list.
Args:
patterns_file: file; File to read alert pattern definitions from.
warnfile: file; File to configure alert function to write warning to.
Returns:
list; Regex to alert function mapping.
[(regex, alert_function), ...]
"""
pattern_lines = patterns_file.readlines()
# expected pattern format:
# <msgtype> <newline> <regex> <newline> <alert> <newline> <newline>
# msgtype = a string categorizing the type of the message - used for
# enabling/disabling specific categories of warnings
# regex = a python regular expression
# alert = a string describing the alert message
# if the regex matches the line, this displayed warning will
# be the result of (alert % match.groups())
patterns = zip(pattern_lines[0::4], pattern_lines[1::4],
pattern_lines[2::4])
# assert that the patterns are separated by empty lines
if sum(len(line.strip()) for line in pattern_lines[3::4]) > 0:
raise ValueError('warning patterns are not separated by blank lines')
hooks = []
for msgtype, regex, alert in patterns:
regex = re.compile(regex.rstrip('\n'))
alert_function = make_alert(warnfile, msgtype.rstrip('\n'),
alert.rstrip('\n'))
hooks.append((regex, alert_function))
return hooks
| input, logfile, log_timestamp_format=None, alert_hooks=()):
"""Continuously read lines from input stream and:
- Write them to log, possibly prefixed by timestamp.
- Watch for alert patterns.
Args:
input: file; Stream to read from.
logfile: file; Log file to write to
log_timestamp_format: str; Format to use for timestamping entries.
No timestamp is added if None.
alert_hooks: list; Generated from build_alert_hooks.
[(regex, alert_function), ...]
"""
while True:
line = input.readline()
if len(line) == 0:
# this should only happen if the remote console unexpectedly
# goes away. terminate this process so that we don't spin
# forever doing 0-length reads off of input
write_logline(logfile, TERM_MSG, log_timestamp_format)
break
if line == '\n':
# If it's just an empty line we discard and continue.
continue
write_logline(logfile, line, log_timestamp_format)
for regex, callback in alert_hooks:
match = re.match(regex, line.strip())
if match:
callback(*match.groups())
def lookup_lastlines(lastlines_dirpath, path):
"""Retrieve last lines seen for path.
Open corresponding lastline file for path
If there isn't one or isn't a match return None
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that lastlines came from.
Returns:
str; Last lines seen if they exist
- Or -
None; Otherwise
"""
underscored = path.replace('/', '_')
try:
lastlines_file = open(os.path.join(lastlines_dirpath, underscored))
except (OSError, IOError):
return
lastlines = lastlines_file.read()
lastlines_file.close()
os.remove(lastlines_file.name)
if not lastlines:
return
try:
target_file = open(path)
except (OSError, IOError):
return
# Load it all in for now
target_data = target_file.read()
target_file.close()
# Get start loc in the target_data string, scanning from right
loc = target_data.rfind(lastlines)
if loc == -1:
return
# Then translate this into a reverse line number
# (count newlines that occur afterward)
reverse_lineno = target_data.count('\n', loc + len(lastlines))
return reverse_lineno
def write_lastlines_file(lastlines_dirpath, path, data):
"""Write data to lastlines file for path.
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that data comes from.
data: str;
Returns:
str; Filepath that lastline data was written to.
"""
underscored = path.replace('/', '_')
dest_path = os.path.join(lastlines_dirpath, underscored)
open(dest_path, 'w').write(data)
return dest_path
def nonblocking(pipe):
"""Set python file object to nonblocking mode.
This allows us to take advantage of pipe.read()
where we don't have to specify a buflen.
Cuts down on a few lines we'd have to maintain.
Args:
pipe: file; File object to modify
Returns: pipe
"""
flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
fcntl.fcntl(pipe, fcntl.F_SETFL, flags| os.O_NONBLOCK)
return pipe
def launch_tails(follow_paths, lastlines_dirpath=None):
"""Launch a tail process for each follow_path.
Args:
follow_paths: list;
lastlines_dirpath: str;
Returns:
tuple; (procs, pipes) or
({path: subprocess.Popen, ...}, {file: path, ...})
"""
if lastlines_dirpath and not os.path.exists(lastlines_dirpath):
os.makedirs(lastlines_dirpath)
tail_cmd = ('/usr/bin/tail', '--retry', '--follow=name')
procs = {} # path -> tail_proc
pipes = {} # tail_proc.stdout -> path
for path in follow_paths:
cmd = list(tail_cmd)
if lastlines_dirpath:
reverse_lineno = lookup_lastlines(lastlines_dirpath, path)
if reverse_lineno is None:
reverse_lineno = 1
cmd.append('--lines=%d' % reverse_lineno)
cmd.append(path)
tail_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
procs[path] = tail_proc
pipes[nonblocking(tail_proc.stdout)] = path
return procs, pipes
def poll_tail_pipes(pipes, lastlines_dirpath=None, waitsecs=5):
"""Wait on tail pipes for new data for waitsecs, return any new lines.
Args:
pipes: dict; {subprocess.Popen: follow_path, ...}
lastlines_dirpath: str; Path to write lastlines to.
waitsecs: int; Timeout to pass to select
Returns:
tuple; (lines, bad_pipes) or ([line, ...], [subprocess.Popen, ...])
"""
lines = []
bad_pipes = []
# Block until at least one is ready to read or waitsecs elapses
ready, _, _ = select.select(pipes.keys(), (), (), waitsecs)
for fi in ready:
path = pipes[fi]
data = fi.read()
if len(data) == 0:
# If no data, process is probably dead, add to bad_pipes
bad_pipes.append(fi)
continue
if lastlines_dirpath:
# Overwrite the lastlines file for this source path
# Probably just want to write the last 1-3 lines.
write_lastlines_file(lastlines_dirpath, path, data)
for line in data.splitlines():
lines.append('[%s]\t%s\n' % (path, line))
return lines, bad_pipes
def snuff(subprocs | def process_input( | random_line_split |
monitors_util.py | that will generate the timestamp string.
Returns: str; 'timestamp\tmsg'
"""
if type(format) is str:
timestamp = time.strftime(format, time.localtime())
elif callable(format):
timestamp = str(format())
else:
raise InvalidTimestampFormat
return '%s\t%s' % (timestamp, msg)
def write_logline(logfile, msg, timestamp_format=None):
"""Write msg, possibly prepended with a timestamp, as a terminated line.
Args:
logfile: file; File object to .write() msg to.
msg: str; Message to write.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
"""
msg = msg.rstrip('\n')
if timestamp_format:
msg = prepend_timestamp(msg, timestamp_format)
logfile.write(msg + '\n')
def make_alert(warnfile, msg_type, msg_template, timestamp_format=None):
"""Create an alert generation function that writes to warnfile.
Args:
warnfile: file; File object to write msg's to.
msg_type: str; String describing the message type
msg_template: str; String template that function params
are passed through.
timestamp_format: str or callable; If specified will
be passed into prepend_timestamp along with msg.
Returns: function with a signature of (*params);
The format for a warning used here is:
%(timestamp)d\t%(msg_type)s\t%(status)s\n
"""
if timestamp_format is None:
timestamp_format = lambda: int(time.time())
def alert(*params):
formatted_msg = msg_type + "\t" + msg_template % params
timestamped_msg = prepend_timestamp(formatted_msg, timestamp_format)
print >> warnfile, timestamped_msg
return alert
def build_alert_hooks(patterns_file, warnfile):
"""Parse data in patterns file and transform into alert_hook list.
Args:
patterns_file: file; File to read alert pattern definitions from.
warnfile: file; File to configure alert function to write warning to.
Returns:
list; Regex to alert function mapping.
[(regex, alert_function), ...]
"""
pattern_lines = patterns_file.readlines()
# expected pattern format:
# <msgtype> <newline> <regex> <newline> <alert> <newline> <newline>
# msgtype = a string categorizing the type of the message - used for
# enabling/disabling specific categories of warnings
# regex = a python regular expression
# alert = a string describing the alert message
# if the regex matches the line, this displayed warning will
# be the result of (alert % match.groups())
patterns = zip(pattern_lines[0::4], pattern_lines[1::4],
pattern_lines[2::4])
# assert that the patterns are separated by empty lines
if sum(len(line.strip()) for line in pattern_lines[3::4]) > 0:
raise ValueError('warning patterns are not separated by blank lines')
hooks = []
for msgtype, regex, alert in patterns:
regex = re.compile(regex.rstrip('\n'))
alert_function = make_alert(warnfile, msgtype.rstrip('\n'),
alert.rstrip('\n'))
hooks.append((regex, alert_function))
return hooks
def process_input(
input, logfile, log_timestamp_format=None, alert_hooks=()):
"""Continuously read lines from input stream and:
- Write them to log, possibly prefixed by timestamp.
- Watch for alert patterns.
Args:
input: file; Stream to read from.
logfile: file; Log file to write to
log_timestamp_format: str; Format to use for timestamping entries.
No timestamp is added if None.
alert_hooks: list; Generated from build_alert_hooks.
[(regex, alert_function), ...]
"""
while True:
line = input.readline()
if len(line) == 0:
# this should only happen if the remote console unexpectedly
# goes away. terminate this process so that we don't spin
# forever doing 0-length reads off of input
write_logline(logfile, TERM_MSG, log_timestamp_format)
break
if line == '\n':
# If it's just an empty line we discard and continue.
continue
write_logline(logfile, line, log_timestamp_format)
for regex, callback in alert_hooks:
match = re.match(regex, line.strip())
if match:
callback(*match.groups())
def lookup_lastlines(lastlines_dirpath, path):
"""Retrieve last lines seen for path.
Open corresponding lastline file for path
If there isn't one or isn't a match return None
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that lastlines came from.
Returns:
str; Last lines seen if they exist
- Or -
None; Otherwise
"""
underscored = path.replace('/', '_')
try:
lastlines_file = open(os.path.join(lastlines_dirpath, underscored))
except (OSError, IOError):
return
lastlines = lastlines_file.read()
lastlines_file.close()
os.remove(lastlines_file.name)
if not lastlines:
return
try:
target_file = open(path)
except (OSError, IOError):
return
# Load it all in for now
target_data = target_file.read()
target_file.close()
# Get start loc in the target_data string, scanning from right
loc = target_data.rfind(lastlines)
if loc == -1:
return
# Then translate this into a reverse line number
# (count newlines that occur afterward)
reverse_lineno = target_data.count('\n', loc + len(lastlines))
return reverse_lineno
def write_lastlines_file(lastlines_dirpath, path, data):
"""Write data to lastlines file for path.
Args:
lastlines_dirpath: str; Dirpath to store lastlines files to.
path: str; Filepath to source file that data comes from.
data: str;
Returns:
str; Filepath that lastline data was written to.
"""
underscored = path.replace('/', '_')
dest_path = os.path.join(lastlines_dirpath, underscored)
open(dest_path, 'w').write(data)
return dest_path
def nonblocking(pipe):
"""Set python file object to nonblocking mode.
This allows us to take advantage of pipe.read()
where we don't have to specify a buflen.
Cuts down on a few lines we'd have to maintain.
Args:
pipe: file; File object to modify
Returns: pipe
"""
flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
fcntl.fcntl(pipe, fcntl.F_SETFL, flags| os.O_NONBLOCK)
return pipe
def launch_tails(follow_paths, lastlines_dirpath=None):
"""Launch a tail process for each follow_path.
Args:
follow_paths: list;
lastlines_dirpath: str;
Returns:
tuple; (procs, pipes) or
({path: subprocess.Popen, ...}, {file: path, ...})
"""
if lastlines_dirpath and not os.path.exists(lastlines_dirpath):
os.makedirs(lastlines_dirpath)
tail_cmd = ('/usr/bin/tail', '--retry', '--follow=name')
procs = {} # path -> tail_proc
pipes = {} # tail_proc.stdout -> path
for path in follow_paths:
cmd = list(tail_cmd)
if lastlines_dirpath:
reverse_lineno = lookup_lastlines(lastlines_dirpath, path)
if reverse_lineno is None:
reverse_lineno = 1
cmd.append('--lines=%d' % reverse_lineno)
cmd.append(path)
tail_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
procs[path] = tail_proc
pipes[nonblocking(tail_proc.stdout)] = path
return procs, pipes
def poll_tail_pipes(pipes, lastlines_dirpath=None, waitsecs=5):
"""Wait on tail pipes for new data for waitsecs, return any new lines.
Args:
pipes: dict; {subprocess.Popen: follow_path, ...}
lastlines_dirpath: str; Path to write lastlines to.
waitsecs: int; Timeout to pass to select
Returns:
tuple; (lines, bad_pipes) or ([line, ...], [subprocess.Popen, ...])
"""
lines = []
bad_pipes = []
# Block until at least one is ready to read or waitsecs elapses
ready, _, _ = select.select(pipes.keys(), (), (), waitsecs)
for fi in ready:
path = pipes[fi]
data = fi.read()
if len(data) == 0:
# If no data, process is probably dead, add to bad_pipes
bad_pipes.append(fi)
continue
if lastlines_dirpath:
# Overwrite the lastlines file for this source path
# Probably just want to write the last 1-3 lines.
|
for line in data.splitlines():
lines.append('[%s]\t%s\n' % (path, line))
return lines, bad_pipes
def snuff(sub | write_lastlines_file(lastlines_dirpath, path, data) | conditional_block |
marker-attribute-on-non-trait.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(marker_trait_attr)]
#[marker] //~ ERROR attribute can only be applied to a trait
struct | {}
#[marker] //~ ERROR attribute can only be applied to a trait
impl Struct {}
#[marker] //~ ERROR attribute can only be applied to a trait
union Union {
x: i32,
}
#[marker] //~ ERROR attribute can only be applied to a trait
const CONST: usize = 10;
#[marker] //~ ERROR attribute can only be applied to a trait
fn function() {}
#[marker] //~ ERROR attribute can only be applied to a trait
type Type = ();
fn main() {}
| Struct | identifier_name |
marker-attribute-on-non-trait.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(marker_trait_attr)]
#[marker] //~ ERROR attribute can only be applied to a trait
struct Struct {}
#[marker] //~ ERROR attribute can only be applied to a trait
impl Struct {}
#[marker] //~ ERROR attribute can only be applied to a trait
union Union { |
#[marker] //~ ERROR attribute can only be applied to a trait
const CONST: usize = 10;
#[marker] //~ ERROR attribute can only be applied to a trait
fn function() {}
#[marker] //~ ERROR attribute can only be applied to a trait
type Type = ();
fn main() {} | x: i32,
} | random_line_split |
gdrive.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Google Drive database plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import gdrive as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import gdrive
from tests.parsers.sqlite_plugins import test_lib
class | (test_lib.SQLitePluginTestCase):
"""Tests for the Google Drive database plugin."""
def testProcess(self):
"""Tests the Process function on a Google Drive database file."""
plugin = gdrive.GoogleDrivePlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(['snapshot.db'], plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 30)
# Let's verify that we've got the correct balance of cloud and local
# entry events.
# 10 files mounting to:
# 20 Cloud Entries (two timestamps per entry).
# 10 Local Entries (one timestamp per entry).
local_entries = []
cloud_entries = []
for event in storage_writer.GetEvents():
event_data = self._GetEventDataOfEvent(storage_writer, event)
if event_data.data_type == 'gdrive:snapshot:local_entry':
local_entries.append(event)
else:
cloud_entries.append(event)
self.assertEqual(len(local_entries), 10)
self.assertEqual(len(cloud_entries), 20)
# Test one local and one cloud entry.
event = local_entries[5]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:11:25.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
file_path = (
'%local_sync_root%/Top Secret/Enn meiri '
'leyndarmál/Sýnileiki - Örverpi.gdoc')
self.assertEqual(event_data.path, file_path)
expected_message = 'File Path: {0:s} Size: 184'.format(file_path)
self._TestGetMessageStrings(
event_data, expected_message, file_path)
event = cloud_entries[16]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:12:27.000000')
self.assertEqual(
event.timestamp_desc, definitions.TIME_DESCRIPTION_MODIFICATION)
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.document_type, 6)
expected_url = (
'https://docs.google.com/document/d/'
'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api')
self.assertEqual(event_data.url, expected_url)
expected_message = (
'File Path: /Almenningur/Saklausa hliðin '
'[Private] '
'Size: 0 '
'URL: {0:s} '
'Type: DOCUMENT').format(expected_url)
expected_short_message = '/Almenningur/Saklausa hliðin'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main()
| GoogleDrivePluginTest | identifier_name |
gdrive.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Google Drive database plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import gdrive as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import gdrive
from tests.parsers.sqlite_plugins import test_lib
class GoogleDrivePluginTest(test_lib.SQLitePluginTestCase):
"""Tests for the Google Drive database plugin."""
def testProcess(self):
"""Tests the Process function on a Google Drive database file."""
plugin = gdrive.GoogleDrivePlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(['snapshot.db'], plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 30)
# Let's verify that we've got the correct balance of cloud and local
# entry events.
# 10 files mounting to:
# 20 Cloud Entries (two timestamps per entry). | if event_data.data_type == 'gdrive:snapshot:local_entry':
local_entries.append(event)
else:
cloud_entries.append(event)
self.assertEqual(len(local_entries), 10)
self.assertEqual(len(cloud_entries), 20)
# Test one local and one cloud entry.
event = local_entries[5]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:11:25.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
file_path = (
'%local_sync_root%/Top Secret/Enn meiri '
'leyndarmál/Sýnileiki - Örverpi.gdoc')
self.assertEqual(event_data.path, file_path)
expected_message = 'File Path: {0:s} Size: 184'.format(file_path)
self._TestGetMessageStrings(
event_data, expected_message, file_path)
event = cloud_entries[16]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:12:27.000000')
self.assertEqual(
event.timestamp_desc, definitions.TIME_DESCRIPTION_MODIFICATION)
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.document_type, 6)
expected_url = (
'https://docs.google.com/document/d/'
'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api')
self.assertEqual(event_data.url, expected_url)
expected_message = (
'File Path: /Almenningur/Saklausa hliðin '
'[Private] '
'Size: 0 '
'URL: {0:s} '
'Type: DOCUMENT').format(expected_url)
expected_short_message = '/Almenningur/Saklausa hliðin'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main() | # 10 Local Entries (one timestamp per entry).
local_entries = []
cloud_entries = []
for event in storage_writer.GetEvents():
event_data = self._GetEventDataOfEvent(storage_writer, event) | random_line_split |
gdrive.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Google Drive database plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import gdrive as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import gdrive
from tests.parsers.sqlite_plugins import test_lib
class GoogleDrivePluginTest(test_lib.SQLitePluginTestCase):
"""Tests for the Google Drive database plugin."""
def testProcess(self):
|
self.assertEqual(len(local_entries), 10)
self.assertEqual(len(cloud_entries), 20)
# Test one local and one cloud entry.
event = local_entries[5]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:11:25.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
file_path = (
'%local_sync_root%/Top Secret/Enn meiri '
'leyndarmál/Sýnileiki - Örverpi.gdoc')
self.assertEqual(event_data.path, file_path)
expected_message = 'File Path: {0:s} Size: 184'.format(file_path)
self._TestGetMessageStrings(
event_data, expected_message, file_path)
event = cloud_entries[16]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:12:27.000000')
self.assertEqual(
event.timestamp_desc, definitions.TIME_DESCRIPTION_MODIFICATION)
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.document_type, 6)
expected_url = (
'https://docs.google.com/document/d/'
'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api')
self.assertEqual(event_data.url, expected_url)
expected_message = (
'File Path: /Almenningur/Saklausa hliðin '
'[Private] '
'Size: 0 '
'URL: {0:s} '
'Type: DOCUMENT').format(expected_url)
expected_short_message = '/Almenningur/Saklausa hliðin'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
if
__name__ == '__main__':
unittest.main()
| """Tests the Process function on a Google Drive database file."""
plugin = gdrive.GoogleDrivePlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(['snapshot.db'], plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 30)
# Let's verify that we've got the correct balance of cloud and local
# entry events.
# 10 files mounting to:
# 20 Cloud Entries (two timestamps per entry).
# 10 Local Entries (one timestamp per entry).
local_entries = []
cloud_entries = []
for event in storage_writer.GetEvents():
event_data = self._GetEventDataOfEvent(storage_writer, event)
if event_data.data_type == 'gdrive:snapshot:local_entry':
local_entries.append(event)
else:
cloud_entries.append(event) | identifier_body |
gdrive.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Google Drive database plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import gdrive as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import gdrive
from tests.parsers.sqlite_plugins import test_lib
class GoogleDrivePluginTest(test_lib.SQLitePluginTestCase):
"""Tests for the Google Drive database plugin."""
def testProcess(self):
"""Tests the Process function on a Google Drive database file."""
plugin = gdrive.GoogleDrivePlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(['snapshot.db'], plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 30)
# Let's verify that we've got the correct balance of cloud and local
# entry events.
# 10 files mounting to:
# 20 Cloud Entries (two timestamps per entry).
# 10 Local Entries (one timestamp per entry).
local_entries = []
cloud_entries = []
for event in storage_writer.GetEvents():
event_data = self._GetEventDataOfEvent(storage_writer, event)
if event_data.data_type == 'gdrive:snapshot:local_entry':
local_entries.append(event)
else:
cloud_entries.append(event)
self.assertEqual(len(local_entries), 10)
self.assertEqual(len(cloud_entries), 20)
# Test one local and one cloud entry.
event = local_entries[5]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:11:25.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
file_path = (
'%local_sync_root%/Top Secret/Enn meiri '
'leyndarmál/Sýnileiki - Örverpi.gdoc')
self.assertEqual(event_data.path, file_path)
expected_message = 'File Path: {0:s} Size: 184'.format(file_path)
self._TestGetMessageStrings(
event_data, expected_message, file_path)
event = cloud_entries[16]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:12:27.000000')
self.assertEqual(
event.timestamp_desc, definitions.TIME_DESCRIPTION_MODIFICATION)
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.document_type, 6)
expected_url = (
'https://docs.google.com/document/d/'
'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api')
self.assertEqual(event_data.url, expected_url)
expected_message = (
'File Path: /Almenningur/Saklausa hliðin '
'[Private] '
'Size: 0 '
'URL: {0:s} '
'Type: DOCUMENT').format(expected_url)
expected_short_message = '/Almenningur/Saklausa hliðin'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
if __name__ == '__main__':
unitt | est.main()
| conditional_block |
|
0_setiplist.py | import os
serverA = open('serverlistA.list', 'r')
serverB = open('serverlistB.list', 'r')
numA = int(serverA.readline())
numB = int(serverB.readline())
iplistA = open('iplistA', 'w')
iplistB = open('iplistB', 'w')
sshconfig = open('/Users/iqua/.ssh/config', 'w')
csshconfig = open('/etc/clusters', 'w')
csshconfig.write("rackA ")
for i in range(numA):
port = int(serverA.readline())
content = "host A" + str(port) + "\n Hostname sing.cse.ust.hk\n User shuhao\n Port " + str(port) + "\n"
sshconfig.write(content)
hostname = "A" + str(port) + " "
csshconfig.write(hostname)
ipaddr = "192.168.6." + str(port - 30042) + "\n"
iplistA.write(ipaddr)
iplistA.close()
csshconfig.write("\nrackB ")
for i in range(numB):
port = int(serverB.readline())
content = "host B" + str(port) + "\n Hostname sing.cse.ust.hk\n User shuhao\n Port " + str(port) + "\n"
sshconfig.write(content)
hostname = "B" + str(port) + " "
csshconfig.write(hostname)
if (port == 30055):
port = 30050
ipaddr = "192.168.7." + str(port - 30048) + "\n"
iplistB.write(ipaddr)
iplistB.close()
sshconfig.close()
csshconfig.close()
serverA.close()
serverB.close()
csshconfig = open('/etc/clusters', 'r')
serverA = csshconfig.readline().split()
serverB = csshconfig.readline().split()
csshconfig.close()
for i in range(1, numA+1):
os.system("echo '" + str(i) + "' > iplist && cat iplistB >> iplist")
os.system("scp ./iplist " + serverA[i] + ":~/repnet/exp_code/iplist")
print "Done copying iplist to", serverA[i]
for j in range(1, numB+1):
os.system("echo '" + str(i+j) + "' > iplist && cat iplistA >> iplist")
cmd = "scp ./iplist " + serverB[j] + ":~/repnet/exp_code/iplist"
os.system(cmd) | os.system("rm iplist*") | print "Done copying iplist to", serverB[j]
| random_line_split |
0_setiplist.py | import os
serverA = open('serverlistA.list', 'r')
serverB = open('serverlistB.list', 'r')
numA = int(serverA.readline())
numB = int(serverB.readline())
iplistA = open('iplistA', 'w')
iplistB = open('iplistB', 'w')
sshconfig = open('/Users/iqua/.ssh/config', 'w')
csshconfig = open('/etc/clusters', 'w')
csshconfig.write("rackA ")
for i in range(numA):
port = int(serverA.readline())
content = "host A" + str(port) + "\n Hostname sing.cse.ust.hk\n User shuhao\n Port " + str(port) + "\n"
sshconfig.write(content)
hostname = "A" + str(port) + " "
csshconfig.write(hostname)
ipaddr = "192.168.6." + str(port - 30042) + "\n"
iplistA.write(ipaddr)
iplistA.close()
csshconfig.write("\nrackB ")
for i in range(numB):
port = int(serverB.readline())
content = "host B" + str(port) + "\n Hostname sing.cse.ust.hk\n User shuhao\n Port " + str(port) + "\n"
sshconfig.write(content)
hostname = "B" + str(port) + " "
csshconfig.write(hostname)
if (port == 30055):
port = 30050
ipaddr = "192.168.7." + str(port - 30048) + "\n"
iplistB.write(ipaddr)
iplistB.close()
sshconfig.close()
csshconfig.close()
serverA.close()
serverB.close()
csshconfig = open('/etc/clusters', 'r')
serverA = csshconfig.readline().split()
serverB = csshconfig.readline().split()
csshconfig.close()
for i in range(1, numA+1):
|
for j in range(1, numB+1):
os.system("echo '" + str(i+j) + "' > iplist && cat iplistA >> iplist")
cmd = "scp ./iplist " + serverB[j] + ":~/repnet/exp_code/iplist"
os.system(cmd)
print "Done copying iplist to", serverB[j]
os.system("rm iplist*")
| os.system("echo '" + str(i) + "' > iplist && cat iplistB >> iplist")
os.system("scp ./iplist " + serverA[i] + ":~/repnet/exp_code/iplist")
print "Done copying iplist to", serverA[i] | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.