file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
12.1k
| suffix
large_stringlengths 0
12k
| middle
large_stringlengths 0
7.51k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
placement-in-syntax.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code, unused_variables)]
#![feature(box_heap)]
#![feature(placement_in_syntax)]
// Tests that the new `in` syntax works with unique pointers.
//
// Compare with new-box-syntax.rs
use std::boxed::{Box, HEAP};
struct Structure {
x: isize,
y: isize,
}
pub fn main() | {
let x: Box<isize> = in HEAP { 2 };
let b: Box<isize> = in HEAP { 1 + 2 };
let c = in HEAP { 3 + 4 };
let s: Box<Structure> = in HEAP {
Structure {
x: 3,
y: 4,
}
};
} | identifier_body |
|
lib.rs | /*!
# Kiss3d
Keep It Simple, Stupid 3d graphics engine.
This library is born from the frustration in front of the fact that today’s 3D
graphics library are:
* either too low level: you have to write your own shaders and opening a
window steals you 8 hours, 300 lines of code and 10L of coffee.
* or high level but too hard to understand/use: those are libraries made to
write beautiful animations or games. They have a lot of feature; too much
feature if you only want to draw a few geometries on the screen.
**Kiss3d** is not designed to be feature-complete or fast.
It is designed to be able to draw simple geometric figures and play with them
with one-liners.
## Features
Most features are one-liners.
* open a window with a default arc-ball camera and a point light.
* a first-person camera is available too and user-defined cameras are possible.
* display boxes, spheres, cones, cylinders, quads and lines.
* change an object color or texture.
* change an object transform (we use the [nalgebra](http://nalgebra.org) library
to do that). An object cannot be scaled though.
* create basic post-processing effects.
As an example, having a red, rotating cube with the light attached to the camera is as simple as:
```no_run
extern crate kiss3d;
extern crate nalgebra as na;
use na::Vec3;
use kiss3d::window::Window;
use kiss3d::light::Light;
fn main() {
let mut window = Window::new("Kiss3d: cube");
let mut c = window.add_cube(1.0, 1.0, 1.0);
c.set_color(1.0, 0.0, 0.0);
window.set_light(Light::StickToCamera);
while window.render() {
c.prepend_to_local_rotation(&Vec3::new(0.0f32, 0.014, 0.0));
}
}
```
Some controls are handled by default by the engine (they can be overridden by the user):
* `scroll`: zoom in / zoom out.
* `left click + drag`: look around.
* `right click + drag`: translate the view point.
* `enter`: look at the origin (0.0, 0.0, 0.0).
## Compilation
You will need the last nightly build of the [rust compiler](http://www.rust-lang.org)
and the official package manager: [cargo](https://github.com/rust-lang/cargo).
Simply add the following to your `Cargo.toml` file: | ```text
[dependencies.kiss3d]
git = "https://github.com/sebcrozet/kiss3d"
```
## Contributions
I’d love to see people improving this library for their own needs. However, keep in mind that
**Kiss3d** is KISS. One-liner features (from the user point of view) are preferred.
*/
#![deny(non_camel_case_types)]
#![deny(unused_parens)]
#![deny(non_upper_case_globals)]
#![deny(unused_qualifications)]
#![warn(missing_docs)] // FIXME: should be denied.
#![deny(unused_results)]
#![allow(unused_unsafe)] // FIXME: should be denied
#![allow(missing_copy_implementations)]
#![doc(html_root_url = "http://kiss3d.org/doc")]
extern crate libc;
extern crate time;
extern crate gl;
extern crate num;
extern crate nalgebra as na;
extern crate ncollide_procedural;
extern crate image;
extern crate freetype;
extern crate glfw;
mod error;
pub mod window;
pub mod scene;
pub mod camera;
pub mod light;
pub mod loader;
pub mod line_renderer;
pub mod point_renderer;
pub mod builtin;
pub mod post_processing;
pub mod resource;
pub mod text; | random_line_split |
|
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Find all the available input interfaces and try to initialize them.
"""
import os
import glob
import logging
from ..inputreaderinterface import InputReaderInterface
__author__ = 'Bitcraze AB'
__all__ = ['InputInterface']
logger = logging.getLogger(__name__)
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for f in
glob.glob(os.path.dirname(__file__) + "/[A-Za-z]*.py")]
if len(found_interfaces) == 0:
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for
f in glob.glob(os.path.dirname(__file__) +
"/[A-Za-z]*.pyc")]
logger.info("Found interfaces: {}".format(found_interfaces))
initialized_interfaces = []
available_interfaces = []
for interface in found_interfaces:
try:
module = __import__(interface, globals(), locals(), [interface], 1)
main_name = getattr(module, "MODULE_MAIN")
initialized_interfaces.append(getattr(module, main_name)())
logger.info("Successfully initialized [{}]".format(interface))
except Exception as e:
logger.info("Could not initialize [{}]: {}".format(interface, e))
def devices():
# Todo: Support rescanning and adding/removing devices
if len(available_interfaces) == 0:
|
return available_interfaces
class InputInterface(InputReaderInterface):
def __init__(self, dev_name, dev_id, dev_reader):
super(InputInterface, self).__init__(dev_name, dev_id, dev_reader)
# These devices cannot be mapped and configured
self.supports_mapping = False
# Ask the reader if it wants to limit
# roll/pitch/yaw/thrust for all devices
self.limit_rp = dev_reader.limit_rp
self.limit_thrust = dev_reader.limit_thrust
self.limit_yaw = dev_reader.limit_yaw
def open(self):
self._reader.open(self.id)
def close(self):
self._reader.close(self.id)
def read(self, include_raw=False):
mydata = self._reader.read(self.id)
# Merge interface returned data into InputReader Data Item
for key in list(mydata.keys()):
self.data.set(key, mydata[key])
return self.data
| for reader in initialized_interfaces:
devs = reader.devices()
for dev in devs:
available_interfaces.append(InputInterface(
dev["name"], dev["id"], reader)) | conditional_block |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Find all the available input interfaces and try to initialize them.
"""
import os
import glob
import logging
from ..inputreaderinterface import InputReaderInterface
__author__ = 'Bitcraze AB'
__all__ = ['InputInterface']
logger = logging.getLogger(__name__)
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for f in
glob.glob(os.path.dirname(__file__) + "/[A-Za-z]*.py")]
if len(found_interfaces) == 0:
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for
f in glob.glob(os.path.dirname(__file__) +
"/[A-Za-z]*.pyc")]
logger.info("Found interfaces: {}".format(found_interfaces))
initialized_interfaces = []
available_interfaces = []
for interface in found_interfaces:
try:
module = __import__(interface, globals(), locals(), [interface], 1)
main_name = getattr(module, "MODULE_MAIN")
initialized_interfaces.append(getattr(module, main_name)())
logger.info("Successfully initialized [{}]".format(interface))
except Exception as e:
logger.info("Could not initialize [{}]: {}".format(interface, e))
def devices():
# Todo: Support rescanning and adding/removing devices
if len(available_interfaces) == 0:
for reader in initialized_interfaces:
devs = reader.devices()
for dev in devs:
available_interfaces.append(InputInterface(
dev["name"], dev["id"], reader))
return available_interfaces
class InputInterface(InputReaderInterface):
def __init__(self, dev_name, dev_id, dev_reader):
super(InputInterface, self).__init__(dev_name, dev_id, dev_reader)
# These devices cannot be mapped and configured
self.supports_mapping = False
# Ask the reader if it wants to limit
# roll/pitch/yaw/thrust for all devices
self.limit_rp = dev_reader.limit_rp
self.limit_thrust = dev_reader.limit_thrust
self.limit_yaw = dev_reader.limit_yaw
def open(self):
self._reader.open(self.id)
def close(self):
self._reader.close(self.id)
def | (self, include_raw=False):
mydata = self._reader.read(self.id)
# Merge interface returned data into InputReader Data Item
for key in list(mydata.keys()):
self.data.set(key, mydata[key])
return self.data
| read | identifier_name |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Find all the available input interfaces and try to initialize them.
"""
import os
import glob
import logging
from ..inputreaderinterface import InputReaderInterface
__author__ = 'Bitcraze AB'
__all__ = ['InputInterface']
logger = logging.getLogger(__name__)
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for f in
glob.glob(os.path.dirname(__file__) + "/[A-Za-z]*.py")]
if len(found_interfaces) == 0:
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for
f in glob.glob(os.path.dirname(__file__) +
"/[A-Za-z]*.pyc")]
logger.info("Found interfaces: {}".format(found_interfaces))
initialized_interfaces = []
available_interfaces = []
for interface in found_interfaces:
try:
module = __import__(interface, globals(), locals(), [interface], 1)
main_name = getattr(module, "MODULE_MAIN")
initialized_interfaces.append(getattr(module, main_name)())
logger.info("Successfully initialized [{}]".format(interface))
except Exception as e:
logger.info("Could not initialize [{}]: {}".format(interface, e))
def devices():
# Todo: Support rescanning and adding/removing devices
|
class InputInterface(InputReaderInterface):
def __init__(self, dev_name, dev_id, dev_reader):
super(InputInterface, self).__init__(dev_name, dev_id, dev_reader)
# These devices cannot be mapped and configured
self.supports_mapping = False
# Ask the reader if it wants to limit
# roll/pitch/yaw/thrust for all devices
self.limit_rp = dev_reader.limit_rp
self.limit_thrust = dev_reader.limit_thrust
self.limit_yaw = dev_reader.limit_yaw
def open(self):
self._reader.open(self.id)
def close(self):
self._reader.close(self.id)
def read(self, include_raw=False):
mydata = self._reader.read(self.id)
# Merge interface returned data into InputReader Data Item
for key in list(mydata.keys()):
self.data.set(key, mydata[key])
return self.data
| if len(available_interfaces) == 0:
for reader in initialized_interfaces:
devs = reader.devices()
for dev in devs:
available_interfaces.append(InputInterface(
dev["name"], dev["id"], reader))
return available_interfaces | identifier_body |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Find all the available input interfaces and try to initialize them.
"""
import os
import glob
import logging
from ..inputreaderinterface import InputReaderInterface
__author__ = 'Bitcraze AB'
__all__ = ['InputInterface']
logger = logging.getLogger(__name__)
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for f in
glob.glob(os.path.dirname(__file__) + "/[A-Za-z]*.py")]
if len(found_interfaces) == 0:
found_interfaces = [os.path.splitext(os.path.basename(f))[0] for
f in glob.glob(os.path.dirname(__file__) +
"/[A-Za-z]*.pyc")]
logger.info("Found interfaces: {}".format(found_interfaces))
initialized_interfaces = []
available_interfaces = []
for interface in found_interfaces:
try:
module = __import__(interface, globals(), locals(), [interface], 1)
main_name = getattr(module, "MODULE_MAIN")
initialized_interfaces.append(getattr(module, main_name)())
logger.info("Successfully initialized [{}]".format(interface))
except Exception as e:
logger.info("Could not initialize [{}]: {}".format(interface, e))
| # Todo: Support rescanning and adding/removing devices
if len(available_interfaces) == 0:
for reader in initialized_interfaces:
devs = reader.devices()
for dev in devs:
available_interfaces.append(InputInterface(
dev["name"], dev["id"], reader))
return available_interfaces
class InputInterface(InputReaderInterface):
def __init__(self, dev_name, dev_id, dev_reader):
super(InputInterface, self).__init__(dev_name, dev_id, dev_reader)
# These devices cannot be mapped and configured
self.supports_mapping = False
# Ask the reader if it wants to limit
# roll/pitch/yaw/thrust for all devices
self.limit_rp = dev_reader.limit_rp
self.limit_thrust = dev_reader.limit_thrust
self.limit_yaw = dev_reader.limit_yaw
def open(self):
self._reader.open(self.id)
def close(self):
self._reader.close(self.id)
def read(self, include_raw=False):
mydata = self._reader.read(self.id)
# Merge interface returned data into InputReader Data Item
for key in list(mydata.keys()):
self.data.set(key, mydata[key])
return self.data | def devices(): | random_line_split |
mod.rs | //! Dedicated single thread actor-ref implementations
use std::thread;
use std::sync::mpsc::{channel, Sender};
use std::sync::{Arc, Mutex};
use std::fmt::{self, Debug, Formatter};
use {Actor, ActorSpawner};
use {ActorRef, ActorRefImpl, ActorRefEnum};
use {SendError};
#[cfg(test)]
mod tests;
/// A simplistic environment to run an actor in
/// which can act as ActorRef.
///
/// It uses one thread per actor.
pub struct ActorCell<Message: Send> {
tx: Sender<Message>,
actor: Arc<Mutex<Box<Actor<Message>>>>,
}
/// An ActorSpawner which spawns a dedicated thread for every
/// actor.
pub struct DedicatedThreadSpawner;
impl ActorSpawner for DedicatedThreadSpawner {
/// Create and ActorCell for the given actor.
fn spawn<Message, A>(&self, actor: A) -> ActorRef<Message>
where Message: Send + 'static, A: Actor<Message> + 'static
{
let (tx, rx) = channel();
let actor_box: Box<Actor<Message>> = Box::new(actor);
let actor = Arc::new(Mutex::new(actor_box));
let actor_for_thread = actor.clone();
thread::spawn( move|| {
let mut actor = actor_for_thread.lock().unwrap();
loop {
match rx.recv() {
Ok(msg) => {
debug!("Processing");
actor.process(msg);
},
Err(error) => {
debug!("Quitting: {:?}", error);
break;
},
}
}
});
ActorRef(
ActorRefEnum::DedicatedThread(
ActorCell {
tx: tx,
actor: actor
}
)
)
}
}
impl<Message: Send + 'static> ActorRefImpl<Message> for ActorCell<Message> {
fn | (&self, msg: Message) -> Result<(), SendError<Message>> {
Ok(try!(self.tx.send(msg)))
}
}
impl<Message: Send + 'static> Debug for ActorCell<Message> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
write!(f, "ActorCell")
}
}
impl<Message: Send + 'static> Clone for ActorCell<Message> {
fn clone(&self) -> ActorCell<Message> {
ActorCell {
tx: self.tx.clone(),
actor: self.actor.clone(),
}
}
}
| send | identifier_name |
mod.rs | //! Dedicated single thread actor-ref implementations
use std::thread;
use std::sync::mpsc::{channel, Sender};
use std::sync::{Arc, Mutex};
use std::fmt::{self, Debug, Formatter};
use {Actor, ActorSpawner};
use {ActorRef, ActorRefImpl, ActorRefEnum};
use {SendError};
#[cfg(test)]
mod tests;
/// A simplistic environment to run an actor in
/// which can act as ActorRef.
///
/// It uses one thread per actor.
pub struct ActorCell<Message: Send> {
tx: Sender<Message>,
actor: Arc<Mutex<Box<Actor<Message>>>>,
}
/// An ActorSpawner which spawns a dedicated thread for every
/// actor.
pub struct DedicatedThreadSpawner;
impl ActorSpawner for DedicatedThreadSpawner {
/// Create and ActorCell for the given actor.
fn spawn<Message, A>(&self, actor: A) -> ActorRef<Message>
where Message: Send + 'static, A: Actor<Message> + 'static
| }
});
ActorRef(
ActorRefEnum::DedicatedThread(
ActorCell {
tx: tx,
actor: actor
}
)
)
}
}
impl<Message: Send + 'static> ActorRefImpl<Message> for ActorCell<Message> {
fn send(&self, msg: Message) -> Result<(), SendError<Message>> {
Ok(try!(self.tx.send(msg)))
}
}
impl<Message: Send + 'static> Debug for ActorCell<Message> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
write!(f, "ActorCell")
}
}
impl<Message: Send + 'static> Clone for ActorCell<Message> {
fn clone(&self) -> ActorCell<Message> {
ActorCell {
tx: self.tx.clone(),
actor: self.actor.clone(),
}
}
}
| {
let (tx, rx) = channel();
let actor_box: Box<Actor<Message>> = Box::new(actor);
let actor = Arc::new(Mutex::new(actor_box));
let actor_for_thread = actor.clone();
thread::spawn( move|| {
let mut actor = actor_for_thread.lock().unwrap();
loop {
match rx.recv() {
Ok(msg) => {
debug!("Processing");
actor.process(msg);
},
Err(error) => {
debug!("Quitting: {:?}", error);
break;
},
} | identifier_body |
mod.rs | //! Dedicated single thread actor-ref implementations
use std::thread;
use std::sync::mpsc::{channel, Sender};
use std::sync::{Arc, Mutex};
use std::fmt::{self, Debug, Formatter};
use {Actor, ActorSpawner};
use {ActorRef, ActorRefImpl, ActorRefEnum};
use {SendError};
#[cfg(test)]
mod tests;
/// A simplistic environment to run an actor in
/// which can act as ActorRef.
///
/// It uses one thread per actor.
pub struct ActorCell<Message: Send> {
tx: Sender<Message>,
actor: Arc<Mutex<Box<Actor<Message>>>>,
}
/// An ActorSpawner which spawns a dedicated thread for every
/// actor.
pub struct DedicatedThreadSpawner;
impl ActorSpawner for DedicatedThreadSpawner {
/// Create and ActorCell for the given actor.
fn spawn<Message, A>(&self, actor: A) -> ActorRef<Message>
where Message: Send + 'static, A: Actor<Message> + 'static
{
let (tx, rx) = channel();
let actor_box: Box<Actor<Message>> = Box::new(actor);
let actor = Arc::new(Mutex::new(actor_box));
let actor_for_thread = actor.clone();
thread::spawn( move|| {
let mut actor = actor_for_thread.lock().unwrap();
loop {
match rx.recv() {
Ok(msg) => | ,
Err(error) => {
debug!("Quitting: {:?}", error);
break;
},
}
}
});
ActorRef(
ActorRefEnum::DedicatedThread(
ActorCell {
tx: tx,
actor: actor
}
)
)
}
}
impl<Message: Send + 'static> ActorRefImpl<Message> for ActorCell<Message> {
fn send(&self, msg: Message) -> Result<(), SendError<Message>> {
Ok(try!(self.tx.send(msg)))
}
}
impl<Message: Send + 'static> Debug for ActorCell<Message> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
write!(f, "ActorCell")
}
}
impl<Message: Send + 'static> Clone for ActorCell<Message> {
fn clone(&self) -> ActorCell<Message> {
ActorCell {
tx: self.tx.clone(),
actor: self.actor.clone(),
}
}
}
| {
debug!("Processing");
actor.process(msg);
} | conditional_block |
mod.rs | //! Dedicated single thread actor-ref implementations
use std::thread;
use std::sync::mpsc::{channel, Sender};
use std::sync::{Arc, Mutex};
use std::fmt::{self, Debug, Formatter};
use {Actor, ActorSpawner};
use {ActorRef, ActorRefImpl, ActorRefEnum};
use {SendError};
#[cfg(test)]
mod tests;
/// A simplistic environment to run an actor in
/// which can act as ActorRef.
///
/// It uses one thread per actor.
pub struct ActorCell<Message: Send> {
tx: Sender<Message>,
actor: Arc<Mutex<Box<Actor<Message>>>>,
}
/// An ActorSpawner which spawns a dedicated thread for every
/// actor.
pub struct DedicatedThreadSpawner;
impl ActorSpawner for DedicatedThreadSpawner {
/// Create and ActorCell for the given actor.
fn spawn<Message, A>(&self, actor: A) -> ActorRef<Message>
where Message: Send + 'static, A: Actor<Message> + 'static
{
let (tx, rx) = channel();
let actor_box: Box<Actor<Message>> = Box::new(actor);
let actor = Arc::new(Mutex::new(actor_box));
let actor_for_thread = actor.clone();
thread::spawn( move|| {
let mut actor = actor_for_thread.lock().unwrap();
loop {
match rx.recv() {
Ok(msg) => {
debug!("Processing");
actor.process(msg);
},
Err(error) => {
debug!("Quitting: {:?}", error);
break;
},
}
}
});
ActorRef(
ActorRefEnum::DedicatedThread(
ActorCell {
tx: tx,
actor: actor
}
)
)
}
}
impl<Message: Send + 'static> ActorRefImpl<Message> for ActorCell<Message> {
fn send(&self, msg: Message) -> Result<(), SendError<Message>> {
Ok(try!(self.tx.send(msg))) | }
impl<Message: Send + 'static> Debug for ActorCell<Message> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
write!(f, "ActorCell")
}
}
impl<Message: Send + 'static> Clone for ActorCell<Message> {
fn clone(&self) -> ActorCell<Message> {
ActorCell {
tx: self.tx.clone(),
actor: self.actor.clone(),
}
}
} | } | random_line_split |
PermalinkProvider.js | /*
* Copyright (c) 2008-2015 The Open Source Geospatial Foundation
*
* Published under the BSD license.
* See https://github.com/geoext/geoext2/blob/master/license.txt for the full
* text of the license.
*/
/*
* @include OpenLayers/Util.js
* @requires GeoExt/Version.js
*/
/**
* The permalink provider.
*
* Sample code displaying a new permalink each time the map is moved:
*
* // create permalink provider
* var permalinkProvider = Ext.create('GeoExt.state.PermalinkProvider', {});
* // set it in the state manager
* Ext.state.Manager.setProvider(permalinkProvider);
* // create a map panel, and make it stateful
* var mapPanel = Ext.create('GeoExt.panel.Map', { | * renderTo: "map",
* layers: [
* new OpenLayers.Layer.WMS(
* "Global Imagery",
* "http://maps.opengeo.org/geowebcache/service/wms",
* {layers: "bluemarble"}
* )
* ],
* stateId: "map",
* prettyStateKeys: true // for pretty permalinks
* });
* // display permalink each time state is changed
* permalinkProvider.on({
* statechange: function(provider, name, value) {
* alert(provider.getLink());
* }
* });
*
* @class GeoExt.state.PermalinkProvider
*/
Ext.define('GeoExt.state.PermalinkProvider', {
extend : 'Ext.state.Provider',
requires : [
'GeoExt.Version'
],
alias : 'widget.gx_permalinkprovider',
/**
*
*/
constructor: function(config){
this.callParent(arguments);
config = config || {};
var url = config.url;
delete config.url;
Ext.apply(this, config);
this.state = this.readURL(url);
},
/**
* Specifies whether type of state values should be encoded and decoded.
* Set it to `false` if you work with components that don't require
* encoding types, and want pretty permalinks.
*
* @property{Boolean}
* @private
*/
encodeType: true,
/**
* Create a state object from a URL.
*
* @param url {String} The URL to get the state from.
* @return {Object} The state object.
* @private
*/
readURL: function(url) {
var state = {};
var params = OpenLayers.Util.getParameters(url);
var k, split, stateId;
for(k in params) {
if(params.hasOwnProperty(k)) {
split = k.split("_");
if(split.length > 1) {
stateId = split[0];
state[stateId] = state[stateId] || {};
state[stateId][split.slice(1).join("_")] = this.encodeType ?
this.decodeValue(params[k]) : params[k];
}
}
}
return state;
},
/**
* Returns the permalink corresponding to the current state.
*
* @param base {String} The base URL, optional.
* @return {String} The permalink.
*/
getLink: function(base) {
base = base || document.location.href;
var params = {};
var id, k, state = this.state;
for(id in state) {
if(state.hasOwnProperty(id)) {
for(k in state[id]) {
params[id + "_" + k] = this.encodeType ?
unescape(this.encodeValue(state[id][k])) : state[id][k];
}
}
}
// merge params in the URL into the state params
OpenLayers.Util.applyDefaults(
params, OpenLayers.Util.getParameters(base));
var paramsStr = OpenLayers.Util.getParameterString(params);
var qMark = base.indexOf("?");
if(qMark > 0) {
base = base.substring(0, qMark);
}
return Ext.urlAppend(base, paramsStr);
}
}); | random_line_split |
|
PermalinkProvider.js | /*
* Copyright (c) 2008-2015 The Open Source Geospatial Foundation
*
* Published under the BSD license.
* See https://github.com/geoext/geoext2/blob/master/license.txt for the full
* text of the license.
*/
/*
* @include OpenLayers/Util.js
* @requires GeoExt/Version.js
*/
/**
* The permalink provider.
*
* Sample code displaying a new permalink each time the map is moved:
*
* // create permalink provider
* var permalinkProvider = Ext.create('GeoExt.state.PermalinkProvider', {});
* // set it in the state manager
* Ext.state.Manager.setProvider(permalinkProvider);
* // create a map panel, and make it stateful
* var mapPanel = Ext.create('GeoExt.panel.Map', {
* renderTo: "map",
* layers: [
* new OpenLayers.Layer.WMS(
* "Global Imagery",
* "http://maps.opengeo.org/geowebcache/service/wms",
* {layers: "bluemarble"}
* )
* ],
* stateId: "map",
* prettyStateKeys: true // for pretty permalinks
* });
* // display permalink each time state is changed
* permalinkProvider.on({
* statechange: function(provider, name, value) {
* alert(provider.getLink());
* }
* });
*
* @class GeoExt.state.PermalinkProvider
*/
Ext.define('GeoExt.state.PermalinkProvider', {
extend : 'Ext.state.Provider',
requires : [
'GeoExt.Version'
],
alias : 'widget.gx_permalinkprovider',
/**
*
*/
constructor: function(config){
this.callParent(arguments);
config = config || {};
var url = config.url;
delete config.url;
Ext.apply(this, config);
this.state = this.readURL(url);
},
/**
* Specifies whether type of state values should be encoded and decoded.
* Set it to `false` if you work with components that don't require
* encoding types, and want pretty permalinks.
*
* @property{Boolean}
* @private
*/
encodeType: true,
/**
* Create a state object from a URL.
*
* @param url {String} The URL to get the state from.
* @return {Object} The state object.
* @private
*/
readURL: function(url) {
var state = {};
var params = OpenLayers.Util.getParameters(url);
var k, split, stateId;
for(k in params) {
if(params.hasOwnProperty(k)) {
split = k.split("_");
if(split.length > 1) {
stateId = split[0];
state[stateId] = state[stateId] || {};
state[stateId][split.slice(1).join("_")] = this.encodeType ?
this.decodeValue(params[k]) : params[k];
}
}
}
return state;
},
/**
* Returns the permalink corresponding to the current state.
*
* @param base {String} The base URL, optional.
* @return {String} The permalink.
*/
getLink: function(base) {
base = base || document.location.href;
var params = {};
var id, k, state = this.state;
for(id in state) {
if(state.hasOwnProperty(id)) |
}
// merge params in the URL into the state params
OpenLayers.Util.applyDefaults(
params, OpenLayers.Util.getParameters(base));
var paramsStr = OpenLayers.Util.getParameterString(params);
var qMark = base.indexOf("?");
if(qMark > 0) {
base = base.substring(0, qMark);
}
return Ext.urlAppend(base, paramsStr);
}
});
| {
for(k in state[id]) {
params[id + "_" + k] = this.encodeType ?
unescape(this.encodeValue(state[id][k])) : state[id][k];
}
} | conditional_block |
download_content_as_file.ts | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {el} from "helpers/dom";
/**
* Utility method to download arbitrary content as a named file. Most params are self-explanatory.
*
* @param contents: The simplest `BlobPart[]` is any raw content (binary or text), but often just a
* string array. For much of our usage, (e.g., from an AJAX response) this will | * @param mimeType: An optional MIME type; defaults to plaintext.
*/
export function downloadAsFile(contents: BlobPart[], name: string, mimeType = "text/plain") {
const data = new Blob(contents, { type: mimeType });
const a = el("a", { href: URL.createObjectURL(data), download: name, style: "display:none" }, []);
document.body.appendChild(a); // Firefox requires this to be added to the DOM before click()
a.click();
document.body.removeChild(a);
} | * probably be a single string wrapped in an array.
* @param name: The desired filename for the download. | random_line_split |
download_content_as_file.ts | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {el} from "helpers/dom";
/**
* Utility method to download arbitrary content as a named file. Most params are self-explanatory.
*
* @param contents: The simplest `BlobPart[]` is any raw content (binary or text), but often just a
* string array. For much of our usage, (e.g., from an AJAX response) this will
* probably be a single string wrapped in an array.
* @param name: The desired filename for the download.
* @param mimeType: An optional MIME type; defaults to plaintext.
*/
export function | (contents: BlobPart[], name: string, mimeType = "text/plain") {
const data = new Blob(contents, { type: mimeType });
const a = el("a", { href: URL.createObjectURL(data), download: name, style: "display:none" }, []);
document.body.appendChild(a); // Firefox requires this to be added to the DOM before click()
a.click();
document.body.removeChild(a);
}
| downloadAsFile | identifier_name |
download_content_as_file.ts | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {el} from "helpers/dom";
/**
* Utility method to download arbitrary content as a named file. Most params are self-explanatory.
*
* @param contents: The simplest `BlobPart[]` is any raw content (binary or text), but often just a
* string array. For much of our usage, (e.g., from an AJAX response) this will
* probably be a single string wrapped in an array.
* @param name: The desired filename for the download.
* @param mimeType: An optional MIME type; defaults to plaintext.
*/
export function downloadAsFile(contents: BlobPart[], name: string, mimeType = "text/plain") | {
const data = new Blob(contents, { type: mimeType });
const a = el("a", { href: URL.createObjectURL(data), download: name, style: "display:none" }, []);
document.body.appendChild(a); // Firefox requires this to be added to the DOM before click()
a.click();
document.body.removeChild(a);
} | identifier_body |
|
mqtt_receiver.js | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
'use strict';
var EventEmitter = require('events').EventEmitter;
var util = require('util');
var debug = require('debug')('mqtt-common');
var Message = require('azure-iot-common').Message;
/**
* @class module:azure-iot-device-mqtt.MqttReceiver
* @classdesc Object that is used to receive and settle messages from the server.
*
* @param {Object} transport MQTT Client object.
* @throws {ReferenceError} If either mqttClient or topic_subscribe is falsy
* @emits message When a message is received
*/
/**
* @event module:azure-iot-mqtt-base.MqttReceiver#message
* @type {Message}
*/
function MqttReceiver(mqttClient, topic_subscribe) {
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_001: [If the topic_subscribe parameter is falsy, a ReferenceError shall be thrown.]*/
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_002: [If the mqttClient parameter is falsy, a ReferenceError shall be thrown.]*/
if (!mqttClient) { throw new ReferenceError('mqttClient cannot be falsy'); }
if (!topic_subscribe) { throw new ReferenceError('topic_subscribe cannot be falsy'); }
debug('creating a new MqttReceiver');
EventEmitter.call(this);
this._client = mqttClient;
this._topic_subscribe = topic_subscribe;
this._listenersInitialized = false;
var self = this;
this.on('removeListener', function () {
// stop listening for AMQP events if our consumers stop listening for our events
if (self._listenersInitialized && self.listeners('message').length === 0) {
debug('Stopped listening for messages');
self._removeListeners();
}
});
this.on('newListener', function (eventName) {
// lazy-init AMQP event listeners
if (!self._listenersInitialized && eventName === 'message') |
});
}
util.inherits(MqttReceiver, EventEmitter);
MqttReceiver.prototype._setupListeners = function () {
debug('subscribing to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_003: [When a listener is added for the message event, the topic should be subscribed to.]*/
this._client.subscribe(this._topic_subscribe, { qos: 1 }, function () {
debug('subscribed to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_004: [If there is a listener for the message event, a message event shall be emitted for each message received.]*/
this._client.on('message', this._onMqttMessage.bind(this));
this._listenersInitialized = true;
}.bind(this));
};
MqttReceiver.prototype._removeListeners = function () {
this._client.removeListener('message', this._onMqttMessage.bind(this));
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_006: [When there are no more listeners for the message event, the topic should be unsubscribed]*/
debug('unsubscribing from ' + this._topic_subscribe);
this._client.unsubscribe(this._topic_subscribe, function () {
debug('unsubscribed from ' + this._topic_subscribe);
this._listenersInitialized = false;
}.bind(this));
};
MqttReceiver.prototype._onMqttMessage = function (topic, message) {
// needs proper conversion to transport-agnostic message.
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_005: [When a message event is emitted, the parameter shall be of type Message]*/
var msg = new Message(message);
debug('Got a message: ' + message);
this.emit('message', msg);
};
module.exports = MqttReceiver; | {
debug('Now listening for messages');
self._setupListeners();
} | conditional_block |
mqtt_receiver.js | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
'use strict';
var EventEmitter = require('events').EventEmitter;
var util = require('util');
var debug = require('debug')('mqtt-common');
var Message = require('azure-iot-common').Message;
/**
* @class module:azure-iot-device-mqtt.MqttReceiver
* @classdesc Object that is used to receive and settle messages from the server.
*
* @param {Object} transport MQTT Client object.
* @throws {ReferenceError} If either mqttClient or topic_subscribe is falsy
* @emits message When a message is received
*/
/**
* @event module:azure-iot-mqtt-base.MqttReceiver#message
* @type {Message}
*/
function | (mqttClient, topic_subscribe) {
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_001: [If the topic_subscribe parameter is falsy, a ReferenceError shall be thrown.]*/
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_002: [If the mqttClient parameter is falsy, a ReferenceError shall be thrown.]*/
if (!mqttClient) { throw new ReferenceError('mqttClient cannot be falsy'); }
if (!topic_subscribe) { throw new ReferenceError('topic_subscribe cannot be falsy'); }
debug('creating a new MqttReceiver');
EventEmitter.call(this);
this._client = mqttClient;
this._topic_subscribe = topic_subscribe;
this._listenersInitialized = false;
var self = this;
this.on('removeListener', function () {
// stop listening for AMQP events if our consumers stop listening for our events
if (self._listenersInitialized && self.listeners('message').length === 0) {
debug('Stopped listening for messages');
self._removeListeners();
}
});
this.on('newListener', function (eventName) {
// lazy-init AMQP event listeners
if (!self._listenersInitialized && eventName === 'message') {
debug('Now listening for messages');
self._setupListeners();
}
});
}
util.inherits(MqttReceiver, EventEmitter);
MqttReceiver.prototype._setupListeners = function () {
debug('subscribing to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_003: [When a listener is added for the message event, the topic should be subscribed to.]*/
this._client.subscribe(this._topic_subscribe, { qos: 1 }, function () {
debug('subscribed to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_004: [If there is a listener for the message event, a message event shall be emitted for each message received.]*/
this._client.on('message', this._onMqttMessage.bind(this));
this._listenersInitialized = true;
}.bind(this));
};
MqttReceiver.prototype._removeListeners = function () {
this._client.removeListener('message', this._onMqttMessage.bind(this));
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_006: [When there are no more listeners for the message event, the topic should be unsubscribed]*/
debug('unsubscribing from ' + this._topic_subscribe);
this._client.unsubscribe(this._topic_subscribe, function () {
debug('unsubscribed from ' + this._topic_subscribe);
this._listenersInitialized = false;
}.bind(this));
};
MqttReceiver.prototype._onMqttMessage = function (topic, message) {
// needs proper conversion to transport-agnostic message.
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_005: [When a message event is emitted, the parameter shall be of type Message]*/
var msg = new Message(message);
debug('Got a message: ' + message);
this.emit('message', msg);
};
module.exports = MqttReceiver; | MqttReceiver | identifier_name |
mqtt_receiver.js | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
'use strict';
var EventEmitter = require('events').EventEmitter;
var util = require('util');
var debug = require('debug')('mqtt-common');
var Message = require('azure-iot-common').Message;
/**
* @class module:azure-iot-device-mqtt.MqttReceiver
* @classdesc Object that is used to receive and settle messages from the server.
*
* @param {Object} transport MQTT Client object.
* @throws {ReferenceError} If either mqttClient or topic_subscribe is falsy
* @emits message When a message is received
*/
/**
* @event module:azure-iot-mqtt-base.MqttReceiver#message
* @type {Message}
*/
function MqttReceiver(mqttClient, topic_subscribe) | });
this.on('newListener', function (eventName) {
// lazy-init AMQP event listeners
if (!self._listenersInitialized && eventName === 'message') {
debug('Now listening for messages');
self._setupListeners();
}
});
}
util.inherits(MqttReceiver, EventEmitter);
MqttReceiver.prototype._setupListeners = function () {
debug('subscribing to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_003: [When a listener is added for the message event, the topic should be subscribed to.]*/
this._client.subscribe(this._topic_subscribe, { qos: 1 }, function () {
debug('subscribed to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_004: [If there is a listener for the message event, a message event shall be emitted for each message received.]*/
this._client.on('message', this._onMqttMessage.bind(this));
this._listenersInitialized = true;
}.bind(this));
};
MqttReceiver.prototype._removeListeners = function () {
this._client.removeListener('message', this._onMqttMessage.bind(this));
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_006: [When there are no more listeners for the message event, the topic should be unsubscribed]*/
debug('unsubscribing from ' + this._topic_subscribe);
this._client.unsubscribe(this._topic_subscribe, function () {
debug('unsubscribed from ' + this._topic_subscribe);
this._listenersInitialized = false;
}.bind(this));
};
MqttReceiver.prototype._onMqttMessage = function (topic, message) {
// needs proper conversion to transport-agnostic message.
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_005: [When a message event is emitted, the parameter shall be of type Message]*/
var msg = new Message(message);
debug('Got a message: ' + message);
this.emit('message', msg);
};
module.exports = MqttReceiver; | {
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_001: [If the topic_subscribe parameter is falsy, a ReferenceError shall be thrown.]*/
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_002: [If the mqttClient parameter is falsy, a ReferenceError shall be thrown.]*/
if (!mqttClient) { throw new ReferenceError('mqttClient cannot be falsy'); }
if (!topic_subscribe) { throw new ReferenceError('topic_subscribe cannot be falsy'); }
debug('creating a new MqttReceiver');
EventEmitter.call(this);
this._client = mqttClient;
this._topic_subscribe = topic_subscribe;
this._listenersInitialized = false;
var self = this;
this.on('removeListener', function () {
// stop listening for AMQP events if our consumers stop listening for our events
if (self._listenersInitialized && self.listeners('message').length === 0) {
debug('Stopped listening for messages');
self._removeListeners();
} | identifier_body |
mqtt_receiver.js | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
'use strict';
var EventEmitter = require('events').EventEmitter;
var util = require('util');
var debug = require('debug')('mqtt-common');
var Message = require('azure-iot-common').Message;
/**
* @class module:azure-iot-device-mqtt.MqttReceiver
* @classdesc Object that is used to receive and settle messages from the server.
*
* @param {Object} transport MQTT Client object.
* @throws {ReferenceError} If either mqttClient or topic_subscribe is falsy
* @emits message When a message is received
*/
/**
* @event module:azure-iot-mqtt-base.MqttReceiver#message
* @type {Message}
*/
function MqttReceiver(mqttClient, topic_subscribe) {
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_001: [If the topic_subscribe parameter is falsy, a ReferenceError shall be thrown.]*/
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_002: [If the mqttClient parameter is falsy, a ReferenceError shall be thrown.]*/
if (!mqttClient) { throw new ReferenceError('mqttClient cannot be falsy'); }
if (!topic_subscribe) { throw new ReferenceError('topic_subscribe cannot be falsy'); }
debug('creating a new MqttReceiver'); | this._topic_subscribe = topic_subscribe;
this._listenersInitialized = false;
var self = this;
this.on('removeListener', function () {
// stop listening for AMQP events if our consumers stop listening for our events
if (self._listenersInitialized && self.listeners('message').length === 0) {
debug('Stopped listening for messages');
self._removeListeners();
}
});
this.on('newListener', function (eventName) {
// lazy-init AMQP event listeners
if (!self._listenersInitialized && eventName === 'message') {
debug('Now listening for messages');
self._setupListeners();
}
});
}
util.inherits(MqttReceiver, EventEmitter);
MqttReceiver.prototype._setupListeners = function () {
debug('subscribing to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_003: [When a listener is added for the message event, the topic should be subscribed to.]*/
this._client.subscribe(this._topic_subscribe, { qos: 1 }, function () {
debug('subscribed to ' + this._topic_subscribe);
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_004: [If there is a listener for the message event, a message event shall be emitted for each message received.]*/
this._client.on('message', this._onMqttMessage.bind(this));
this._listenersInitialized = true;
}.bind(this));
};
MqttReceiver.prototype._removeListeners = function () {
this._client.removeListener('message', this._onMqttMessage.bind(this));
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_006: [When there are no more listeners for the message event, the topic should be unsubscribed]*/
debug('unsubscribing from ' + this._topic_subscribe);
this._client.unsubscribe(this._topic_subscribe, function () {
debug('unsubscribed from ' + this._topic_subscribe);
this._listenersInitialized = false;
}.bind(this));
};
MqttReceiver.prototype._onMqttMessage = function (topic, message) {
// needs proper conversion to transport-agnostic message.
/*Codes_SRS_NODE_DEVICE_MQTT_RECEIVER_16_005: [When a message event is emitted, the parameter shall be of type Message]*/
var msg = new Message(message);
debug('Got a message: ' + message);
this.emit('message', msg);
};
module.exports = MqttReceiver; | EventEmitter.call(this);
this._client = mqttClient; | random_line_split |
TangoServices.py | #!/usr/bin/env python
import boto3
import sys
import json
import logging
from botocore.exceptions import ClientError
from CoinCollection import ValueRecord
logger = logging.getLogger("tango")
logger.setLevel(logging.DEBUG)
class Cache():
def stash(self, key, record):
logger.debug("Stash request for: " + key)
self.getTable().delete_item(
Key={
'coin_type': key
}
)
self.getTable().put_item(
Item={
'coin_type': key,
'value_record': record.serialize()
}
)
def retrieve(self, key):
try:
logger.debug("Retrieving record for: " + key)
response = self.getTable().get_item(
Key={
'coin_type': key,
}
)
except ClientError as e:
errorMsg = e.response['Error']['Message']
logger.error(errorMsg)
raise ValueError("Unable to retrieve for : " + key + " due to client error.")
else:
try:
rec = response['Item']['value_record']
logger.debug("Response was: " + rec)
return ValueRecord(key,'USD',0.0).fromJSON(rec)
except (KeyError) as kerr:
return None
def expired(self, key):
expired = True
try:
logger.debug("Expiration check for: " + key)
expired = self.retrieve(key).expired()
except (ValueError, AttributeError) as verr:
logger.error("Error occured checking expiration: " + str(verr))
expired = True
return expired
def getTable(self):
try:
self.table
except AttributeError:
self.table = self.getDB().Table('tango_cache') | return self.table
def getDB(self):
try:
self.dynamodb
except AttributeError:
self.dynamodb = boto3.resource('dynamodb', region_name='us-east-1') #, endpoint_url="http://localhost:8000")
return self.dynamodb
def createTangoCache(self):
table = self.getDB().create_table(
TableName='tango_cache',
KeySchema=[
{
'AttributeName': 'coin_type',
'KeyType': 'HASH' #Partition key
},
],
AttributeDefinitions=[
{
'AttributeName': 'coin_type',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
print("Table status:", table.table_status)
if __name__ == '__main__':
if (len(sys.argv) > 1):
if sys.argv[1] == 'createcache':
Cache().createTangoCache() | random_line_split |
|
TangoServices.py | #!/usr/bin/env python
import boto3
import sys
import json
import logging
from botocore.exceptions import ClientError
from CoinCollection import ValueRecord
logger = logging.getLogger("tango")
logger.setLevel(logging.DEBUG)
class Cache():
def stash(self, key, record):
logger.debug("Stash request for: " + key)
self.getTable().delete_item(
Key={
'coin_type': key
}
)
self.getTable().put_item(
Item={
'coin_type': key,
'value_record': record.serialize()
}
)
def retrieve(self, key):
|
def expired(self, key):
expired = True
try:
logger.debug("Expiration check for: " + key)
expired = self.retrieve(key).expired()
except (ValueError, AttributeError) as verr:
logger.error("Error occured checking expiration: " + str(verr))
expired = True
return expired
def getTable(self):
try:
self.table
except AttributeError:
self.table = self.getDB().Table('tango_cache')
return self.table
def getDB(self):
try:
self.dynamodb
except AttributeError:
self.dynamodb = boto3.resource('dynamodb', region_name='us-east-1') #, endpoint_url="http://localhost:8000")
return self.dynamodb
def createTangoCache(self):
table = self.getDB().create_table(
TableName='tango_cache',
KeySchema=[
{
'AttributeName': 'coin_type',
'KeyType': 'HASH' #Partition key
},
],
AttributeDefinitions=[
{
'AttributeName': 'coin_type',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
print("Table status:", table.table_status)
if __name__ == '__main__':
if (len(sys.argv) > 1):
if sys.argv[1] == 'createcache':
Cache().createTangoCache()
| try:
logger.debug("Retrieving record for: " + key)
response = self.getTable().get_item(
Key={
'coin_type': key,
}
)
except ClientError as e:
errorMsg = e.response['Error']['Message']
logger.error(errorMsg)
raise ValueError("Unable to retrieve for : " + key + " due to client error.")
else:
try:
rec = response['Item']['value_record']
logger.debug("Response was: " + rec)
return ValueRecord(key,'USD',0.0).fromJSON(rec)
except (KeyError) as kerr:
return None | identifier_body |
TangoServices.py | #!/usr/bin/env python
import boto3
import sys
import json
import logging
from botocore.exceptions import ClientError
from CoinCollection import ValueRecord
logger = logging.getLogger("tango")
logger.setLevel(logging.DEBUG)
class Cache():
def stash(self, key, record):
logger.debug("Stash request for: " + key)
self.getTable().delete_item(
Key={
'coin_type': key
}
)
self.getTable().put_item(
Item={
'coin_type': key,
'value_record': record.serialize()
}
)
def retrieve(self, key):
try:
logger.debug("Retrieving record for: " + key)
response = self.getTable().get_item(
Key={
'coin_type': key,
}
)
except ClientError as e:
errorMsg = e.response['Error']['Message']
logger.error(errorMsg)
raise ValueError("Unable to retrieve for : " + key + " due to client error.")
else:
try:
rec = response['Item']['value_record']
logger.debug("Response was: " + rec)
return ValueRecord(key,'USD',0.0).fromJSON(rec)
except (KeyError) as kerr:
return None
def expired(self, key):
expired = True
try:
logger.debug("Expiration check for: " + key)
expired = self.retrieve(key).expired()
except (ValueError, AttributeError) as verr:
logger.error("Error occured checking expiration: " + str(verr))
expired = True
return expired
def getTable(self):
try:
self.table
except AttributeError:
self.table = self.getDB().Table('tango_cache')
return self.table
def getDB(self):
try:
self.dynamodb
except AttributeError:
self.dynamodb = boto3.resource('dynamodb', region_name='us-east-1') #, endpoint_url="http://localhost:8000")
return self.dynamodb
def createTangoCache(self):
table = self.getDB().create_table(
TableName='tango_cache',
KeySchema=[
{
'AttributeName': 'coin_type',
'KeyType': 'HASH' #Partition key
},
],
AttributeDefinitions=[
{
'AttributeName': 'coin_type',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
print("Table status:", table.table_status)
if __name__ == '__main__':
if (len(sys.argv) > 1):
| if sys.argv[1] == 'createcache':
Cache().createTangoCache() | conditional_block |
|
TangoServices.py | #!/usr/bin/env python
import boto3
import sys
import json
import logging
from botocore.exceptions import ClientError
from CoinCollection import ValueRecord
logger = logging.getLogger("tango")
logger.setLevel(logging.DEBUG)
class | ():
def stash(self, key, record):
logger.debug("Stash request for: " + key)
self.getTable().delete_item(
Key={
'coin_type': key
}
)
self.getTable().put_item(
Item={
'coin_type': key,
'value_record': record.serialize()
}
)
def retrieve(self, key):
try:
logger.debug("Retrieving record for: " + key)
response = self.getTable().get_item(
Key={
'coin_type': key,
}
)
except ClientError as e:
errorMsg = e.response['Error']['Message']
logger.error(errorMsg)
raise ValueError("Unable to retrieve for : " + key + " due to client error.")
else:
try:
rec = response['Item']['value_record']
logger.debug("Response was: " + rec)
return ValueRecord(key,'USD',0.0).fromJSON(rec)
except (KeyError) as kerr:
return None
def expired(self, key):
expired = True
try:
logger.debug("Expiration check for: " + key)
expired = self.retrieve(key).expired()
except (ValueError, AttributeError) as verr:
logger.error("Error occured checking expiration: " + str(verr))
expired = True
return expired
def getTable(self):
try:
self.table
except AttributeError:
self.table = self.getDB().Table('tango_cache')
return self.table
def getDB(self):
try:
self.dynamodb
except AttributeError:
self.dynamodb = boto3.resource('dynamodb', region_name='us-east-1') #, endpoint_url="http://localhost:8000")
return self.dynamodb
def createTangoCache(self):
table = self.getDB().create_table(
TableName='tango_cache',
KeySchema=[
{
'AttributeName': 'coin_type',
'KeyType': 'HASH' #Partition key
},
],
AttributeDefinitions=[
{
'AttributeName': 'coin_type',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
print("Table status:", table.table_status)
if __name__ == '__main__':
if (len(sys.argv) > 1):
if sys.argv[1] == 'createcache':
Cache().createTangoCache()
| Cache | identifier_name |
testService5.ts | module app {
'use strict';
export interface ItestService5Options {
greeting: string;
}
export interface ItestService5 {
greet(name: string): string;
}
export interface ItestService5Provider extends ng.IServiceProvider {
configure(options: ItestService5Options): void;
}
class testService5Provider implements ItestService5Provider {
options: ItestService5Options = {
greeting: 'hello'
}
constructor() |
configure(options: ItestService5Options): void {
angular.extend(this.options, options);
}
$get(testService4: app.ItestService4): ItestService5 {
var service: ItestService5 = {
greet: (name: string) => {
return this.options.greeting + ' ' + testService4.greet(name);
}
};
return service;
}
}
angular
.module('app')
.provider('testService5', testService5Provider);
} | {
this.$get.$inject = ['testService4'];
} | identifier_body |
testService5.ts | module app {
'use strict';
export interface ItestService5Options {
greeting: string;
}
export interface ItestService5 {
greet(name: string): string;
}
export interface ItestService5Provider extends ng.IServiceProvider {
configure(options: ItestService5Options): void;
}
class | implements ItestService5Provider {
options: ItestService5Options = {
greeting: 'hello'
}
constructor() {
this.$get.$inject = ['testService4'];
}
configure(options: ItestService5Options): void {
angular.extend(this.options, options);
}
$get(testService4: app.ItestService4): ItestService5 {
var service: ItestService5 = {
greet: (name: string) => {
return this.options.greeting + ' ' + testService4.greet(name);
}
};
return service;
}
}
angular
.module('app')
.provider('testService5', testService5Provider);
} | testService5Provider | identifier_name |
testService5.ts | module app {
'use strict';
export interface ItestService5Options {
greeting: string;
}
export interface ItestService5 {
greet(name: string): string;
}
export interface ItestService5Provider extends ng.IServiceProvider {
configure(options: ItestService5Options): void;
}
class testService5Provider implements ItestService5Provider {
options: ItestService5Options = {
greeting: 'hello'
}
constructor() {
this.$get.$inject = ['testService4'];
}
configure(options: ItestService5Options): void {
angular.extend(this.options, options);
}
$get(testService4: app.ItestService4): ItestService5 {
var service: ItestService5 = {
greet: (name: string) => {
return this.options.greeting + ' ' + testService4.greet(name);
}
};
| return service;
}
}
angular
.module('app')
.provider('testService5', testService5Provider);
} | random_line_split |
|
setup.py | from cx_Freeze import setup, Executable
import sys
base = None
if sys.platform == "win32":
#base = "Win32GUI"
base = "Console"
executables = [
Executable("G_outgauge.py",
base=base,
icon="icon.ico"
)
]
include_files=[]
include_files.append(("LogitechLcdEnginesWrapper.dll","LogitechLcdEnginesWrapper.dll"))
buildOptions = dict(
compressed=False,
includes=[],
packages=[],
include_files=include_files,
excludes= ['_gtkagg', '_tkagg', 'bsddb', 'curses', 'email', 'pywin.debugger',
'pywin.debugger.dbgcon', 'pywin.dialogs', 'tcl',
'Tkconstants', 'Tkinter'],
zip_includes=[]
)
setup(
name = "G_outgauge.py", | description = "OutGauge python application for Logitech periperal with lcd color screen (G19)",
options=dict(build_exe=buildOptions),
executables = executables
) | version = "0.1", | random_line_split |
setup.py | from cx_Freeze import setup, Executable
import sys
base = None
if sys.platform == "win32":
#base = "Win32GUI"
|
executables = [
Executable("G_outgauge.py",
base=base,
icon="icon.ico"
)
]
include_files=[]
include_files.append(("LogitechLcdEnginesWrapper.dll","LogitechLcdEnginesWrapper.dll"))
buildOptions = dict(
compressed=False,
includes=[],
packages=[],
include_files=include_files,
excludes= ['_gtkagg', '_tkagg', 'bsddb', 'curses', 'email', 'pywin.debugger',
'pywin.debugger.dbgcon', 'pywin.dialogs', 'tcl',
'Tkconstants', 'Tkinter'],
zip_includes=[]
)
setup(
name = "G_outgauge.py",
version = "0.1",
description = "OutGauge python application for Logitech periperal with lcd color screen (G19)",
options=dict(build_exe=buildOptions),
executables = executables
) | base = "Console" | conditional_block |
automationactions.py | # coding=utf-8
"""
The Automations API endpoint actions
Note: This is a paid feature
Documentation: http://developer.mailchimp.com/documentation/mailchimp/reference/automations/
"""
from __future__ import unicode_literals
from mailchimp3.baseapi import BaseApi
class AutomationActions(BaseApi):
"""
Actions for the Automations endpoint.
"""
def __init__(self, *args, **kwargs):
"""
Initialize the endpoint
"""
super(AutomationActions, self).__init__(*args, **kwargs)
self.endpoint = 'automations'
self.workflow_id = None
# Paid feature
def pause(self, workflow_id):
"""
Pause all emails in a specific Automation workflow.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str` | """
self.workflow_id = workflow_id
return self._mc_client._post(url=self._build_path(workflow_id, 'actions/pause-all-emails'))
# Paid feature
def start(self, workflow_id):
"""
Start all emails in an Automation workflow.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
"""
self.workflow_id = workflow_id
return self._mc_client._post(url=self._build_path(workflow_id, 'actions/start-all-emails')) | random_line_split |
|
automationactions.py | # coding=utf-8
"""
The Automations API endpoint actions
Note: This is a paid feature
Documentation: http://developer.mailchimp.com/documentation/mailchimp/reference/automations/
"""
from __future__ import unicode_literals
from mailchimp3.baseapi import BaseApi
class | (BaseApi):
"""
Actions for the Automations endpoint.
"""
def __init__(self, *args, **kwargs):
"""
Initialize the endpoint
"""
super(AutomationActions, self).__init__(*args, **kwargs)
self.endpoint = 'automations'
self.workflow_id = None
# Paid feature
def pause(self, workflow_id):
"""
Pause all emails in a specific Automation workflow.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
"""
self.workflow_id = workflow_id
return self._mc_client._post(url=self._build_path(workflow_id, 'actions/pause-all-emails'))
# Paid feature
def start(self, workflow_id):
"""
Start all emails in an Automation workflow.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
"""
self.workflow_id = workflow_id
return self._mc_client._post(url=self._build_path(workflow_id, 'actions/start-all-emails'))
| AutomationActions | identifier_name |
automationactions.py | # coding=utf-8
"""
The Automations API endpoint actions
Note: This is a paid feature
Documentation: http://developer.mailchimp.com/documentation/mailchimp/reference/automations/
"""
from __future__ import unicode_literals
from mailchimp3.baseapi import BaseApi
class AutomationActions(BaseApi):
"""
Actions for the Automations endpoint.
"""
def __init__(self, *args, **kwargs):
"""
Initialize the endpoint
"""
super(AutomationActions, self).__init__(*args, **kwargs)
self.endpoint = 'automations'
self.workflow_id = None
# Paid feature
def pause(self, workflow_id):
"""
Pause all emails in a specific Automation workflow.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
"""
self.workflow_id = workflow_id
return self._mc_client._post(url=self._build_path(workflow_id, 'actions/pause-all-emails'))
# Paid feature
def start(self, workflow_id):
| """
Start all emails in an Automation workflow.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
"""
self.workflow_id = workflow_id
return self._mc_client._post(url=self._build_path(workflow_id, 'actions/start-all-emails')) | identifier_body |
|
shell.py | # The MIT License (MIT)
#
# Copyright (c) 2016 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cliff import app
from cliff import commandmanager
from pbr import version as app_version
import sys
from kanboard_cli.commands import application
from kanboard_cli.commands import project
from kanboard_cli.commands import task
from kanboard_cli import client
class KanboardShell(app.App):
def __init__(self):
super(KanboardShell, self).__init__(
description='Kanboard Command Line Client',
version=app_version.VersionInfo('kanboard_cli').version_string(),
command_manager=commandmanager.CommandManager('kanboard.cli'),
deferred_help=True)
self.client = None
self.is_super_user = True
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(KanboardShell, self).build_option_parser(
description, version, argparse_kwargs=argparse_kwargs)
parser.add_argument(
'--url',
metavar='<api url>',
help='Kanboard API URL',
)
parser.add_argument(
'--username',
metavar='<api username>',
help='API username',
)
parser.add_argument(
'--password',
metavar='<api password>',
help='API password/token',
)
parser.add_argument(
'--auth-header',
metavar='<authentication header>',
help='API authentication header',
)
return parser
def initialize_app(self, argv):
client_manager = client.ClientManager(self.options)
self.client = client_manager.get_client()
self.is_super_user = client_manager.is_super_user()
self.command_manager.add_command('app version', application.ShowVersion)
self.command_manager.add_command('app timezone', application.ShowTimezone)
self.command_manager.add_command('project show', project.ShowProject)
self.command_manager.add_command('project list', project.ListProjects)
self.command_manager.add_command('task create', task.CreateTask)
self.command_manager.add_command('task list', task.ListTasks)
def main(argv=sys.argv[1:]):
|
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| return KanboardShell().run(argv) | identifier_body |
shell.py | # The MIT License (MIT)
#
# Copyright (c) 2016 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cliff import app
from cliff import commandmanager
from pbr import version as app_version
import sys
from kanboard_cli.commands import application
from kanboard_cli.commands import project
from kanboard_cli.commands import task
from kanboard_cli import client
class KanboardShell(app.App):
def __init__(self):
super(KanboardShell, self).__init__(
description='Kanboard Command Line Client',
version=app_version.VersionInfo('kanboard_cli').version_string(),
command_manager=commandmanager.CommandManager('kanboard.cli'),
deferred_help=True)
self.client = None
self.is_super_user = True
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(KanboardShell, self).build_option_parser(
description, version, argparse_kwargs=argparse_kwargs)
parser.add_argument(
'--url',
metavar='<api url>',
help='Kanboard API URL',
)
parser.add_argument(
'--username',
metavar='<api username>',
help='API username',
)
parser.add_argument(
'--password',
metavar='<api password>',
help='API password/token',
)
parser.add_argument(
'--auth-header',
metavar='<authentication header>',
help='API authentication header',
)
return parser
def | (self, argv):
client_manager = client.ClientManager(self.options)
self.client = client_manager.get_client()
self.is_super_user = client_manager.is_super_user()
self.command_manager.add_command('app version', application.ShowVersion)
self.command_manager.add_command('app timezone', application.ShowTimezone)
self.command_manager.add_command('project show', project.ShowProject)
self.command_manager.add_command('project list', project.ListProjects)
self.command_manager.add_command('task create', task.CreateTask)
self.command_manager.add_command('task list', task.ListTasks)
def main(argv=sys.argv[1:]):
return KanboardShell().run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| initialize_app | identifier_name |
shell.py | # The MIT License (MIT)
#
# Copyright (c) 2016 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is | #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cliff import app
from cliff import commandmanager
from pbr import version as app_version
import sys
from kanboard_cli.commands import application
from kanboard_cli.commands import project
from kanboard_cli.commands import task
from kanboard_cli import client
class KanboardShell(app.App):
def __init__(self):
super(KanboardShell, self).__init__(
description='Kanboard Command Line Client',
version=app_version.VersionInfo('kanboard_cli').version_string(),
command_manager=commandmanager.CommandManager('kanboard.cli'),
deferred_help=True)
self.client = None
self.is_super_user = True
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(KanboardShell, self).build_option_parser(
description, version, argparse_kwargs=argparse_kwargs)
parser.add_argument(
'--url',
metavar='<api url>',
help='Kanboard API URL',
)
parser.add_argument(
'--username',
metavar='<api username>',
help='API username',
)
parser.add_argument(
'--password',
metavar='<api password>',
help='API password/token',
)
parser.add_argument(
'--auth-header',
metavar='<authentication header>',
help='API authentication header',
)
return parser
def initialize_app(self, argv):
client_manager = client.ClientManager(self.options)
self.client = client_manager.get_client()
self.is_super_user = client_manager.is_super_user()
self.command_manager.add_command('app version', application.ShowVersion)
self.command_manager.add_command('app timezone', application.ShowTimezone)
self.command_manager.add_command('project show', project.ShowProject)
self.command_manager.add_command('project list', project.ListProjects)
self.command_manager.add_command('task create', task.CreateTask)
self.command_manager.add_command('task list', task.ListTasks)
def main(argv=sys.argv[1:]):
return KanboardShell().run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) | # furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software. | random_line_split |
shell.py | # The MIT License (MIT)
#
# Copyright (c) 2016 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cliff import app
from cliff import commandmanager
from pbr import version as app_version
import sys
from kanboard_cli.commands import application
from kanboard_cli.commands import project
from kanboard_cli.commands import task
from kanboard_cli import client
class KanboardShell(app.App):
def __init__(self):
super(KanboardShell, self).__init__(
description='Kanboard Command Line Client',
version=app_version.VersionInfo('kanboard_cli').version_string(),
command_manager=commandmanager.CommandManager('kanboard.cli'),
deferred_help=True)
self.client = None
self.is_super_user = True
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(KanboardShell, self).build_option_parser(
description, version, argparse_kwargs=argparse_kwargs)
parser.add_argument(
'--url',
metavar='<api url>',
help='Kanboard API URL',
)
parser.add_argument(
'--username',
metavar='<api username>',
help='API username',
)
parser.add_argument(
'--password',
metavar='<api password>',
help='API password/token',
)
parser.add_argument(
'--auth-header',
metavar='<authentication header>',
help='API authentication header',
)
return parser
def initialize_app(self, argv):
client_manager = client.ClientManager(self.options)
self.client = client_manager.get_client()
self.is_super_user = client_manager.is_super_user()
self.command_manager.add_command('app version', application.ShowVersion)
self.command_manager.add_command('app timezone', application.ShowTimezone)
self.command_manager.add_command('project show', project.ShowProject)
self.command_manager.add_command('project list', project.ListProjects)
self.command_manager.add_command('task create', task.CreateTask)
self.command_manager.add_command('task list', task.ListTasks)
def main(argv=sys.argv[1:]):
return KanboardShell().run(argv)
if __name__ == '__main__':
| sys.exit(main(sys.argv[1:])) | conditional_block |
|
change_request.py | # -*- encoding: utf-8 -*-
from openerp.osv import osv, fields
class LeadToChangeRequestWizard(osv.TransientModel):
| def action_lead_to_change_request(self, cr, uid, ids, context=None):
# get the wizards and models
wizards = self.browse(cr, uid, ids, context=context)
lead_obj = self.pool["crm.lead"]
cr_obj = self.pool["change.management.change"]
attachment_obj = self.pool['ir.attachment']
for wizard in wizards:
# get the lead to transform
lead = wizard.lead_id
partner = self._find_matching_partner(cr, uid, context=context)
if not partner and (lead.partner_name or lead.contact_name):
partner_ids = lead_obj.handle_partner_assignation(
cr, uid, [lead.id], context=context
)
partner = partner_ids[lead.id]
# create new change request
vals = {
"description": lead.name,
"description_event": lead.description,
"email_from": lead.email_from,
"project_id": lead.project_id.id,
"stakeholder_id": partner,
"author_id": uid,
"change_category_id": wizard.change_category_id.id,
}
change_id = cr_obj.create(cr, uid, vals, context=None)
change = cr_obj.browse(cr, uid, change_id, context=None)
# move the mail thread
lead_obj.message_change_thread(
cr, uid, lead.id, change_id,
"change.management.change", context=context
)
# Move attachments
attachment_ids = attachment_obj.search(
cr, uid,
[('res_model', '=', 'crm.lead'), ('res_id', '=', lead.id)],
context=context
)
attachment_obj.write(
cr, uid, attachment_ids,
{'res_model': 'change.management.change', 'res_id': change_id},
context=context
)
# Archive the lead
lead_obj.write(
cr, uid, [lead.id], {'active': False}, context=context)
# delete the lead
# lead_obj.unlink(cr, uid, [lead.id], context=None)
# return the action to go to the form view of the new CR
view_id = self.pool.get('ir.ui.view').search(
cr, uid,
[
('model', '=', 'change.management.change'),
('name', '=', 'change_form_view')
]
)
return {
'name': 'CR created',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'res_model': 'change.management.change',
'type': 'ir.actions.act_window',
'res_id': change_id,
'context': context
}
| """
wizard to convert a Lead into a Change Request and move the Mail Thread
"""
_name = "crm.lead2cr.wizard"
_inherit = 'crm.partner.binding'
_columns = {
"lead_id": fields.many2one(
"crm.lead", "Lead", domain=[("type", "=", "lead")]
),
# "project_id": fields.many2one("project.project", "Project"),
"change_category_id": fields.many2one(
"change.management.category", "Change Category"
),
}
_defaults = {
"lead_id": lambda self, cr, uid, context=None: context.get('active_id')
}
| identifier_body |
change_request.py | # -*- encoding: utf-8 -*-
from openerp.osv import osv, fields
class LeadToChangeRequestWizard(osv.TransientModel):
"""
wizard to convert a Lead into a Change Request and move the Mail Thread
"""
_name = "crm.lead2cr.wizard"
_inherit = 'crm.partner.binding'
_columns = {
"lead_id": fields.many2one(
"crm.lead", "Lead", domain=[("type", "=", "lead")]
),
# "project_id": fields.many2one("project.project", "Project"),
"change_category_id": fields.many2one(
"change.management.category", "Change Category"
),
}
_defaults = { | def action_lead_to_change_request(self, cr, uid, ids, context=None):
# get the wizards and models
wizards = self.browse(cr, uid, ids, context=context)
lead_obj = self.pool["crm.lead"]
cr_obj = self.pool["change.management.change"]
attachment_obj = self.pool['ir.attachment']
for wizard in wizards:
# get the lead to transform
lead = wizard.lead_id
partner = self._find_matching_partner(cr, uid, context=context)
if not partner and (lead.partner_name or lead.contact_name):
partner_ids = lead_obj.handle_partner_assignation(
cr, uid, [lead.id], context=context
)
partner = partner_ids[lead.id]
# create new change request
vals = {
"description": lead.name,
"description_event": lead.description,
"email_from": lead.email_from,
"project_id": lead.project_id.id,
"stakeholder_id": partner,
"author_id": uid,
"change_category_id": wizard.change_category_id.id,
}
change_id = cr_obj.create(cr, uid, vals, context=None)
change = cr_obj.browse(cr, uid, change_id, context=None)
# move the mail thread
lead_obj.message_change_thread(
cr, uid, lead.id, change_id,
"change.management.change", context=context
)
# Move attachments
attachment_ids = attachment_obj.search(
cr, uid,
[('res_model', '=', 'crm.lead'), ('res_id', '=', lead.id)],
context=context
)
attachment_obj.write(
cr, uid, attachment_ids,
{'res_model': 'change.management.change', 'res_id': change_id},
context=context
)
# Archive the lead
lead_obj.write(
cr, uid, [lead.id], {'active': False}, context=context)
# delete the lead
# lead_obj.unlink(cr, uid, [lead.id], context=None)
# return the action to go to the form view of the new CR
view_id = self.pool.get('ir.ui.view').search(
cr, uid,
[
('model', '=', 'change.management.change'),
('name', '=', 'change_form_view')
]
)
return {
'name': 'CR created',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'res_model': 'change.management.change',
'type': 'ir.actions.act_window',
'res_id': change_id,
'context': context
} | "lead_id": lambda self, cr, uid, context=None: context.get('active_id')
}
| random_line_split |
change_request.py | # -*- encoding: utf-8 -*-
from openerp.osv import osv, fields
class LeadToChangeRequestWizard(osv.TransientModel):
"""
wizard to convert a Lead into a Change Request and move the Mail Thread
"""
_name = "crm.lead2cr.wizard"
_inherit = 'crm.partner.binding'
_columns = {
"lead_id": fields.many2one(
"crm.lead", "Lead", domain=[("type", "=", "lead")]
),
# "project_id": fields.many2one("project.project", "Project"),
"change_category_id": fields.many2one(
"change.management.category", "Change Category"
),
}
_defaults = {
"lead_id": lambda self, cr, uid, context=None: context.get('active_id')
}
def | (self, cr, uid, ids, context=None):
# get the wizards and models
wizards = self.browse(cr, uid, ids, context=context)
lead_obj = self.pool["crm.lead"]
cr_obj = self.pool["change.management.change"]
attachment_obj = self.pool['ir.attachment']
for wizard in wizards:
# get the lead to transform
lead = wizard.lead_id
partner = self._find_matching_partner(cr, uid, context=context)
if not partner and (lead.partner_name or lead.contact_name):
partner_ids = lead_obj.handle_partner_assignation(
cr, uid, [lead.id], context=context
)
partner = partner_ids[lead.id]
# create new change request
vals = {
"description": lead.name,
"description_event": lead.description,
"email_from": lead.email_from,
"project_id": lead.project_id.id,
"stakeholder_id": partner,
"author_id": uid,
"change_category_id": wizard.change_category_id.id,
}
change_id = cr_obj.create(cr, uid, vals, context=None)
change = cr_obj.browse(cr, uid, change_id, context=None)
# move the mail thread
lead_obj.message_change_thread(
cr, uid, lead.id, change_id,
"change.management.change", context=context
)
# Move attachments
attachment_ids = attachment_obj.search(
cr, uid,
[('res_model', '=', 'crm.lead'), ('res_id', '=', lead.id)],
context=context
)
attachment_obj.write(
cr, uid, attachment_ids,
{'res_model': 'change.management.change', 'res_id': change_id},
context=context
)
# Archive the lead
lead_obj.write(
cr, uid, [lead.id], {'active': False}, context=context)
# delete the lead
# lead_obj.unlink(cr, uid, [lead.id], context=None)
# return the action to go to the form view of the new CR
view_id = self.pool.get('ir.ui.view').search(
cr, uid,
[
('model', '=', 'change.management.change'),
('name', '=', 'change_form_view')
]
)
return {
'name': 'CR created',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'res_model': 'change.management.change',
'type': 'ir.actions.act_window',
'res_id': change_id,
'context': context
}
| action_lead_to_change_request | identifier_name |
change_request.py | # -*- encoding: utf-8 -*-
from openerp.osv import osv, fields
class LeadToChangeRequestWizard(osv.TransientModel):
"""
wizard to convert a Lead into a Change Request and move the Mail Thread
"""
_name = "crm.lead2cr.wizard"
_inherit = 'crm.partner.binding'
_columns = {
"lead_id": fields.many2one(
"crm.lead", "Lead", domain=[("type", "=", "lead")]
),
# "project_id": fields.many2one("project.project", "Project"),
"change_category_id": fields.many2one(
"change.management.category", "Change Category"
),
}
_defaults = {
"lead_id": lambda self, cr, uid, context=None: context.get('active_id')
}
def action_lead_to_change_request(self, cr, uid, ids, context=None):
# get the wizards and models
wizards = self.browse(cr, uid, ids, context=context)
lead_obj = self.pool["crm.lead"]
cr_obj = self.pool["change.management.change"]
attachment_obj = self.pool['ir.attachment']
for wizard in wizards:
# get the lead to transform
| change = cr_obj.browse(cr, uid, change_id, context=None)
# move the mail thread
lead_obj.message_change_thread(
cr, uid, lead.id, change_id,
"change.management.change", context=context
)
# Move attachments
attachment_ids = attachment_obj.search(
cr, uid,
[('res_model', '=', 'crm.lead'), ('res_id', '=', lead.id)],
context=context
)
attachment_obj.write(
cr, uid, attachment_ids,
{'res_model': 'change.management.change', 'res_id': change_id},
context=context
)
# Archive the lead
lead_obj.write(
cr, uid, [lead.id], {'active': False}, context=context)
# delete the lead
# lead_obj.unlink(cr, uid, [lead.id], context=None)
# return the action to go to the form view of the new CR
view_id = self.pool.get('ir.ui.view').search(
cr, uid,
[
('model', '=', 'change.management.change'),
('name', '=', 'change_form_view')
]
)
return {
'name': 'CR created',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'res_model': 'change.management.change',
'type': 'ir.actions.act_window',
'res_id': change_id,
'context': context
}
| lead = wizard.lead_id
partner = self._find_matching_partner(cr, uid, context=context)
if not partner and (lead.partner_name or lead.contact_name):
partner_ids = lead_obj.handle_partner_assignation(
cr, uid, [lead.id], context=context
)
partner = partner_ids[lead.id]
# create new change request
vals = {
"description": lead.name,
"description_event": lead.description,
"email_from": lead.email_from,
"project_id": lead.project_id.id,
"stakeholder_id": partner,
"author_id": uid,
"change_category_id": wizard.change_category_id.id,
}
change_id = cr_obj.create(cr, uid, vals, context=None) | conditional_block |
semverCompare.test.js | import { expect } from 'chai';
import semverCompare from './semverCompare';
describe('semverCompare', () => {
const chaos = [
'2.5.10.4159',
'0.5', | '0.4.1',
'1',
'1.1',
'2.5.0',
'2',
'2.5.10',
'10.5',
'1.25.4',
'1.2.15',
];
const order = [
'0.4.1',
'0.5',
'1',
'1.1',
'1.2.15',
'1.25.4',
'2',
'2.5.0',
'2.5.10',
'2.5.10.4159',
'10.5',
];
it('sorts arrays correctly', () => {
expect(chaos.sort(semverCompare)).to.deep.equal(order);
});
it('leaves equal version numbers in place', () => {
expect(['1', '1.0.0'].sort(semverCompare)).to.deep.equal(['1', '1.0.0']);
expect(['1.0.0', '1'].sort(semverCompare)).to.deep.equal(['1.0.0', '1']);
});
}); | random_line_split |
|
exponential-backoff.js | /**
* Attempts to try a function repeatedly until either success or max number of tries has been reached.
*
* @param {Function} toTry The function to try repeatedly. Thrown errors cause a retry. This function returns a Promise.
* @param {number} max The maximum number of function retries.
* @param {number} delay The base delay in ms. This doubles after every attempt.
* @param {Function} [predicate] A function to filter errors against. It receives error objects
* and must return true if the error is to be retried.
* @return {Promise} Rejects when toTry has failed max times. Resolves if successful once.
*/
function exponentialBackoff(toTry, max, delay, predicate) |
module.exports = exponentialBackoff;
| {
return toTry().catch((err) => {
if (max <= 0) {
return Promise.reject(err);
}
if (predicate == null || predicate(err)) {
// This delays the Promise by delay.
return new Promise((resolve) => setTimeout(resolve, delay))
.then(() => {
return exponentialBackoff(toTry, --max, delay * 2);
});
}
else {
return Promise.reject(err);
}
});
} | identifier_body |
exponential-backoff.js | /**
* Attempts to try a function repeatedly until either success or max number of tries has been reached.
*
* @param {Function} toTry The function to try repeatedly. Thrown errors cause a retry. This function returns a Promise.
* @param {number} max The maximum number of function retries.
* @param {number} delay The base delay in ms. This doubles after every attempt.
* @param {Function} [predicate] A function to filter errors against. It receives error objects
* and must return true if the error is to be retried.
* @return {Promise} Rejects when toTry has failed max times. Resolves if successful once.
*/
function | (toTry, max, delay, predicate) {
return toTry().catch((err) => {
if (max <= 0) {
return Promise.reject(err);
}
if (predicate == null || predicate(err)) {
// This delays the Promise by delay.
return new Promise((resolve) => setTimeout(resolve, delay))
.then(() => {
return exponentialBackoff(toTry, --max, delay * 2);
});
}
else {
return Promise.reject(err);
}
});
}
module.exports = exponentialBackoff;
| exponentialBackoff | identifier_name |
exponential-backoff.js | /**
* Attempts to try a function repeatedly until either success or max number of tries has been reached.
*
* @param {Function} toTry The function to try repeatedly. Thrown errors cause a retry. This function returns a Promise.
* @param {number} max The maximum number of function retries.
* @param {number} delay The base delay in ms. This doubles after every attempt.
* @param {Function} [predicate] A function to filter errors against. It receives error objects
* and must return true if the error is to be retried.
* @return {Promise} Rejects when toTry has failed max times. Resolves if successful once.
*/
function exponentialBackoff(toTry, max, delay, predicate) {
return toTry().catch((err) => {
if (max <= 0) {
return Promise.reject(err);
}
if (predicate == null || predicate(err)) { | // This delays the Promise by delay.
return new Promise((resolve) => setTimeout(resolve, delay))
.then(() => {
return exponentialBackoff(toTry, --max, delay * 2);
});
}
else {
return Promise.reject(err);
}
});
}
module.exports = exponentialBackoff; | random_line_split |
|
exponential-backoff.js | /**
* Attempts to try a function repeatedly until either success or max number of tries has been reached.
*
* @param {Function} toTry The function to try repeatedly. Thrown errors cause a retry. This function returns a Promise.
* @param {number} max The maximum number of function retries.
* @param {number} delay The base delay in ms. This doubles after every attempt.
* @param {Function} [predicate] A function to filter errors against. It receives error objects
* and must return true if the error is to be retried.
* @return {Promise} Rejects when toTry has failed max times. Resolves if successful once.
*/
function exponentialBackoff(toTry, max, delay, predicate) {
return toTry().catch((err) => {
if (max <= 0) {
return Promise.reject(err);
}
if (predicate == null || predicate(err)) {
// This delays the Promise by delay.
return new Promise((resolve) => setTimeout(resolve, delay))
.then(() => {
return exponentialBackoff(toTry, --max, delay * 2);
});
}
else |
});
}
module.exports = exponentialBackoff;
| {
return Promise.reject(err);
} | conditional_block |
base_transform.py | # -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
|
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa
| self._dynamic = d | identifier_body |
base_transform.py | # -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def | (self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa
| update | identifier_name |
base_transform.py | # -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
|
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa
| self._shader_map = Function(self.glsl_map) | conditional_block |
base_transform.py | # -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
| Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa | # If True, then for any 3 colinear points, the
# transformed points will also be colinear. | random_line_split |
test_config.py | """Test PbenchConfig class and objects
"""
import pytest
from pathlib import Path
from pbench import PbenchConfig
from pbench.common.exceptions import BadConfig
_config_path_prefix = Path("lib/pbench/test/unit/common/config")
class TestPbenchConfig:
def test_empty_config(self):
config = PbenchConfig(_config_path_prefix / "pbench.cfg")
assert config.TZ == "UTC", f"Unexpected TZ value, {config.TZ!r}"
assert (
config.log_fmt is None
), f"Unexpected log format value, {config.log_fmt!r}"
assert (
config.default_logging_level == "INFO"
), f"Unexpected default logging level, {config.default_logging_level!r}"
assert (
config.log_using_caller_directory is False
), f"Unexpected 'log using caller directory' boolean, {config.log_using_caller_directory!r}"
assert config.log_dir is None, f"Unexpected log directory, {config.log_dir!r}"
assert (
config.logger_type == "devlog"
), f"Unexpected logger type, {config.logger_type!r}"
with pytest.raises(AttributeError):
print(f"{config.logger_host!r}")
with pytest.raises(AttributeError):
print(f"{config.logger_port!r}")
assert "42" == config.get(
"other", "foobar"
), "Failed to fetch 'foobar' from 'DEFAULT' section"
assert "43" == config.get(
"other", "barfoo"
), "Failed to fetch 'barfoo' from 'other' section"
assert isinstance(
config.files, list
), f"Unexpected object class for 'files', {config.files.__class__!r}"
def test_log_dir_provided(self):
config = PbenchConfig(_config_path_prefix / "logdir.cfg")
assert ( |
def test_logger_type_provided(self):
config = PbenchConfig(_config_path_prefix / "hostport.cfg")
assert (
config.logger_type == "hostport"
), f"Unexpected logger type, {config.logger_type!r}"
assert (
config.logger_host == "logger.example.com"
), f"Unexpected logger host value, {config.logger_host!r}"
assert (
config.logger_port == "42"
), f"Unexpected logger port value, {config.logger_port!r}"
def test_logger_type_hostport_missing(self):
with pytest.raises(BadConfig):
PbenchConfig(_config_path_prefix / "hostport-missing.cfg")
with pytest.raises(BadConfig):
PbenchConfig(_config_path_prefix / "hostport-missing-port.cfg") | config.log_dir == "/srv/log/directory"
), f"Unexpected log directory, {config.log_dir!r}" | random_line_split |
test_config.py | """Test PbenchConfig class and objects
"""
import pytest
from pathlib import Path
from pbench import PbenchConfig
from pbench.common.exceptions import BadConfig
_config_path_prefix = Path("lib/pbench/test/unit/common/config")
class TestPbenchConfig:
def test_empty_config(self):
config = PbenchConfig(_config_path_prefix / "pbench.cfg")
assert config.TZ == "UTC", f"Unexpected TZ value, {config.TZ!r}"
assert (
config.log_fmt is None
), f"Unexpected log format value, {config.log_fmt!r}"
assert (
config.default_logging_level == "INFO"
), f"Unexpected default logging level, {config.default_logging_level!r}"
assert (
config.log_using_caller_directory is False
), f"Unexpected 'log using caller directory' boolean, {config.log_using_caller_directory!r}"
assert config.log_dir is None, f"Unexpected log directory, {config.log_dir!r}"
assert (
config.logger_type == "devlog"
), f"Unexpected logger type, {config.logger_type!r}"
with pytest.raises(AttributeError):
print(f"{config.logger_host!r}")
with pytest.raises(AttributeError):
print(f"{config.logger_port!r}")
assert "42" == config.get(
"other", "foobar"
), "Failed to fetch 'foobar' from 'DEFAULT' section"
assert "43" == config.get(
"other", "barfoo"
), "Failed to fetch 'barfoo' from 'other' section"
assert isinstance(
config.files, list
), f"Unexpected object class for 'files', {config.files.__class__!r}"
def test_log_dir_provided(self):
config = PbenchConfig(_config_path_prefix / "logdir.cfg")
assert (
config.log_dir == "/srv/log/directory"
), f"Unexpected log directory, {config.log_dir!r}"
def | (self):
config = PbenchConfig(_config_path_prefix / "hostport.cfg")
assert (
config.logger_type == "hostport"
), f"Unexpected logger type, {config.logger_type!r}"
assert (
config.logger_host == "logger.example.com"
), f"Unexpected logger host value, {config.logger_host!r}"
assert (
config.logger_port == "42"
), f"Unexpected logger port value, {config.logger_port!r}"
def test_logger_type_hostport_missing(self):
with pytest.raises(BadConfig):
PbenchConfig(_config_path_prefix / "hostport-missing.cfg")
with pytest.raises(BadConfig):
PbenchConfig(_config_path_prefix / "hostport-missing-port.cfg")
| test_logger_type_provided | identifier_name |
test_config.py | """Test PbenchConfig class and objects
"""
import pytest
from pathlib import Path
from pbench import PbenchConfig
from pbench.common.exceptions import BadConfig
_config_path_prefix = Path("lib/pbench/test/unit/common/config")
class TestPbenchConfig:
| assert "42" == config.get(
"other", "foobar"
), "Failed to fetch 'foobar' from 'DEFAULT' section"
assert "43" == config.get(
"other", "barfoo"
), "Failed to fetch 'barfoo' from 'other' section"
assert isinstance(
config.files, list
), f"Unexpected object class for 'files', {config.files.__class__!r}"
def test_log_dir_provided(self):
config = PbenchConfig(_config_path_prefix / "logdir.cfg")
assert (
config.log_dir == "/srv/log/directory"
), f"Unexpected log directory, {config.log_dir!r}"
def test_logger_type_provided(self):
config = PbenchConfig(_config_path_prefix / "hostport.cfg")
assert (
config.logger_type == "hostport"
), f"Unexpected logger type, {config.logger_type!r}"
assert (
config.logger_host == "logger.example.com"
), f"Unexpected logger host value, {config.logger_host!r}"
assert (
config.logger_port == "42"
), f"Unexpected logger port value, {config.logger_port!r}"
def test_logger_type_hostport_missing(self):
with pytest.raises(BadConfig):
PbenchConfig(_config_path_prefix / "hostport-missing.cfg")
with pytest.raises(BadConfig):
PbenchConfig(_config_path_prefix / "hostport-missing-port.cfg")
| def test_empty_config(self):
config = PbenchConfig(_config_path_prefix / "pbench.cfg")
assert config.TZ == "UTC", f"Unexpected TZ value, {config.TZ!r}"
assert (
config.log_fmt is None
), f"Unexpected log format value, {config.log_fmt!r}"
assert (
config.default_logging_level == "INFO"
), f"Unexpected default logging level, {config.default_logging_level!r}"
assert (
config.log_using_caller_directory is False
), f"Unexpected 'log using caller directory' boolean, {config.log_using_caller_directory!r}"
assert config.log_dir is None, f"Unexpected log directory, {config.log_dir!r}"
assert (
config.logger_type == "devlog"
), f"Unexpected logger type, {config.logger_type!r}"
with pytest.raises(AttributeError):
print(f"{config.logger_host!r}")
with pytest.raises(AttributeError):
print(f"{config.logger_port!r}") | identifier_body |
callback.rs | SIMPLEX,
Where::MIP { .. } => MIP,
Where::MIPSol { .. } => MIPSOL,
Where::MIPNode { .. } => MIPNODE,
Where::Message(_) => MESSAGE,
Where::Barrier { .. } => BARRIER,
}
}
}
/// The context object for Gurobi callback.
///
/// One instance is built (via `New::new`) per invocation of the user callback;
/// it carries the raw Gurobi callback handle plus the already-decoded location
/// data, and borrows the model being optimized.
pub struct Callback<'a> {
  // Opaque handle supplied by the Gurobi C API; passed back to every
  // GRBcb* call (GRBcbget / GRBcbsolution / GRBcbcut / GRBcblazy).
  cbdata: *mut ffi::c_void,
  // Decoded callback location and its associated data (set once in `New::new`).
  where_: Where,
  // Model under optimization; used to size dense solution buffers.
  model: &'a Model
}
/// Constructor interface for [`Callback`].
///
/// The callback dispatcher calls `new` with the raw `cbdata` pointer and the
/// integer location code it receives from the Gurobi C API; the implementation
/// below decodes them into a typed `Callback` context.
pub trait New<'a> {
  fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>>;
}
impl<'a> New<'a> for Callback<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>> {
let mut callback = Callback {
cbdata: cbdata,
where_: Where::Polling,
model: model
};
let where_ = match where_ {
POLLING => Where::Polling,
PRESOLVE => {
Where::PreSolve {
coldel: try!(callback.get_int(PRESOLVE, PRE_COLDEL)),
rowdel: try!(callback.get_int(PRESOLVE, PRE_ROWDEL)),
senchg: try!(callback.get_int(PRESOLVE, PRE_SENCHG)),
bndchg: try!(callback.get_int(PRESOLVE, PRE_BNDCHG)),
coecfg: try!(callback.get_int(PRESOLVE, PRE_COECHG))
}
}
SIMPLEX => {
Where::Simplex {
itrcnt: try!(callback.get_double(SIMPLEX, SPX_ITRCNT)),
objval: try!(callback.get_double(SIMPLEX, SPX_OBJVAL)),
priminf: try!(callback.get_double(SIMPLEX, SPX_PRIMINF)),
dualinf: try!(callback.get_double(SIMPLEX, SPX_DUALINF)),
ispert: try!(callback.get_int(SIMPLEX, SPX_ISPERT))
}
}
MIP => {
Where::MIP {
objbst: try!(callback.get_double(MIP, MIP_OBJBST)),
objbnd: try!(callback.get_double(MIP, MIP_OBJBND)),
nodcnt: try!(callback.get_double(MIP, MIP_NODCNT)),
solcnt: try!(callback.get_double(MIP, MIP_SOLCNT)),
cutcnt: try!(callback.get_int(MIP, MIP_CUTCNT)),
nodleft: try!(callback.get_double(MIP, MIP_NODLFT)),
itrcnt: try!(callback.get_double(MIP, MIP_ITRCNT))
}
}
MIPSOL => {
Where::MIPSol {
obj: try!(callback.get_double(MIPSOL, MIPSOL_OBJ)),
objbst: try!(callback.get_double(MIPSOL, MIPSOL_OBJBST)),
objbnd: try!(callback.get_double(MIPSOL, MIPSOL_OBJBND)),
nodcnt: try!(callback.get_double(MIPSOL, MIPSOL_NODCNT)),
solcnt: try!(callback.get_double(MIPSOL, MIPSOL_SOLCNT))
}
}
MIPNODE => {
Where::MIPNode {
status: try!(callback.get_int(MIPNODE, MIPNODE_STATUS)),
objbst: try!(callback.get_double(MIPNODE, MIPNODE_OBJBST)),
objbnd: try!(callback.get_double(MIPNODE, MIPNODE_OBJBND)),
nodcnt: try!(callback.get_double(MIPNODE, MIPNODE_NODCNT)),
solcnt: try!(callback.get_int(MIPNODE, MIPNODE_SOLCNT))
}
}
MESSAGE => Where::Message(try!(callback.get_string(MESSAGE, MSG_STRING)).trim().to_owned()),
BARRIER => {
Where::Barrier {
itrcnt: try!(callback.get_int(BARRIER, BARRIER_ITRCNT)),
primobj: try!(callback.get_double(BARRIER, BARRIER_PRIMOBJ)),
dualobj: try!(callback.get_double(BARRIER, BARRIER_DUALOBJ)),
priminf: try!(callback.get_double(BARRIER, BARRIER_PRIMINF)),
dualinf: try!(callback.get_double(BARRIER, BARRIER_DUALINF)),
compl: try!(callback.get_double(BARRIER, BARRIER_COMPL))
}
}
_ => panic!("Invalid callback location. {}", where_)
};
callback.where_ = where_;
Ok(callback)
}
}
impl<'a> Callback<'a> {
  /// Retrieve the location where the callback called.
  pub fn get_where(&self) -> Where { self.where_.clone() }

  /// Retrieve node relaxation solution values at the current node.
  ///
  /// Only meaningful from `Where::MIPNode` (and, per the original note, when
  /// the node status is optimal — TODO confirm against the Gurobi reference).
  pub fn get_node_rel(&self, vars: &[Var]) -> Result<Vec<f64>> {
    // memo: only MIPNode && status == Optimal
    self.get_double_array(MIPNODE, MIPNODE_REL).map(|buf| vars.iter().map(|v| buf[v.index() as usize]).collect())
  }

  /// Retrieve values from the current solution vector.
  ///
  /// Only meaningful from `Where::MIPSol`.
  pub fn get_solution(&self, vars: &[Var]) -> Result<Vec<f64>> {
    self.get_double_array(MIPSOL, MIPSOL_SOL).map(|buf| vars.iter().map(|v| buf[v.index() as usize]).collect())
  }

  /// Provide a new feasible solution for a MIP model.
  ///
  /// `vars` and `solution` must have equal length and cover every variable of
  /// the model; otherwise `Error::InconsitentDims` is returned.
  pub fn set_solution(&self, vars: &[Var], solution: &[f64]) -> Result<()> {
    if vars.len() != solution.len() || vars.len() < self.model.vars.len() {
      return Err(Error::InconsitentDims);
    }
    // Scatter the provided values into a dense buffer indexed by variable id,
    // which is the layout GRBcbsolution expects.
    let mut buf = vec![0.0; self.model.vars.len()];
    for (v, &sol) in vars.iter().zip(solution.iter()) {
      buf[v.index() as usize] = sol;
    }
    self.check_apicall(unsafe { ffi::GRBcbsolution(self.cbdata, buf.as_ptr()) })
  }

  /// Retrieve the elapsed solver runtime [sec].
  pub fn get_runtime(&self) -> Result<f64> {
    // RUNTIME is not available while polling; mirror Gurobi's own error code.
    if let Where::Polling = self.get_where() {
      return Err(Error::FromAPI("bad call in callback".to_owned(), 40001));
    }
    self.get_double(self.get_where().into(), RUNTIME)
  }

  /// Add a new cutting plane to the MIP model.
  pub fn add_cut(&self, lhs: LinExpr, sense: ConstrSense, rhs: f64) -> Result<()> {
    // Move the expression's constant offset to the right-hand side.
    let (vars, coeff, offset) = lhs.into();
    self.check_apicall(unsafe {
      ffi::GRBcbcut(self.cbdata,
                    coeff.len() as ffi::c_int,
                    vars.as_ptr(),
                    coeff.as_ptr(),
                    sense.into(),
                    rhs - offset)
    })
  }

  /// Add a new lazy constraint to the MIP model.
  pub fn add_lazy(&self, lhs: LinExpr, sense: ConstrSense, rhs: f64) -> Result<()> {
    // Move the expression's constant offset to the right-hand side.
    let (vars, coeff, offset) = lhs.into();
    self.check_apicall(unsafe {
      ffi::GRBcblazy(self.cbdata,
                     coeff.len() as ffi::c_int,
                     vars.as_ptr(),
                     coeff.as_ptr(),
                     sense.into(),
                     rhs - offset)
    })
  }

  // Fetch a single integer attribute for the given callback location.
  fn get_int(&self, where_: i32, what: i32) -> Result<i32> {
    let mut buf = 0;
    self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, &mut buf as *mut i32 as *mut raw::c_void) }).and(Ok(buf))
  }

  // Fetch a single floating-point attribute for the given callback location.
  fn get_double(&self, where_: i32, what: i32) -> Result<f64> {
    let mut buf = 0.0;
    self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, &mut buf as *mut f64 as *mut raw::c_void) }).and(Ok(buf))
  }

  // Fetch a dense per-variable array attribute (one entry per model variable).
  fn get_double_array(&self, where_: i32, what: i32) -> Result<Vec<f64>> {
    let mut buf = vec![0.0; self.model.vars.len()];
    // SAFETY: `buf` is sized to the model's variable count, which is the
    // length GRBcbget fills for array-valued queries. A plain pointer cast
    // replaces the previous `transmute`, which is discouraged for pointer
    // casts expressible with `as`.
    self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, buf.as_mut_ptr() as *mut raw::c_void) }).and(Ok(buf))
  }

  // Fetch a string attribute; Gurobi returns a borrowed C string which is
  // copied into an owned `String`.
  fn get_string(&self, where_: i32, what: i32) -> Result<String> {
    let mut buf = null();
    self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, &mut buf as *mut *const i8 as *mut raw::c_void) })
      .and(Ok(unsafe { util::from_c_str(buf) }))
  }

  // Translate a nonzero Gurobi status code into an `Error`.
  fn check_apicall(&self, error: ffi::c_int) -> Result<()> {
    if error != 0 {
      return Err(Error::FromAPI("Callback error".to_owned(), 40000));
    }
    Ok(())
  }
}
impl<'a> Deref for Callback<'a> {
type Target = Model;
fn | deref | identifier_name |
|
callback.rs | OL_SOL: i32 = 4001;
const MIPSOL_OBJ: i32 = 4002;
const MIPSOL_OBJBST: i32 = 4003;
const MIPSOL_OBJBND: i32 = 4004;
const MIPSOL_NODCNT: i32 = 4005;
const MIPSOL_SOLCNT: i32 = 4006;
#[allow(dead_code)]
const MIPSOL_OBJBNDC: i32 = 4007;
const MIPNODE_STATUS: i32 = 5001;
const MIPNODE_REL: i32 = 5002;
const MIPNODE_OBJBST: i32 = 5003;
const MIPNODE_OBJBND: i32 = 5004;
const MIPNODE_NODCNT: i32 = 5005;
const MIPNODE_SOLCNT: i32 = 5006;
#[allow(dead_code)]
const MIPNODE_BRVAR: i32 = 5007;
#[allow(dead_code)]
const MIPNODE_OBJBNDC: i32 = 5008;
const MSG_STRING: i32 = 6001;
const RUNTIME: i32 = 6002;
const BARRIER_ITRCNT: i32 = 7001;
const BARRIER_PRIMOBJ: i32 = 7002;
const BARRIER_DUALOBJ: i32 = 7003;
const BARRIER_PRIMINF: i32 = 7004;
const BARRIER_DUALINF: i32 = 7005;
const BARRIER_COMPL: i32 = 7006;
/// Location where the callback called
///
/// If you want to get more information, see [official
/// manual](https://www.gurobi.com/documentation/6.5/refman/callback_codes.html).
#[derive(Debug, Clone)]
pub enum Where {
/// Periodic polling callback
Polling,
/// Currently performing presolve
PreSolve {
/// The number of columns removed by presolve to this point.
coldel: i32,
/// The number of rows removed by presolve to this point.
rowdel: i32,
/// The number of constraint senses changed by presolve to this point.
senchg: i32,
/// The number of variable bounds changed by presolve to this point.
bndchg: i32,
/// The number of coefficients changed by presolve to this point.
coecfg: i32
},
/// Currently in simplex
Simplex {
/// Current simplex iteration count.
itrcnt: f64,
/// Current simplex objective value.
objval: f64,
/// Current primal infeasibility.
priminf: f64,
/// Current dual infeasibility.
dualinf: f64,
/// Is problem current perturbed?
ispert: i32
},
/// Currently in MIP
MIP {
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64,
/// Current count of cutting planes applied.
cutcnt: i32,
/// Current unexplored node count.
nodleft: f64,
/// Current simplex iteration count.
itrcnt: f64
},
/// Found a new MIP incumbent
MIPSol {
/// Objective value for new solution.
obj: f64,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64
},
/// Currently exploring a MIP node
MIPNode {
/// Optimization status of current MIP node (see the Status Code section for further information).
status: i32,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: i32
},
/// Printing a log message
Message(String),
/// Currently in barrier.
Barrier {
/// Current barrier iteration count.
itrcnt: i32,
/// Primal objective value for current barrier iterate.
primobj: f64,
/// Dual objective value for current barrier iterate.
dualobj: f64,
/// Primal infeasibility for current barrier iterate.
priminf: f64,
/// Dual infeasibility for current barrier iterate.
dualinf: f64,
/// Complementarity violation for current barrier iterate.
compl: f64
}
}
impl Into<i32> for Where {
fn into(self) -> i32 {
match self {
Where::Polling => POLLING,
Where::PreSolve { .. } => PRESOLVE,
Where::Simplex { .. } => SIMPLEX,
Where::MIP { .. } => MIP,
Where::MIPSol { .. } => MIPSOL,
Where::MIPNode { .. } => MIPNODE,
Where::Message(_) => MESSAGE,
Where::Barrier { .. } => BARRIER,
}
}
}
/// The context object for Gurobi callback.
pub struct Callback<'a> {
cbdata: *mut ffi::c_void,
where_: Where,
model: &'a Model
}
pub trait New<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>>;
}
impl<'a> New<'a> for Callback<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>> | Where::Simplex {
itrcnt: try!(callback.get_double(SIMPLEX, SPX_ITRCNT)),
objval: try!(callback.get_double(SIMPLEX, SPX_OBJVAL)),
priminf: try!(callback.get_double(SIMPLEX, SPX_PRIMINF)),
dualinf: try!(callback.get_double(SIMPLEX, SPX_DUALINF)),
ispert: try!(callback.get_int(SIMPLEX, SPX_ISPERT))
}
}
MIP => {
Where::MIP {
objbst: try!(callback.get_double(MIP, MIP_OBJBST)),
objbnd: try!(callback.get_double(MIP, MIP_OBJBND)),
nodcnt: try!(callback.get_double(MIP, MIP_NODCNT)),
solcnt: try!(callback.get_double(MIP, MIP_SOLCNT)),
cutcnt: try!(callback.get_int(MIP, MIP_CUTCNT)),
nodleft: try!(callback.get_double(MIP, MIP_NODLFT)),
itrcnt: try!(callback.get_double(MIP, MIP_ITRCNT))
}
}
MIPSOL => {
Where::MIPSol {
obj: try!(callback.get_double(MIPSOL, MIPSOL_OBJ)),
objbst: try!(callback.get_double(MIPSOL, MIPSOL_OBJBST)),
objbnd: try!(callback.get_double(MIPSOL, MIPSOL_OBJBND)),
nodcnt: try!(callback.get_double(MIPSOL, MIPSOL_NODCNT)),
solcnt: try!(callback.get_double(MIPSOL, MIPSOL_SOLCNT))
}
}
MIPNODE => {
Where::MIPNode {
status: try!(callback.get_int(MIPNODE, MIPNODE_STATUS)),
objbst: try!(callback.get_double(MIPNODE, MIPNODE_OBJBST)),
objbnd: try!(callback.get_double(MIPNODE, MIPNODE_OBJBND)),
nodcnt: try!(callback.get_double(MIPNODE, MIPNODE_NODCNT)),
solcnt: try!(callback.get_int(MIPNODE, MIPNODE_SOLCNT))
}
}
MESSAGE => Where::Message(try!(callback.get_string(MESSAGE, MSG_STRING)).trim().to_owned()),
BARRIER => {
Where::Barrier {
itrcnt: try!(callback.get_int(BARRIER | {
let mut callback = Callback {
cbdata: cbdata,
where_: Where::Polling,
model: model
};
let where_ = match where_ {
POLLING => Where::Polling,
PRESOLVE => {
Where::PreSolve {
coldel: try!(callback.get_int(PRESOLVE, PRE_COLDEL)),
rowdel: try!(callback.get_int(PRESOLVE, PRE_ROWDEL)),
senchg: try!(callback.get_int(PRESOLVE, PRE_SENCHG)),
bndchg: try!(callback.get_int(PRESOLVE, PRE_BNDCHG)),
coecfg: try!(callback.get_int(PRESOLVE, PRE_COECHG))
}
}
SIMPLEX => { | identifier_body |
callback.rs | _SOL: i32 = 4001;
const MIPSOL_OBJ: i32 = 4002;
const MIPSOL_OBJBST: i32 = 4003;
const MIPSOL_OBJBND: i32 = 4004;
const MIPSOL_NODCNT: i32 = 4005;
const MIPSOL_SOLCNT: i32 = 4006;
#[allow(dead_code)]
const MIPSOL_OBJBNDC: i32 = 4007;
const MIPNODE_STATUS: i32 = 5001;
const MIPNODE_REL: i32 = 5002;
const MIPNODE_OBJBST: i32 = 5003;
const MIPNODE_OBJBND: i32 = 5004;
const MIPNODE_NODCNT: i32 = 5005;
const MIPNODE_SOLCNT: i32 = 5006;
#[allow(dead_code)]
const MIPNODE_BRVAR: i32 = 5007;
#[allow(dead_code)]
const MIPNODE_OBJBNDC: i32 = 5008;
const MSG_STRING: i32 = 6001;
const RUNTIME: i32 = 6002;
const BARRIER_ITRCNT: i32 = 7001;
const BARRIER_PRIMOBJ: i32 = 7002;
const BARRIER_DUALOBJ: i32 = 7003;
const BARRIER_PRIMINF: i32 = 7004;
const BARRIER_DUALINF: i32 = 7005;
const BARRIER_COMPL: i32 = 7006;
/// Location where the callback called
///
/// If you want to get more information, see [official
/// manual](https://www.gurobi.com/documentation/6.5/refman/callback_codes.html).
#[derive(Debug, Clone)]
pub enum Where {
/// Periodic polling callback
Polling,
/// Currently performing presolve
PreSolve {
/// The number of columns removed by presolve to this point.
coldel: i32,
/// The number of rows removed by presolve to this point.
rowdel: i32,
/// The number of constraint senses changed by presolve to this point.
senchg: i32,
/// The number of variable bounds changed by presolve to this point.
bndchg: i32,
/// The number of coefficients changed by presolve to this point.
coecfg: i32
},
/// Currently in simplex
Simplex {
/// Current simplex iteration count.
itrcnt: f64,
/// Current simplex objective value.
objval: f64,
/// Current primal infeasibility.
priminf: f64,
/// Current dual infeasibility.
dualinf: f64,
/// Is problem current perturbed?
ispert: i32
},
/// Currently in MIP
MIP {
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64,
/// Current count of cutting planes applied.
cutcnt: i32,
/// Current unexplored node count.
nodleft: f64,
/// Current simplex iteration count.
itrcnt: f64
},
/// Found a new MIP incumbent
MIPSol {
/// Objective value for new solution.
obj: f64,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64
},
/// Currently exploring a MIP node
MIPNode {
/// Optimization status of current MIP node (see the Status Code section for further information).
status: i32,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: i32
},
/// Printing a log message
Message(String),
/// Currently in barrier.
Barrier {
/// Current barrier iteration count.
itrcnt: i32,
/// Primal objective value for current barrier iterate.
primobj: f64,
/// Dual objective value for current barrier iterate.
dualobj: f64,
/// Primal infeasibility for current barrier iterate.
priminf: f64,
/// Dual infeasibility for current barrier iterate.
dualinf: f64,
/// Complementarity violation for current barrier iterate.
compl: f64
}
}
impl Into<i32> for Where {
fn into(self) -> i32 {
match self {
Where::Polling => POLLING,
Where::PreSolve { .. } => PRESOLVE,
Where::Simplex { .. } => SIMPLEX,
Where::MIP { .. } => MIP,
Where::MIPSol { .. } => MIPSOL,
Where::MIPNode { .. } => MIPNODE,
Where::Message(_) => MESSAGE,
Where::Barrier { .. } => BARRIER,
}
}
}
/// The context object for Gurobi callback.
pub struct Callback<'a> {
cbdata: *mut ffi::c_void,
where_: Where,
model: &'a Model
}
pub trait New<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>>;
}
impl<'a> New<'a> for Callback<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>> {
let mut callback = Callback {
cbdata: cbdata,
where_: Where::Polling,
model: model
};
let where_ = match where_ {
POLLING => Where::Polling,
PRESOLVE => {
Where::PreSolve {
coldel: try!(callback.get_int(PRESOLVE, PRE_COLDEL)),
rowdel: try!(callback.get_int(PRESOLVE, PRE_ROWDEL)),
senchg: try!(callback.get_int(PRESOLVE, PRE_SENCHG)),
bndchg: try!(callback.get_int(PRESOLVE, PRE_BNDCHG)),
coecfg: try!(callback.get_int(PRESOLVE, PRE_COECHG))
}
}
SIMPLEX => |
MIP => {
Where::MIP {
objbst: try!(callback.get_double(MIP, MIP_OBJBST)),
objbnd: try!(callback.get_double(MIP, MIP_OBJBND)),
nodcnt: try!(callback.get_double(MIP, MIP_NODCNT)),
solcnt: try!(callback.get_double(MIP, MIP_SOLCNT)),
cutcnt: try!(callback.get_int(MIP, MIP_CUTCNT)),
nodleft: try!(callback.get_double(MIP, MIP_NODLFT)),
itrcnt: try!(callback.get_double(MIP, MIP_ITRCNT))
}
}
MIPSOL => {
Where::MIPSol {
obj: try!(callback.get_double(MIPSOL, MIPSOL_OBJ)),
objbst: try!(callback.get_double(MIPSOL, MIPSOL_OBJBST)),
objbnd: try!(callback.get_double(MIPSOL, MIPSOL_OBJBND)),
nodcnt: try!(callback.get_double(MIPSOL, MIPSOL_NODCNT)),
solcnt: try!(callback.get_double(MIPSOL, MIPSOL_SOLCNT))
}
}
MIPNODE => {
Where::MIPNode {
status: try!(callback.get_int(MIPNODE, MIPNODE_STATUS)),
objbst: try!(callback.get_double(MIPNODE, MIPNODE_OBJBST)),
objbnd: try!(callback.get_double(MIPNODE, MIPNODE_OBJBND)),
nodcnt: try!(callback.get_double(MIPNODE, MIPNODE_NODCNT)),
solcnt: try!(callback.get_int(MIPNODE, MIPNODE_SOLCNT))
}
}
MESSAGE => Where::Message(try!(callback.get_string(MESSAGE, MSG_STRING)).trim().to_owned()),
BARRIER => {
Where::Barrier {
itrcnt: try!(callback.get_int(BARRIER | {
Where::Simplex {
itrcnt: try!(callback.get_double(SIMPLEX, SPX_ITRCNT)),
objval: try!(callback.get_double(SIMPLEX, SPX_OBJVAL)),
priminf: try!(callback.get_double(SIMPLEX, SPX_PRIMINF)),
dualinf: try!(callback.get_double(SIMPLEX, SPX_DUALINF)),
ispert: try!(callback.get_int(SIMPLEX, SPX_ISPERT))
}
} | conditional_block |
callback.rs | OL_SOL: i32 = 4001;
const MIPSOL_OBJ: i32 = 4002;
const MIPSOL_OBJBST: i32 = 4003;
const MIPSOL_OBJBND: i32 = 4004;
const MIPSOL_NODCNT: i32 = 4005;
const MIPSOL_SOLCNT: i32 = 4006;
#[allow(dead_code)]
const MIPSOL_OBJBNDC: i32 = 4007;
const MIPNODE_STATUS: i32 = 5001;
const MIPNODE_REL: i32 = 5002;
const MIPNODE_OBJBST: i32 = 5003;
const MIPNODE_OBJBND: i32 = 5004;
const MIPNODE_NODCNT: i32 = 5005;
const MIPNODE_SOLCNT: i32 = 5006;
#[allow(dead_code)]
const MIPNODE_BRVAR: i32 = 5007;
#[allow(dead_code)]
const MIPNODE_OBJBNDC: i32 = 5008;
const MSG_STRING: i32 = 6001;
const RUNTIME: i32 = 6002;
const BARRIER_ITRCNT: i32 = 7001;
const BARRIER_PRIMOBJ: i32 = 7002;
const BARRIER_DUALOBJ: i32 = 7003;
const BARRIER_PRIMINF: i32 = 7004;
const BARRIER_DUALINF: i32 = 7005;
const BARRIER_COMPL: i32 = 7006;
/// Location where the callback called
///
/// If you want to get more information, see [official
/// manual](https://www.gurobi.com/documentation/6.5/refman/callback_codes.html).
#[derive(Debug, Clone)]
pub enum Where {
/// Periodic polling callback
Polling,
/// Currently performing presolve
PreSolve {
/// The number of columns removed by presolve to this point.
coldel: i32,
/// The number of rows removed by presolve to this point.
rowdel: i32,
/// The number of constraint senses changed by presolve to this point.
senchg: i32,
/// The number of variable bounds changed by presolve to this point.
bndchg: i32,
/// The number of coefficients changed by presolve to this point.
coecfg: i32
},
/// Currently in simplex
Simplex {
/// Current simplex iteration count.
itrcnt: f64,
/// Current simplex objective value.
objval: f64,
/// Current primal infeasibility.
priminf: f64,
/// Current dual infeasibility.
dualinf: f64,
/// Is problem current perturbed?
ispert: i32
},
/// Currently in MIP
MIP {
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64,
/// Current count of cutting planes applied.
cutcnt: i32,
/// Current unexplored node count.
nodleft: f64,
/// Current simplex iteration count.
itrcnt: f64
},
/// Found a new MIP incumbent
MIPSol {
/// Objective value for new solution.
obj: f64,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64
},
/// Currently exploring a MIP node
MIPNode {
/// Optimization status of current MIP node (see the Status Code section for further information).
status: i32,
/// Current best objective.
objbst: f64, | /// Current count of feasible solutions found.
solcnt: i32
},
/// Printing a log message
Message(String),
/// Currently in barrier.
Barrier {
/// Current barrier iteration count.
itrcnt: i32,
/// Primal objective value for current barrier iterate.
primobj: f64,
/// Dual objective value for current barrier iterate.
dualobj: f64,
/// Primal infeasibility for current barrier iterate.
priminf: f64,
/// Dual infeasibility for current barrier iterate.
dualinf: f64,
/// Complementarity violation for current barrier iterate.
compl: f64
}
}
impl Into<i32> for Where {
fn into(self) -> i32 {
match self {
Where::Polling => POLLING,
Where::PreSolve { .. } => PRESOLVE,
Where::Simplex { .. } => SIMPLEX,
Where::MIP { .. } => MIP,
Where::MIPSol { .. } => MIPSOL,
Where::MIPNode { .. } => MIPNODE,
Where::Message(_) => MESSAGE,
Where::Barrier { .. } => BARRIER,
}
}
}
/// The context object for Gurobi callback.
pub struct Callback<'a> {
cbdata: *mut ffi::c_void,
where_: Where,
model: &'a Model
}
pub trait New<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>>;
}
impl<'a> New<'a> for Callback<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>> {
let mut callback = Callback {
cbdata: cbdata,
where_: Where::Polling,
model: model
};
let where_ = match where_ {
POLLING => Where::Polling,
PRESOLVE => {
Where::PreSolve {
coldel: try!(callback.get_int(PRESOLVE, PRE_COLDEL)),
rowdel: try!(callback.get_int(PRESOLVE, PRE_ROWDEL)),
senchg: try!(callback.get_int(PRESOLVE, PRE_SENCHG)),
bndchg: try!(callback.get_int(PRESOLVE, PRE_BNDCHG)),
coecfg: try!(callback.get_int(PRESOLVE, PRE_COECHG))
}
}
SIMPLEX => {
Where::Simplex {
itrcnt: try!(callback.get_double(SIMPLEX, SPX_ITRCNT)),
objval: try!(callback.get_double(SIMPLEX, SPX_OBJVAL)),
priminf: try!(callback.get_double(SIMPLEX, SPX_PRIMINF)),
dualinf: try!(callback.get_double(SIMPLEX, SPX_DUALINF)),
ispert: try!(callback.get_int(SIMPLEX, SPX_ISPERT))
}
}
MIP => {
Where::MIP {
objbst: try!(callback.get_double(MIP, MIP_OBJBST)),
objbnd: try!(callback.get_double(MIP, MIP_OBJBND)),
nodcnt: try!(callback.get_double(MIP, MIP_NODCNT)),
solcnt: try!(callback.get_double(MIP, MIP_SOLCNT)),
cutcnt: try!(callback.get_int(MIP, MIP_CUTCNT)),
nodleft: try!(callback.get_double(MIP, MIP_NODLFT)),
itrcnt: try!(callback.get_double(MIP, MIP_ITRCNT))
}
}
MIPSOL => {
Where::MIPSol {
obj: try!(callback.get_double(MIPSOL, MIPSOL_OBJ)),
objbst: try!(callback.get_double(MIPSOL, MIPSOL_OBJBST)),
objbnd: try!(callback.get_double(MIPSOL, MIPSOL_OBJBND)),
nodcnt: try!(callback.get_double(MIPSOL, MIPSOL_NODCNT)),
solcnt: try!(callback.get_double(MIPSOL, MIPSOL_SOLCNT))
}
}
MIPNODE => {
Where::MIPNode {
status: try!(callback.get_int(MIPNODE, MIPNODE_STATUS)),
objbst: try!(callback.get_double(MIPNODE, MIPNODE_OBJBST)),
objbnd: try!(callback.get_double(MIPNODE, MIPNODE_OBJBND)),
nodcnt: try!(callback.get_double(MIPNODE, MIPNODE_NODCNT)),
solcnt: try!(callback.get_int(MIPNODE, MIPNODE_SOLCNT))
}
}
MESSAGE => Where::Message(try!(callback.get_string(MESSAGE, MSG_STRING)).trim().to_owned()),
BARRIER => {
Where::Barrier {
itrcnt: try!(callback.get_int(BARRIER, | /// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64, | random_line_split |
graphcool-ids.js | exports.strength = {
"1": "cjersov3s14fu01832cku6ivc",
"2": "cjertinv814oj0183jx107b9u",
"3": "cjertith714om0183tr2huxiw",
"4": "cjertj05014or0183we74xnsb",
"5": "cjertj34914ov0183sl7kmqm1",
"6": "cjertj5ol14p10183cmzat5y9",
"7": "cjertj8a814p60183av1hlcty",
"8": "cjertjbqr14pc0183uo7656p7",
"9": "cjertjebc14ph0183025ls377"
};
exports.intelligence = {
"1": "cjertjj9i14pm01835g18maaz",
"2": "cjertjmjt14pq01838b9o4h92",
"3": "cjertjpvq14px0183ff0j6i8o",
"4": "cjertjspz14q00183gypv8iaa",
"5": "cjertjy5i14q70183vq4en94t",
"6": "cjertk2k914qa0183zxqjv2q3",
"7": "cjertk64814qe0183s8gtzive",
"8": "cjertkf4i14qj0183iafpggqn",
"9": "cjertkih114qm01833ztywguw"
};
exports.special = {
"1": "cjertkmfs14qr0183zvz5p90r",
"2": "cjertkqcd14qv01833umx8bs6",
"3": "cjertkthm14r10183p8ymqv7a",
"4": "cjertkxl714r401837ddh78bw",
"5": "cjertl0i914r80183y161rfvw",
"6": "cjertl3mj14rc018322rxbnb3",
"7": "cjertl7pv14rg01835d77dxcm",
"8": "cjertlb0h14rj0183wfrkgp4x",
"9": "cjertleik14rm01837rde1qzn"
};
exports.trait = {
"Hero": "cjes4rlte16z90183tpe0zwp8",
"Villain": "cjes4ro4e16zc0183wukhv8yy",
"Blue Lantern": "cjes4rtu116zg0183b2u6zvqh",
"Green Lantern": "cjes4s2gj16zk0183uon6d2qy",
"Indigo Tribe": "cjes4s9rt16zo0183dblaykto",
"Orange Lantern": "cjes4sgpd16zs0183ri5sae4f",
"Red Lantern": "cjes4smq216zw01834711alj5",
"Sinestro Corps": "cjes4ssvn17000183imycm8h7",
"Star Sapphire": "cjes4sz0t17030183x780qp5t",
"White Lantern": "cjes4t5r617070183n5f60a53",
"Black Lantern": "cjes4tna6170b0183yxydoxc8",
"Scout Regiment": "cjes4ufa5170g0183ne60w0n5", | "Military Police Regiment": "cjes4ushb170o0183gqvjd1pr",
"Titan": "cjes4v11x170t0183yn2ripoe",
"Cadet Corps": "cjes4vgkg170x0183kj04xyew",
"Human": "cjf8iuklm49ie0183uw7s0iq4"
}; | "Garrison Regiment": "cjes4ulp5170k0183wz5ewix6", | random_line_split |
page-change-time.js | /*!
* jQuery Mobile v@VERSION
* http://jquerymobile.com/
*
* Copyright 2011, jQuery Project
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*/
// This is code that can be used as a simple bookmarklet for timing
// the load, enhancment, and transition of a changePage() request.
(function( $, window, undefined ) {
function getTime() |
var startChange, stopChange, startLoad, stopLoad, startEnhance, stopEnhance, startTransition, stopTransition, lock = 0;
$( document )
.bind( "pagebeforechange", function( e, data) {
if ( typeof data.toPage === "string" ) {
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = getTime();
}
})
.bind( "pagebeforeload", function() {
startLoad = stopLoad = getTime();
})
.bind( "pagebeforecreate", function() {
if ( ++lock === 1 ) {
stopLoad = startEnhance = stopEnhance = getTime();
}
})
.bind( "pageinit", function() {
if ( --lock === 0 ) {
stopEnhance = getTime();
}
})
.bind( "pagebeforeshow", function() {
startTransition = stopTransition = getTime();
})
.bind( "pageshow", function() {
stopTransition = getTime();
})
.bind( "pagechange", function( e, data ) {
if ( typeof data.toPage === "object" ) {
stopChange = getTime();
alert("load + processing: " + ( stopLoad - startLoad )
+ "\nenhance: " + ( stopEnhance - startEnhance )
+ "\ntransition: " + ( stopTransition - startTransition )
+ "\ntotalTime: " + ( stopChange - startChange ) );
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = 0;
}
});
})( jQuery, window );
| {
return ( new Date() ).getTime();
} | identifier_body |
page-change-time.js | /*!
* jQuery Mobile v@VERSION
* http://jquerymobile.com/
*
* Copyright 2011, jQuery Project
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*/
// This is code that can be used as a simple bookmarklet for timing
// the load, enhancment, and transition of a changePage() request.
(function( $, window, undefined ) {
function getTime() {
return ( new Date() ).getTime();
}
var startChange, stopChange, startLoad, stopLoad, startEnhance, stopEnhance, startTransition, stopTransition, lock = 0;
$( document )
.bind( "pagebeforechange", function( e, data) {
if ( typeof data.toPage === "string" ) |
})
.bind( "pagebeforeload", function() {
startLoad = stopLoad = getTime();
})
.bind( "pagebeforecreate", function() {
if ( ++lock === 1 ) {
stopLoad = startEnhance = stopEnhance = getTime();
}
})
.bind( "pageinit", function() {
if ( --lock === 0 ) {
stopEnhance = getTime();
}
})
.bind( "pagebeforeshow", function() {
startTransition = stopTransition = getTime();
})
.bind( "pageshow", function() {
stopTransition = getTime();
})
.bind( "pagechange", function( e, data ) {
if ( typeof data.toPage === "object" ) {
stopChange = getTime();
alert("load + processing: " + ( stopLoad - startLoad )
+ "\nenhance: " + ( stopEnhance - startEnhance )
+ "\ntransition: " + ( stopTransition - startTransition )
+ "\ntotalTime: " + ( stopChange - startChange ) );
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = 0;
}
});
})( jQuery, window );
| {
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = getTime();
} | conditional_block |
page-change-time.js | /*!
* jQuery Mobile v@VERSION
* http://jquerymobile.com/
*
* Copyright 2011, jQuery Project
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*/
// This is code that can be used as a simple bookmarklet for timing
// the load, enhancment, and transition of a changePage() request.
(function( $, window, undefined ) {
function getTime() {
return ( new Date() ).getTime();
}
var startChange, stopChange, startLoad, stopLoad, startEnhance, stopEnhance, startTransition, stopTransition, lock = 0;
$( document )
.bind( "pagebeforechange", function( e, data) {
if ( typeof data.toPage === "string" ) {
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = getTime();
}
})
.bind( "pagebeforeload", function() {
startLoad = stopLoad = getTime();
})
.bind( "pagebeforecreate", function() {
if ( ++lock === 1 ) {
stopLoad = startEnhance = stopEnhance = getTime();
}
})
.bind( "pageinit", function() {
if ( --lock === 0 ) {
stopEnhance = getTime();
}
})
.bind( "pagebeforeshow", function() {
startTransition = stopTransition = getTime();
})
.bind( "pageshow", function() {
stopTransition = getTime();
})
.bind( "pagechange", function( e, data ) {
if ( typeof data.toPage === "object" ) {
stopChange = getTime();
alert("load + processing: " + ( stopLoad - startLoad ) |
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = 0;
}
});
})( jQuery, window ); | + "\nenhance: " + ( stopEnhance - startEnhance )
+ "\ntransition: " + ( stopTransition - startTransition )
+ "\ntotalTime: " + ( stopChange - startChange ) ); | random_line_split |
page-change-time.js | /*!
* jQuery Mobile v@VERSION
* http://jquerymobile.com/
*
* Copyright 2011, jQuery Project
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*/
// This is code that can be used as a simple bookmarklet for timing
// the load, enhancment, and transition of a changePage() request.
(function( $, window, undefined ) {
function | () {
return ( new Date() ).getTime();
}
var startChange, stopChange, startLoad, stopLoad, startEnhance, stopEnhance, startTransition, stopTransition, lock = 0;
$( document )
.bind( "pagebeforechange", function( e, data) {
if ( typeof data.toPage === "string" ) {
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = getTime();
}
})
.bind( "pagebeforeload", function() {
startLoad = stopLoad = getTime();
})
.bind( "pagebeforecreate", function() {
if ( ++lock === 1 ) {
stopLoad = startEnhance = stopEnhance = getTime();
}
})
.bind( "pageinit", function() {
if ( --lock === 0 ) {
stopEnhance = getTime();
}
})
.bind( "pagebeforeshow", function() {
startTransition = stopTransition = getTime();
})
.bind( "pageshow", function() {
stopTransition = getTime();
})
.bind( "pagechange", function( e, data ) {
if ( typeof data.toPage === "object" ) {
stopChange = getTime();
alert("load + processing: " + ( stopLoad - startLoad )
+ "\nenhance: " + ( stopEnhance - startEnhance )
+ "\ntransition: " + ( stopTransition - startTransition )
+ "\ntotalTime: " + ( stopChange - startChange ) );
startChange = stopChange = startLoad = stopLoad = startEnhance = stopEnhance = startTransition = stopTransition = 0;
}
});
})( jQuery, window );
| getTime | identifier_name |
DEPProfilesTable.tsx | import * as React from "react";
import ReactTable, {TableProps, Column} from "react-table";
import selectTableHoc from "react-table/lib/hoc/selectTable";
import {DEPAccount, DEPProfile} from "../../store/dep/types";
import {DEPProfileName} from "../react-table/DEPProfileName";
import {JSONAPIDataObject} from "../../store/json-api";
// import "react-table/react-table.css";
export interface IDEPProfilesTableProps {
loading: boolean;
data: Array<JSONAPIDataObject<DEPProfile>>;
onToggleSelection: () => void;
onToggleAll: () => void;
}
const columns: Column[] = [
{
Cell: DEPProfileName,
Header: "Name",
accessor: "attributes.profile_name",
id: "profile_name",
},
{
Header: "UUID",
accessor: "attributes.uuid", | const ReactSelectTable = selectTableHoc(ReactTable);
export const DEPProfilesTable = ({ data, ...props }: IDEPProfilesTableProps & Partial<TableProps>) => (
<ReactSelectTable
keyField="id"
selectType="checkbox"
data={data}
columns={columns}
{...props}
/>
); | id: "uuid",
},
];
| random_line_split |
paginator.py | class Paginator(object):
def __init__(self, collection, page_number=0, limit=20, total=-1):
self.collection = collection
self.page_number = int(page_number)
self.limit = int(limit)
self.total = int(total)
@property
def page(self):
start = self.page_number * self.limit
end = start + self.limit
try:
return self.collection[start:end]
except Exception as detail:
print detail
return []
@property
def current_page(self):
return self.page_number + 1
@property
def page_count(self):
if self.total != -1:
pages = abs(self.total / self.limit)+1
return pages
else:
return None
@property
def has_previous(self):
return True if (self.page_number > 0) else False
@property
def has_next(self):
return True if (len(self.page) == self.limit) else False
@property
def previous_page(self):
if self.has_previous:
return self.page_number-1
@property
def next_page(self):
if self.has_next:
return self.page_number+1
def previous_page_link(self, request):
return self.__build_url(self.previous_page, request.full_url())
def next_page_link(self, request):
return self.__build_url(self.next_page, request.full_url())
def | (self, page_num, url):
import re
#check if there is a query string
if url.find('?') != -1:
if re.search(r'page=\d',url) != None:
page_str = "&page=%d" % page_num
return re.sub(r'&page=\d+', page_str, url)
else:
return "%s&page=%d" % (url, page_num)
else:
return "%s?page=%d" % (url, page_num)
| __build_url | identifier_name |
paginator.py | class Paginator(object):
def __init__(self, collection, page_number=0, limit=20, total=-1):
self.collection = collection | self.page_number = int(page_number)
self.limit = int(limit)
self.total = int(total)
@property
def page(self):
start = self.page_number * self.limit
end = start + self.limit
try:
return self.collection[start:end]
except Exception as detail:
print detail
return []
@property
def current_page(self):
return self.page_number + 1
@property
def page_count(self):
if self.total != -1:
pages = abs(self.total / self.limit)+1
return pages
else:
return None
@property
def has_previous(self):
return True if (self.page_number > 0) else False
@property
def has_next(self):
return True if (len(self.page) == self.limit) else False
@property
def previous_page(self):
if self.has_previous:
return self.page_number-1
@property
def next_page(self):
if self.has_next:
return self.page_number+1
def previous_page_link(self, request):
return self.__build_url(self.previous_page, request.full_url())
def next_page_link(self, request):
return self.__build_url(self.next_page, request.full_url())
def __build_url(self, page_num, url):
import re
#check if there is a query string
if url.find('?') != -1:
if re.search(r'page=\d',url) != None:
page_str = "&page=%d" % page_num
return re.sub(r'&page=\d+', page_str, url)
else:
return "%s&page=%d" % (url, page_num)
else:
return "%s?page=%d" % (url, page_num) | random_line_split |
|
paginator.py | class Paginator(object):
def __init__(self, collection, page_number=0, limit=20, total=-1):
self.collection = collection
self.page_number = int(page_number)
self.limit = int(limit)
self.total = int(total)
@property
def page(self):
start = self.page_number * self.limit
end = start + self.limit
try:
return self.collection[start:end]
except Exception as detail:
print detail
return []
@property
def current_page(self):
|
@property
def page_count(self):
if self.total != -1:
pages = abs(self.total / self.limit)+1
return pages
else:
return None
@property
def has_previous(self):
return True if (self.page_number > 0) else False
@property
def has_next(self):
return True if (len(self.page) == self.limit) else False
@property
def previous_page(self):
if self.has_previous:
return self.page_number-1
@property
def next_page(self):
if self.has_next:
return self.page_number+1
def previous_page_link(self, request):
return self.__build_url(self.previous_page, request.full_url())
def next_page_link(self, request):
return self.__build_url(self.next_page, request.full_url())
def __build_url(self, page_num, url):
import re
#check if there is a query string
if url.find('?') != -1:
if re.search(r'page=\d',url) != None:
page_str = "&page=%d" % page_num
return re.sub(r'&page=\d+', page_str, url)
else:
return "%s&page=%d" % (url, page_num)
else:
return "%s?page=%d" % (url, page_num)
| return self.page_number + 1 | identifier_body |
paginator.py | class Paginator(object):
def __init__(self, collection, page_number=0, limit=20, total=-1):
self.collection = collection
self.page_number = int(page_number)
self.limit = int(limit)
self.total = int(total)
@property
def page(self):
start = self.page_number * self.limit
end = start + self.limit
try:
return self.collection[start:end]
except Exception as detail:
print detail
return []
@property
def current_page(self):
return self.page_number + 1
@property
def page_count(self):
if self.total != -1:
|
else:
return None
@property
def has_previous(self):
return True if (self.page_number > 0) else False
@property
def has_next(self):
return True if (len(self.page) == self.limit) else False
@property
def previous_page(self):
if self.has_previous:
return self.page_number-1
@property
def next_page(self):
if self.has_next:
return self.page_number+1
def previous_page_link(self, request):
return self.__build_url(self.previous_page, request.full_url())
def next_page_link(self, request):
return self.__build_url(self.next_page, request.full_url())
def __build_url(self, page_num, url):
import re
#check if there is a query string
if url.find('?') != -1:
if re.search(r'page=\d',url) != None:
page_str = "&page=%d" % page_num
return re.sub(r'&page=\d+', page_str, url)
else:
return "%s&page=%d" % (url, page_num)
else:
return "%s?page=%d" % (url, page_num)
| pages = abs(self.total / self.limit)+1
return pages | conditional_block |
qsdateutil.py | report.
'''
import datetime as dt
from datetime import timedelta
import time as t
import numpy as np
import os
import pandas as pd
def _cache_dates():
''' Caches dates '''
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure you have NYSE_dates.txt in the qstkutil directory"
datestxt = np.loadtxt(filename, dtype=str)
dates = []
for i in datestxt:
dates.append(dt.datetime.strptime(i, "%m/%d/%Y"))
return pd.TimeSeries(index=dates, data=dates)
GTS_DATES = _cache_dates()
def getMonthNames():
return(['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'])
def getYears(funds):
years=[]
for date in funds.index:
if(not(date.year in years)):
years.append(date.year)
return(years)
def getMonths(funds,year):
months=[]
for date in funds.index:
if((date.year==year) and not(date.month in months)):
months.append(date.month)
return(months)
def getDays(funds,year,month):
days=[]
for date in funds.index:
if((date.year==year) and (date.month==month)):
days.append(date)
return(days)
def getDaysBetween(ts_start, ts_end):
days=[]
for i in range(0,(ts_end-ts_start).days):
days.append(ts_start+timedelta(days=1)*i)
return(days)
def getFirstDay(funds,year,month):
for date in funds.index:
if((date.year==year) and (date.month==month)):
return(date)
return('ERROR')
def getLastDay(funds,year,month):
return_date = 'ERROR'
for date in funds.index:
if((date.year==year) and (date.month==month)):
return_date = date
return(return_date)
def getNextOptionClose(day, trade_days, offset=0):
#get third friday in month of day
#get first of month
year_off=0
if day.month+offset > 12:
year_off = 1
offset = offset - 12
first = dt.datetime(day.year+year_off, day.month+offset, 1, hour=16)
#get weekday
day_num = first.weekday()
#get first friday (friday - weekday) add 7 if less than 1
dif = 5 - day_num
if dif < 1:
dif = dif+7
#move to third friday
dif = dif + 14
friday = first+dt.timedelta(days=(dif-1))
#if friday is a holiday, options expire then
if friday in trade_days:
month_close = first + dt.timedelta(days=dif)
else:
month_close = friday
#if day is past the day after that
if month_close < day:
return_date = getNextOptionClose(day, trade_days, offset=1)
else:
return_date = month_close
return(return_date)
def getLastOptionClose(day, trade_days):
start = day
while getNextOptionClose(day, trade_days)>=start:
day= day - dt.timedelta(days=1)
return(getNextOptionClose(day, trade_days))
def getNYSEoffset(mark, offset):
''' Returns NYSE date offset by number of days '''
mark = mark.replace(hour=0, minute=0, second=0, microsecond=0)
i = GTS_DATES.index.searchsorted(mark, side='right')
# If there is no exact match, take first date in past
if GTS_DATES[i] != mark:
i -= 1
ret = GTS_DATES[i + offset]
ret = ret.replace(hour=16)
return ret
def getNYSEdays(startday = dt.datetime(1964,7,5), endday = dt.datetime(2020,12,31),
timeofday = dt.timedelta(0)):
"""
@summary: Create a list of timestamps between startday and endday (inclusive)
that correspond to the days there was trading at the NYSE. This function
depends on a separately created a file that lists all days since July 4,
1962 that the NYSE has been open, going forward to 2020 (based
on the holidays that NYSE recognizes).
@param startday: First timestamp to consider (inclusive)
@param endday: Last day to consider (inclusive)
@return list: of timestamps between startday and endday on which NYSE traded
@rtype datetime
"""
start = startday - timeofday
end = endday - timeofday
dates = GTS_DATES[start:end]
ret = [x + timeofday for x in dates]
return(ret)
def getNextNNYSEdays(startday, days, timeofday):
"""
@summary: Create a list of timestamps from startday that is days days long
that correspond to the days there was trading at NYSE. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
dates=[]
for i in datestxt:
if(len(dates)<days):
if((dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)>=startday):
dates.append(dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)
return(dates)
def getPrevNNYSEday(startday, timeofday):
"""
@summary: This function returns the last valid trading day before the start
day, or returns the start day if it is a valid trading day. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
#''' Set return to first day '''
dtReturn = dt.datetime.strptime( datestxt[0],"%m/%d/%Y")+timeofday
#''' Loop through all but first '''
for i in datestxt[1:]:
dtNext = dt.datetime.strptime(i,"%m/%d/%Y")
#''' If we are > startday, then use previous valid day '''
if( dtNext > startday ):
break
dtReturn = dtNext + timeofday
return(dtReturn)
def ymd2epoch(year, month, day):
"""
@summary: Convert YMD info into a unix epoch value.
@param year: The year
@param month: The month
@param day: The day
@return epoch: number of seconds since epoch
"""
return(t.mktime(dt.date(year,month,day).timetuple()))
def epoch2date(ts):
"""
@summary Convert seconds since epoch into date
@param ts: Seconds since epoch
@return thedate: A date object
"""
tm = t.gmtime(ts)
return(dt.date(tm.tm_year,tm.tm_mon,tm.tm_mday))
def _trade_dates(dt_start, dt_end, s_period):
|
dr_range = pd.DateRange(dt_start | '''
@summary: Generate dates on which we need to trade
@param c_strat: Strategy config class
@param dt_start: Start date
@param dt_end: End date
'''
ldt_timestamps = getNYSEdays(dt_start,
dt_end, dt.timedelta(hours=16) )
# Use pandas reindex method instead
# Note, dates are index as well as values, we select based on index
# but return values since it is a numpy array of datetimes instead of
# pandas specific.
ts_dates = pd.TimeSeries(index=ldt_timestamps, data=ldt_timestamps)
# These are the dates we want
if s_period[:2] == 'BW':
# special case for biweekly | identifier_body |
qsdateutil.py | report.
'''
import datetime as dt
from datetime import timedelta
import time as t
import numpy as np
import os
import pandas as pd
def _cache_dates():
''' Caches dates '''
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure you have NYSE_dates.txt in the qstkutil directory"
datestxt = np.loadtxt(filename, dtype=str)
dates = []
for i in datestxt:
dates.append(dt.datetime.strptime(i, "%m/%d/%Y"))
return pd.TimeSeries(index=dates, data=dates)
GTS_DATES = _cache_dates()
def getMonthNames():
return(['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'])
def getYears(funds):
years=[]
for date in funds.index:
if(not(date.year in years)):
years.append(date.year)
return(years)
def getMonths(funds,year):
months=[]
for date in funds.index:
if((date.year==year) and not(date.month in months)):
months.append(date.month)
return(months)
def getDays(funds,year,month):
days=[]
for date in funds.index:
if((date.year==year) and (date.month==month)):
days.append(date)
return(days)
def getDaysBetween(ts_start, ts_end):
days=[]
for i in range(0,(ts_end-ts_start).days):
days.append(ts_start+timedelta(days=1)*i)
return(days)
def getFirstDay(funds,year,month):
for date in funds.index:
if((date.year==year) and (date.month==month)):
return(date)
return('ERROR')
def getLastDay(funds,year,month):
return_date = 'ERROR'
for date in funds.index:
if((date.year==year) and (date.month==month)):
return_date = date
return(return_date)
def getNextOptionClose(day, trade_days, offset=0):
#get third friday in month of day
#get first of month
year_off=0
if day.month+offset > 12:
year_off = 1
offset = offset - 12
first = dt.datetime(day.year+year_off, day.month+offset, 1, hour=16)
#get weekday
day_num = first.weekday()
#get first friday (friday - weekday) add 7 if less than 1
dif = 5 - day_num
if dif < 1:
dif = dif+7
#move to third friday
dif = dif + 14
friday = first+dt.timedelta(days=(dif-1))
#if friday is a holiday, options expire then
if friday in trade_days:
month_close = first + dt.timedelta(days=dif)
else:
month_close = friday
#if day is past the day after that
if month_close < day:
return_date = getNextOptionClose(day, trade_days, offset=1)
else:
return_date = month_close
return(return_date)
def getLastOptionClose(day, trade_days):
start = day
while getNextOptionClose(day, trade_days)>=start:
day= day - dt.timedelta(days=1)
return(getNextOptionClose(day, trade_days))
def getNYSEoffset(mark, offset):
''' Returns NYSE date offset by number of days '''
mark = mark.replace(hour=0, minute=0, second=0, microsecond=0)
i = GTS_DATES.index.searchsorted(mark, side='right')
# If there is no exact match, take first date in past
if GTS_DATES[i] != mark:
|
ret = GTS_DATES[i + offset]
ret = ret.replace(hour=16)
return ret
def getNYSEdays(startday = dt.datetime(1964,7,5), endday = dt.datetime(2020,12,31),
timeofday = dt.timedelta(0)):
"""
@summary: Create a list of timestamps between startday and endday (inclusive)
that correspond to the days there was trading at the NYSE. This function
depends on a separately created a file that lists all days since July 4,
1962 that the NYSE has been open, going forward to 2020 (based
on the holidays that NYSE recognizes).
@param startday: First timestamp to consider (inclusive)
@param endday: Last day to consider (inclusive)
@return list: of timestamps between startday and endday on which NYSE traded
@rtype datetime
"""
start = startday - timeofday
end = endday - timeofday
dates = GTS_DATES[start:end]
ret = [x + timeofday for x in dates]
return(ret)
def getNextNNYSEdays(startday, days, timeofday):
"""
@summary: Create a list of timestamps from startday that is days days long
that correspond to the days there was trading at NYSE. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
dates=[]
for i in datestxt:
if(len(dates)<days):
if((dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)>=startday):
dates.append(dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)
return(dates)
def getPrevNNYSEday(startday, timeofday):
"""
@summary: This function returns the last valid trading day before the start
day, or returns the start day if it is a valid trading day. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
#''' Set return to first day '''
dtReturn = dt.datetime.strptime( datestxt[0],"%m/%d/%Y")+timeofday
#''' Loop through all but first '''
for i in datestxt[1:]:
dtNext = dt.datetime.strptime(i,"%m/%d/%Y")
#''' If we are > startday, then use previous valid day '''
if( dtNext > startday ):
break
dtReturn = dtNext + timeofday
return(dtReturn)
def ymd2epoch(year, month, day):
"""
@summary: Convert YMD info into a unix epoch value.
@param year: The year
@param month: The month
@param day: The day
@return epoch: number of seconds since epoch
"""
return(t.mktime(dt.date(year,month,day).timetuple()))
def epoch2date(ts):
"""
@summary Convert seconds since epoch into date
@param ts: Seconds since epoch
@return thedate: A date object
"""
tm = t.gmtime(ts)
return(dt.date(tm.tm_year,tm.tm_mon,tm.tm_mday))
def _trade_dates(dt_start, dt_end, s_period):
'''
@summary: Generate dates on which we need to trade
@param c_strat: Strategy config class
@param dt_start: Start date
@param dt_end: End date
'''
ldt_timestamps = getNYSEdays(dt_start,
dt_end, dt.timedelta(hours=16) )
# Use pandas reindex method instead
# Note, dates are index as well as values, we select based on index
# but return values since it is a numpy array of datetimes instead of
# pandas specific.
ts_dates = pd.TimeSeries(index=ldt_timestamps, data=ldt_timestamps)
# These are the dates we want
if s_period[:2] == 'BW':
# special case for biweekly
dr_range = pd.DateRange(dt_start | i -= 1 | conditional_block |
qsdateutil.py | report.
'''
import datetime as dt
from datetime import timedelta
import time as t
import numpy as np
import os
import pandas as pd
def _cache_dates():
''' Caches dates '''
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure you have NYSE_dates.txt in the qstkutil directory"
datestxt = np.loadtxt(filename, dtype=str)
dates = []
for i in datestxt:
dates.append(dt.datetime.strptime(i, "%m/%d/%Y"))
return pd.TimeSeries(index=dates, data=dates)
GTS_DATES = _cache_dates()
def getMonthNames():
return(['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'])
def getYears(funds):
years=[]
for date in funds.index:
if(not(date.year in years)):
years.append(date.year)
return(years)
def getMonths(funds,year):
months=[]
for date in funds.index:
if((date.year==year) and not(date.month in months)):
months.append(date.month)
return(months)
def getDays(funds,year,month):
days=[]
for date in funds.index:
if((date.year==year) and (date.month==month)):
days.append(date)
return(days)
def getDaysBetween(ts_start, ts_end):
days=[]
for i in range(0,(ts_end-ts_start).days):
days.append(ts_start+timedelta(days=1)*i)
return(days)
def getFirstDay(funds,year,month):
for date in funds.index:
if((date.year==year) and (date.month==month)):
return(date)
return('ERROR')
def getLastDay(funds,year,month):
return_date = 'ERROR'
for date in funds.index:
if((date.year==year) and (date.month==month)):
return_date = date
return(return_date)
def getNextOptionClose(day, trade_days, offset=0):
#get third friday in month of day
#get first of month
year_off=0
if day.month+offset > 12:
year_off = 1
offset = offset - 12
first = dt.datetime(day.year+year_off, day.month+offset, 1, hour=16)
#get weekday
day_num = first.weekday()
#get first friday (friday - weekday) add 7 if less than 1
dif = 5 - day_num
if dif < 1:
dif = dif+7
#move to third friday
dif = dif + 14
friday = first+dt.timedelta(days=(dif-1))
#if friday is a holiday, options expire then
if friday in trade_days:
month_close = first + dt.timedelta(days=dif)
else:
month_close = friday
#if day is past the day after that
if month_close < day:
return_date = getNextOptionClose(day, trade_days, offset=1)
else:
return_date = month_close
return(return_date)
def getLastOptionClose(day, trade_days):
start = day
while getNextOptionClose(day, trade_days)>=start:
day= day - dt.timedelta(days=1)
return(getNextOptionClose(day, trade_days))
def getNYSEoffset(mark, offset):
''' Returns NYSE date offset by number of days '''
mark = mark.replace(hour=0, minute=0, second=0, microsecond=0)
i = GTS_DATES.index.searchsorted(mark, side='right')
# If there is no exact match, take first date in past
if GTS_DATES[i] != mark:
i -= 1
|
ret = ret.replace(hour=16)
return ret
def getNYSEdays(startday = dt.datetime(1964,7,5), endday = dt.datetime(2020,12,31),
timeofday = dt.timedelta(0)):
"""
@summary: Create a list of timestamps between startday and endday (inclusive)
that correspond to the days there was trading at the NYSE. This function
depends on a separately created a file that lists all days since July 4,
1962 that the NYSE has been open, going forward to 2020 (based
on the holidays that NYSE recognizes).
@param startday: First timestamp to consider (inclusive)
@param endday: Last day to consider (inclusive)
@return list: of timestamps between startday and endday on which NYSE traded
@rtype datetime
"""
start = startday - timeofday
end = endday - timeofday
dates = GTS_DATES[start:end]
ret = [x + timeofday for x in dates]
return(ret)
def getNextNNYSEdays(startday, days, timeofday):
"""
@summary: Create a list of timestamps from startday that is days days long
that correspond to the days there was trading at NYSE. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
dates=[]
for i in datestxt:
if(len(dates)<days):
if((dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)>=startday):
dates.append(dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)
return(dates)
def getPrevNNYSEday(startday, timeofday):
"""
@summary: This function returns the last valid trading day before the start
day, or returns the start day if it is a valid trading day. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
#''' Set return to first day '''
dtReturn = dt.datetime.strptime( datestxt[0],"%m/%d/%Y")+timeofday
#''' Loop through all but first '''
for i in datestxt[1:]:
dtNext = dt.datetime.strptime(i,"%m/%d/%Y")
#''' If we are > startday, then use previous valid day '''
if( dtNext > startday ):
break
dtReturn = dtNext + timeofday
return(dtReturn)
def ymd2epoch(year, month, day):
"""
@summary: Convert YMD info into a unix epoch value.
@param year: The year
@param month: The month
@param day: The day
@return epoch: number of seconds since epoch
"""
return(t.mktime(dt.date(year,month,day).timetuple()))
def epoch2date(ts):
"""
@summary Convert seconds since epoch into date
@param ts: Seconds since epoch
@return thedate: A date object
"""
tm = t.gmtime(ts)
return(dt.date(tm.tm_year,tm.tm_mon,tm.tm_mday))
def _trade_dates(dt_start, dt_end, s_period):
'''
@summary: Generate dates on which we need to trade
@param c_strat: Strategy config class
@param dt_start: Start date
@param dt_end: End date
'''
ldt_timestamps = getNYSEdays(dt_start,
dt_end, dt.timedelta(hours=16) )
# Use pandas reindex method instead
# Note, dates are index as well as values, we select based on index
# but return values since it is a numpy array of datetimes instead of
# pandas specific.
ts_dates = pd.TimeSeries(index=ldt_timestamps, data=ldt_timestamps)
# These are the dates we want
if s_period[:2] == 'BW':
# special case for biweekly
dr_range = pd.DateRange | ret = GTS_DATES[i + offset] | random_line_split |
qsdateutil.py | report.
'''
import datetime as dt
from datetime import timedelta
import time as t
import numpy as np
import os
import pandas as pd
def _cache_dates():
''' Caches dates '''
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure you have NYSE_dates.txt in the qstkutil directory"
datestxt = np.loadtxt(filename, dtype=str)
dates = []
for i in datestxt:
dates.append(dt.datetime.strptime(i, "%m/%d/%Y"))
return pd.TimeSeries(index=dates, data=dates)
GTS_DATES = _cache_dates()
def getMonthNames():
return(['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'])
def getYears(funds):
years=[]
for date in funds.index:
if(not(date.year in years)):
years.append(date.year)
return(years)
def getMonths(funds,year):
months=[]
for date in funds.index:
if((date.year==year) and not(date.month in months)):
months.append(date.month)
return(months)
def getDays(funds,year,month):
days=[]
for date in funds.index:
if((date.year==year) and (date.month==month)):
days.append(date)
return(days)
def getDaysBetween(ts_start, ts_end):
days=[]
for i in range(0,(ts_end-ts_start).days):
days.append(ts_start+timedelta(days=1)*i)
return(days)
def getFirstDay(funds,year,month):
for date in funds.index:
if((date.year==year) and (date.month==month)):
return(date)
return('ERROR')
def getLastDay(funds,year,month):
return_date = 'ERROR'
for date in funds.index:
if((date.year==year) and (date.month==month)):
return_date = date
return(return_date)
def getNextOptionClose(day, trade_days, offset=0):
#get third friday in month of day
#get first of month
year_off=0
if day.month+offset > 12:
year_off = 1
offset = offset - 12
first = dt.datetime(day.year+year_off, day.month+offset, 1, hour=16)
#get weekday
day_num = first.weekday()
#get first friday (friday - weekday) add 7 if less than 1
dif = 5 - day_num
if dif < 1:
dif = dif+7
#move to third friday
dif = dif + 14
friday = first+dt.timedelta(days=(dif-1))
#if friday is a holiday, options expire then
if friday in trade_days:
month_close = first + dt.timedelta(days=dif)
else:
month_close = friday
#if day is past the day after that
if month_close < day:
return_date = getNextOptionClose(day, trade_days, offset=1)
else:
return_date = month_close
return(return_date)
def getLastOptionClose(day, trade_days):
start = day
while getNextOptionClose(day, trade_days)>=start:
day= day - dt.timedelta(days=1)
return(getNextOptionClose(day, trade_days))
def getNYSEoffset(mark, offset):
''' Returns NYSE date offset by number of days '''
mark = mark.replace(hour=0, minute=0, second=0, microsecond=0)
i = GTS_DATES.index.searchsorted(mark, side='right')
# If there is no exact match, take first date in past
if GTS_DATES[i] != mark:
i -= 1
ret = GTS_DATES[i + offset]
ret = ret.replace(hour=16)
return ret
def getNYSEdays(startday = dt.datetime(1964,7,5), endday = dt.datetime(2020,12,31),
timeofday = dt.timedelta(0)):
"""
@summary: Create a list of timestamps between startday and endday (inclusive)
that correspond to the days there was trading at the NYSE. This function
depends on a separately created a file that lists all days since July 4,
1962 that the NYSE has been open, going forward to 2020 (based
on the holidays that NYSE recognizes).
@param startday: First timestamp to consider (inclusive)
@param endday: Last day to consider (inclusive)
@return list: of timestamps between startday and endday on which NYSE traded
@rtype datetime
"""
start = startday - timeofday
end = endday - timeofday
dates = GTS_DATES[start:end]
ret = [x + timeofday for x in dates]
return(ret)
def | (startday, days, timeofday):
"""
@summary: Create a list of timestamps from startday that is days days long
that correspond to the days there was trading at NYSE. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
dates=[]
for i in datestxt:
if(len(dates)<days):
if((dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)>=startday):
dates.append(dt.datetime.strptime(i,"%m/%d/%Y")+timeofday)
return(dates)
def getPrevNNYSEday(startday, timeofday):
"""
@summary: This function returns the last valid trading day before the start
day, or returns the start day if it is a valid trading day. This function
depends on the file used in getNYSEdays and assumes the dates within are
in order.
@param startday: First timestamp to consider (inclusive)
@param days: Number of timestamps to return
@return list: List of timestamps starting at startday on which NYSE traded
@rtype datetime
"""
try:
# filename = os.environ['QS'] + "/qstkutil/NYSE_dates.txt"
filename = os.path.join(os.path.dirname(__file__), 'NYSE_dates.txt')
except KeyError:
print "Please be sure to set the value for QS in config.sh or\n"
print "in local.sh and then \'source local.sh\'.\n"
datestxt = np.loadtxt(filename,dtype=str)
#''' Set return to first day '''
dtReturn = dt.datetime.strptime( datestxt[0],"%m/%d/%Y")+timeofday
#''' Loop through all but first '''
for i in datestxt[1:]:
dtNext = dt.datetime.strptime(i,"%m/%d/%Y")
#''' If we are > startday, then use previous valid day '''
if( dtNext > startday ):
break
dtReturn = dtNext + timeofday
return(dtReturn)
def ymd2epoch(year, month, day):
"""
@summary: Convert YMD info into a unix epoch value.
@param year: The year
@param month: The month
@param day: The day
@return epoch: number of seconds since epoch
"""
return(t.mktime(dt.date(year,month,day).timetuple()))
def epoch2date(ts):
"""
@summary Convert seconds since epoch into date
@param ts: Seconds since epoch
@return thedate: A date object
"""
tm = t.gmtime(ts)
return(dt.date(tm.tm_year,tm.tm_mon,tm.tm_mday))
def _trade_dates(dt_start, dt_end, s_period):
'''
@summary: Generate dates on which we need to trade
@param c_strat: Strategy config class
@param dt_start: Start date
@param dt_end: End date
'''
ldt_timestamps = getNYSEdays(dt_start,
dt_end, dt.timedelta(hours=16) )
# Use pandas reindex method instead
# Note, dates are index as well as values, we select based on index
# but return values since it is a numpy array of datetimes instead of
# pandas specific.
ts_dates = pd.TimeSeries(index=ldt_timestamps, data=ldt_timestamps)
# These are the dates we want
if s_period[:2] == 'BW':
# special case for biweekly
dr_range = pd.DateRange(dt | getNextNNYSEdays | identifier_name |
ResponsiveLayout.tsx | import * as React from "react"
import {
PropertyControls,
Frame,
addPropertyControls,
ControlType,
} from "framer"
// Responsive Layout Component
// @steveruizok
type Child = React.ReactElement<any>
const defaultStyle: React.CSSProperties = {
height: "100%",
display: "flex",
flexDirection: "column",
alignItems: "center",
justifyContent: "center",
textAlign: "center",
color: "#333",
fontSize: "3em",
fontWeight: "bold",
background: "rgba(255, 255, 255, 1)",
border: "2px solid #eee",
borderRadius: "2px",
overflow: "hidden",
padding: "16px",
}
const DefaultDisplay = props => (
<div style={defaultStyle}>
<p>R</p>
</div>
)
// Define type of property
interface Props {
layouts: Child[]
height: number
width: number
}
export const ResponsiveLayout = props => {
const container = React.useRef<HTMLDivElement>()
const [state, setState] = React.useState({
height: props.height,
width: props.width,
})
const getLayout = width => {
const { layouts } = props
const ls = [...layouts]
if (ls.length === 1) return ls[0]
// Sort filtered layouts
const sorted = ls.sort((a, b) => a.props.width - b.props.width)
console.log(sorted.map(f => f.props.width))
// Filter sorted to only those that fit under this component's width
const filtered = sorted.filter((l: Child) => {
// console.log(width, l.props.width)
return l.props.width < width
})
// if nothing fits, return the most narrow layout
if (filtered.length === 0) {
return sorted[0]
}
// otherwise, return the largest layout among those that fit
return filtered[filtered.length - 1]
}
const updateLayout = () => {
if (!container.current) {
return
}
const width = container.current.offsetWidth
setState({
...state,
width,
})
}
React.useEffect(() => {
updateLayout()
window.addEventListener("resize", updateLayout)
return () => window.removeEventListener("resize", updateLayout)
}, [])
React.useEffect(() => {
updateLayout()
}, [props.height, props.width, container.current])
const { layouts } = props
// If we don't have layouts, use the default display.
if (layouts.length === 0) {
return <DefaultDisplay />
}
const { width } = state
const l = getLayout(width)
const layout = React.cloneElement(l, {
width: "100%",
height: "100%",
})
// Now that element to fill this frame's frame (frame frame... framer)
return (
<Frame ref={container} width="100%" height="100%">
{layout}
</Frame>
)
}
| propertyControl: {
type: ControlType.ComponentInstance,
},
},
}) | addPropertyControls(ResponsiveLayout, {
layouts: {
type: ControlType.Array,
title: "Layouts", | random_line_split |
ResponsiveLayout.tsx | import * as React from "react"
import {
PropertyControls,
Frame,
addPropertyControls,
ControlType,
} from "framer"
// Responsive Layout Component
// @steveruizok
type Child = React.ReactElement<any>
const defaultStyle: React.CSSProperties = {
height: "100%",
display: "flex",
flexDirection: "column",
alignItems: "center",
justifyContent: "center",
textAlign: "center",
color: "#333",
fontSize: "3em",
fontWeight: "bold",
background: "rgba(255, 255, 255, 1)",
border: "2px solid #eee",
borderRadius: "2px",
overflow: "hidden",
padding: "16px",
}
const DefaultDisplay = props => (
<div style={defaultStyle}>
<p>R</p>
</div>
)
// Define type of property
interface Props {
layouts: Child[]
height: number
width: number
}
export const ResponsiveLayout = props => {
const container = React.useRef<HTMLDivElement>()
const [state, setState] = React.useState({
height: props.height,
width: props.width,
})
const getLayout = width => {
const { layouts } = props
const ls = [...layouts]
if (ls.length === 1) return ls[0]
// Sort filtered layouts
const sorted = ls.sort((a, b) => a.props.width - b.props.width)
console.log(sorted.map(f => f.props.width))
// Filter sorted to only those that fit under this component's width
const filtered = sorted.filter((l: Child) => {
// console.log(width, l.props.width)
return l.props.width < width
})
// if nothing fits, return the most narrow layout
if (filtered.length === 0) |
// otherwise, return the largest layout among those that fit
return filtered[filtered.length - 1]
}
const updateLayout = () => {
if (!container.current) {
return
}
const width = container.current.offsetWidth
setState({
...state,
width,
})
}
React.useEffect(() => {
updateLayout()
window.addEventListener("resize", updateLayout)
return () => window.removeEventListener("resize", updateLayout)
}, [])
React.useEffect(() => {
updateLayout()
}, [props.height, props.width, container.current])
const { layouts } = props
// If we don't have layouts, use the default display.
if (layouts.length === 0) {
return <DefaultDisplay />
}
const { width } = state
const l = getLayout(width)
const layout = React.cloneElement(l, {
width: "100%",
height: "100%",
})
// Now that element to fill this frame's frame (frame frame... framer)
return (
<Frame ref={container} width="100%" height="100%">
{layout}
</Frame>
)
}
addPropertyControls(ResponsiveLayout, {
layouts: {
type: ControlType.Array,
title: "Layouts",
propertyControl: {
type: ControlType.ComponentInstance,
},
},
})
| {
return sorted[0]
} | conditional_block |
function.rs | use object::values::Value;
use object::string::InString;
use vm::opcode::Instruction;
#[derive(PartialEq,Eq,Hash,Debug,Clone)]
pub struct LocVar {
varname: InString,
startpc: u32, // first point where variable is active
endpc: u32 // first point where variable is dead
}
// Masks for vararg
pub const HASARG : u8 = 1;
pub const ISVARARG: u8 = 2;
pub const NEEDSARG: u8 = 4;
/// Function prototypes
#[derive(PartialEq,Eq,Hash,Debug,Clone)]
pub struct Proto {
k: Vec<Value>, // constants used by the function
code: Vec<Instruction>,
p: Vec<Proto>, // functions defined inside the function
lineinfo: Vec<u32>, // map from instructions to source lines
locvars: Vec<LocVar>, // information about local variables
upvalues: Vec<InString>, // upvalue names
source: InString, // source code for this function
linedefined: u32,
lastlinedefined: u32,
//TODO: gclist?
nups: u8, // number of upvalues
numparams: u8,
is_vararg: u8, // OR'd values of HASARG, ISVARARG, NEEDSARG
maxstacksize: u8
}
impl LocVar {
pub fn new(varname: InString, startpc: u32, endpc: u32) -> LocVar
{
LocVar {
varname: varname,
startpc: startpc,
endpc: endpc
}
}
}
impl Proto {
pub fn new(source: InString,
linedefined: u32,
lastlinedefined: u32,
nups: u8,
numparams: u8,
is_vararg: u8,
maxstacksize: u8)
-> Proto {
Proto {
| code: Vec::new(),
p: Vec::new(),
lineinfo: Vec::new(),
locvars: Vec::new(),
upvalues: Vec::new(),
source: source,
linedefined: linedefined,
lastlinedefined: lastlinedefined,
nups: nups,
numparams: numparams,
is_vararg: is_vararg,
maxstacksize: maxstacksize
}
}
} | k: Vec::new(),
| random_line_split |
function.rs | use object::values::Value;
use object::string::InString;
use vm::opcode::Instruction;
#[derive(PartialEq,Eq,Hash,Debug,Clone)]
pub struct LocVar {
varname: InString,
startpc: u32, // first point where variable is active
endpc: u32 // first point where variable is dead
}
// Masks for vararg
pub const HASARG : u8 = 1;
pub const ISVARARG: u8 = 2;
pub const NEEDSARG: u8 = 4;
/// Function prototypes
#[derive(PartialEq,Eq,Hash,Debug,Clone)]
pub struct Proto {
k: Vec<Value>, // constants used by the function
code: Vec<Instruction>,
p: Vec<Proto>, // functions defined inside the function
lineinfo: Vec<u32>, // map from instructions to source lines
locvars: Vec<LocVar>, // information about local variables
upvalues: Vec<InString>, // upvalue names
source: InString, // source code for this function
linedefined: u32,
lastlinedefined: u32,
//TODO: gclist?
nups: u8, // number of upvalues
numparams: u8,
is_vararg: u8, // OR'd values of HASARG, ISVARARG, NEEDSARG
maxstacksize: u8
}
impl LocVar {
pub fn new(varname: InString, startpc: u32, endpc: u32) -> LocVar
|
}
impl Proto {
pub fn new(source: InString,
linedefined: u32,
lastlinedefined: u32,
nups: u8,
numparams: u8,
is_vararg: u8,
maxstacksize: u8)
-> Proto {
Proto {
k: Vec::new(),
code: Vec::new(),
p: Vec::new(),
lineinfo: Vec::new(),
locvars: Vec::new(),
upvalues: Vec::new(),
source: source,
linedefined: linedefined,
lastlinedefined: lastlinedefined,
nups: nups,
numparams: numparams,
is_vararg: is_vararg,
maxstacksize: maxstacksize
}
}
}
| {
LocVar {
varname: varname,
startpc: startpc,
endpc: endpc
}
} | identifier_body |
function.rs | use object::values::Value;
use object::string::InString;
use vm::opcode::Instruction;
#[derive(PartialEq,Eq,Hash,Debug,Clone)]
pub struct LocVar {
varname: InString,
startpc: u32, // first point where variable is active
endpc: u32 // first point where variable is dead
}
// Masks for vararg
pub const HASARG : u8 = 1;
pub const ISVARARG: u8 = 2;
pub const NEEDSARG: u8 = 4;
/// Function prototypes
#[derive(PartialEq,Eq,Hash,Debug,Clone)]
pub struct Proto {
k: Vec<Value>, // constants used by the function
code: Vec<Instruction>,
p: Vec<Proto>, // functions defined inside the function
lineinfo: Vec<u32>, // map from instructions to source lines
locvars: Vec<LocVar>, // information about local variables
upvalues: Vec<InString>, // upvalue names
source: InString, // source code for this function
linedefined: u32,
lastlinedefined: u32,
//TODO: gclist?
nups: u8, // number of upvalues
numparams: u8,
is_vararg: u8, // OR'd values of HASARG, ISVARARG, NEEDSARG
maxstacksize: u8
}
impl LocVar {
pub fn new(varname: InString, startpc: u32, endpc: u32) -> LocVar
{
LocVar {
varname: varname,
startpc: startpc,
endpc: endpc
}
}
}
impl Proto {
pub fn | (source: InString,
linedefined: u32,
lastlinedefined: u32,
nups: u8,
numparams: u8,
is_vararg: u8,
maxstacksize: u8)
-> Proto {
Proto {
k: Vec::new(),
code: Vec::new(),
p: Vec::new(),
lineinfo: Vec::new(),
locvars: Vec::new(),
upvalues: Vec::new(),
source: source,
linedefined: linedefined,
lastlinedefined: lastlinedefined,
nups: nups,
numparams: numparams,
is_vararg: is_vararg,
maxstacksize: maxstacksize
}
}
}
| new | identifier_name |
app.py | from flask import Flask, request
import sendgrid
import json
import requests
import os
app = Flask(__name__)
SENDGRID_USER = os.getenv('SENDGRID_USER')
SENDGRID_PASS = os.getenv('SENDGRID_PASS')
ONENOTE_TOKEN = os.getenv('ONENOTE_TOKEN')
# Make the WSGI interface available at the top level so wfastcgi can get it.
wsgi_app = app.wsgi_app
sg = sendgrid.SendGridClient(SENDGRID_USER, SENDGRID_PASS)
# Receive a POST from the SendGrid Event Webhook
@app.route('/event', methods = ['POST'])
def event():
message = sendgrid.Mail()
message.add_to('Elmer Thomas <[email protected]>')
message.set_subject('Bounce Alert')
data = request.stream.read().decode("utf-8")
data = json.loads(data)
for i in range(len(data)):
# For a list of all event types see: https://sendgrid.com/docs/API_Reference/Webhooks/event.html
event = data[i]['event']
if event == "bounce":
# Create and post the OneNote message
url = "https://www.onenote.com/api/v1.0/pages"
auth = 'Bearer ' + ONENOTE_TOKEN
body = "An email from " + data[i]['email'] + " bounced. You might want to do something about that :)"
payload = "<!DOCTYPE HTML><html><head><title>Bounced Email Alert</title></head>"
payload += "<body>" + body + "</body></html>"
headers = {'Authorization':auth,'Content-type':'text/html'}
res = requests.post(url, headers=headers, data=payload)
# Send an email alert
mail = "An email sent to " + data[i]['email'] + " bounced. Return value from OneNote is: " + res.text
message.set_html(mail)
message.set_text(mail)
message.set_from('Elmer Thomas <[email protected]>')
status, msg = sg.send(message)
return "HTTP/1.1 200 OK"
@app.route('/', methods = ['GET'])
def hello():
|
@app.route('/tos', methods = ['GET'])
def tos():
return "Terms of Service Placeholder."
@app.route('/privacy', methods = ['GET'])
def privacy():
return "Privacy Policy Placeholder."
if __name__ == '__main__':
import os
HOST = os.environ.get('SERVER_HOST', 'localhost')
try:
PORT = int(os.environ.get('SERVER_PORT', '5555'))
except ValueError:
PORT = 5555
app.run(HOST, PORT)
| """Renders a sample page."""
return "Hello Universe!" | identifier_body |
app.py | from flask import Flask, request
import sendgrid
import json
import requests
import os
app = Flask(__name__)
SENDGRID_USER = os.getenv('SENDGRID_USER')
SENDGRID_PASS = os.getenv('SENDGRID_PASS')
ONENOTE_TOKEN = os.getenv('ONENOTE_TOKEN')
# Make the WSGI interface available at the top level so wfastcgi can get it.
wsgi_app = app.wsgi_app
sg = sendgrid.SendGridClient(SENDGRID_USER, SENDGRID_PASS)
# Receive a POST from the SendGrid Event Webhook
@app.route('/event', methods = ['POST'])
def event():
message = sendgrid.Mail()
message.add_to('Elmer Thomas <[email protected]>')
message.set_subject('Bounce Alert')
data = request.stream.read().decode("utf-8")
data = json.loads(data)
for i in range(len(data)):
# For a list of all event types see: https://sendgrid.com/docs/API_Reference/Webhooks/event.html
event = data[i]['event']
if event == "bounce":
# Create and post the OneNote message
url = "https://www.onenote.com/api/v1.0/pages"
auth = 'Bearer ' + ONENOTE_TOKEN
body = "An email from " + data[i]['email'] + " bounced. You might want to do something about that :)"
payload = "<!DOCTYPE HTML><html><head><title>Bounced Email Alert</title></head>"
payload += "<body>" + body + "</body></html>"
headers = {'Authorization':auth,'Content-type':'text/html'}
res = requests.post(url, headers=headers, data=payload)
# Send an email alert
mail = "An email sent to " + data[i]['email'] + " bounced. Return value from OneNote is: " + res.text
message.set_html(mail)
message.set_text(mail)
message.set_from('Elmer Thomas <[email protected]>')
status, msg = sg.send(message)
return "HTTP/1.1 200 OK"
@app.route('/', methods = ['GET'])
def hello():
"""Renders a sample page."""
return "Hello Universe!"
@app.route('/tos', methods = ['GET'])
def tos():
return "Terms of Service Placeholder."
@app.route('/privacy', methods = ['GET'])
def | ():
return "Privacy Policy Placeholder."
if __name__ == '__main__':
import os
HOST = os.environ.get('SERVER_HOST', 'localhost')
try:
PORT = int(os.environ.get('SERVER_PORT', '5555'))
except ValueError:
PORT = 5555
app.run(HOST, PORT)
| privacy | identifier_name |
app.py | from flask import Flask, request
import sendgrid
import json
import requests
import os
app = Flask(__name__)
SENDGRID_USER = os.getenv('SENDGRID_USER')
SENDGRID_PASS = os.getenv('SENDGRID_PASS')
ONENOTE_TOKEN = os.getenv('ONENOTE_TOKEN')
# Make the WSGI interface available at the top level so wfastcgi can get it.
wsgi_app = app.wsgi_app
sg = sendgrid.SendGridClient(SENDGRID_USER, SENDGRID_PASS)
# Receive a POST from the SendGrid Event Webhook
@app.route('/event', methods = ['POST'])
def event():
message = sendgrid.Mail()
message.add_to('Elmer Thomas <[email protected]>')
message.set_subject('Bounce Alert')
data = request.stream.read().decode("utf-8")
data = json.loads(data)
for i in range(len(data)):
# For a list of all event types see: https://sendgrid.com/docs/API_Reference/Webhooks/event.html
event = data[i]['event']
if event == "bounce":
# Create and post the OneNote message
url = "https://www.onenote.com/api/v1.0/pages"
auth = 'Bearer ' + ONENOTE_TOKEN
body = "An email from " + data[i]['email'] + " bounced. You might want to do something about that :)"
payload = "<!DOCTYPE HTML><html><head><title>Bounced Email Alert</title></head>"
payload += "<body>" + body + "</body></html>"
headers = {'Authorization':auth,'Content-type':'text/html'}
res = requests.post(url, headers=headers, data=payload)
# Send an email alert
mail = "An email sent to " + data[i]['email'] + " bounced. Return value from OneNote is: " + res.text
message.set_html(mail)
message.set_text(mail)
message.set_from('Elmer Thomas <[email protected]>')
status, msg = sg.send(message)
return "HTTP/1.1 200 OK"
@app.route('/', methods = ['GET'])
def hello():
"""Renders a sample page."""
return "Hello Universe!"
@app.route('/tos', methods = ['GET'])
def tos():
return "Terms of Service Placeholder."
@app.route('/privacy', methods = ['GET'])
def privacy():
return "Privacy Policy Placeholder."
if __name__ == '__main__':
| import os
HOST = os.environ.get('SERVER_HOST', 'localhost')
try:
PORT = int(os.environ.get('SERVER_PORT', '5555'))
except ValueError:
PORT = 5555
app.run(HOST, PORT) | conditional_block |
|
app.py | from flask import Flask, request
import sendgrid
import json
import requests
import os
app = Flask(__name__)
SENDGRID_USER = os.getenv('SENDGRID_USER')
SENDGRID_PASS = os.getenv('SENDGRID_PASS')
ONENOTE_TOKEN = os.getenv('ONENOTE_TOKEN')
# Make the WSGI interface available at the top level so wfastcgi can get it.
wsgi_app = app.wsgi_app
sg = sendgrid.SendGridClient(SENDGRID_USER, SENDGRID_PASS)
# Receive a POST from the SendGrid Event Webhook
@app.route('/event', methods = ['POST'])
def event():
message = sendgrid.Mail()
message.add_to('Elmer Thomas <[email protected]>')
message.set_subject('Bounce Alert')
data = request.stream.read().decode("utf-8")
data = json.loads(data)
for i in range(len(data)):
# For a list of all event types see: https://sendgrid.com/docs/API_Reference/Webhooks/event.html
event = data[i]['event']
if event == "bounce":
# Create and post the OneNote message | auth = 'Bearer ' + ONENOTE_TOKEN
body = "An email from " + data[i]['email'] + " bounced. You might want to do something about that :)"
payload = "<!DOCTYPE HTML><html><head><title>Bounced Email Alert</title></head>"
payload += "<body>" + body + "</body></html>"
headers = {'Authorization':auth,'Content-type':'text/html'}
res = requests.post(url, headers=headers, data=payload)
# Send an email alert
mail = "An email sent to " + data[i]['email'] + " bounced. Return value from OneNote is: " + res.text
message.set_html(mail)
message.set_text(mail)
message.set_from('Elmer Thomas <[email protected]>')
status, msg = sg.send(message)
return "HTTP/1.1 200 OK"
@app.route('/', methods = ['GET'])
def hello():
"""Renders a sample page."""
return "Hello Universe!"
@app.route('/tos', methods = ['GET'])
def tos():
return "Terms of Service Placeholder."
@app.route('/privacy', methods = ['GET'])
def privacy():
return "Privacy Policy Placeholder."
if __name__ == '__main__':
import os
HOST = os.environ.get('SERVER_HOST', 'localhost')
try:
PORT = int(os.environ.get('SERVER_PORT', '5555'))
except ValueError:
PORT = 5555
app.run(HOST, PORT) | url = "https://www.onenote.com/api/v1.0/pages" | random_line_split |
first.py | #!/usr/bin/python
# usage: A debugging class
import pdb
version=2.0
def my_add(a,b):
''' This is the function for addition of numbers and strings '''
print "value of a is {}".format(a)
print "value of b is {}".format(b)
return a+b
def my_div(a,b):
''' This is the function for division '''
return a/b
def my_sub(a,b):
''' This is the function for substraction '''
if a > b:
return a - b
elif b > a:
return b - a
def my_mul(a,b):
''' This is the function for multiplication '''
return a * b
# Application code
if __name__ == '__main__': | print "Congo, i learned to write a calculator"
pdb.set_trace()
print "summation of two numbers- {}".format(my_add(1,2))
print "multiplication of two numbers- {}".format(my_mul(1,2))
print "substartion of two numbers - {}".format(my_sub(1,2))
print "division of two numbers - {}".format(my_div(4,2)) | print "This is a example on understading debugging" | random_line_split |
first.py | #!/usr/bin/python
# usage: A debugging class
import pdb
version=2.0
def my_add(a,b):
''' This is the function for addition of numbers and strings '''
print "value of a is {}".format(a)
print "value of b is {}".format(b)
return a+b
def | (a,b):
''' This is the function for division '''
return a/b
def my_sub(a,b):
''' This is the function for substraction '''
if a > b:
return a - b
elif b > a:
return b - a
def my_mul(a,b):
''' This is the function for multiplication '''
return a * b
# Application code
if __name__ == '__main__':
print "This is a example on understading debugging"
print "Congo, i learned to write a calculator"
pdb.set_trace()
print "summation of two numbers- {}".format(my_add(1,2))
print "multiplication of two numbers- {}".format(my_mul(1,2))
print "substartion of two numbers - {}".format(my_sub(1,2))
print "division of two numbers - {}".format(my_div(4,2))
| my_div | identifier_name |
first.py | #!/usr/bin/python
# usage: A debugging class
import pdb
version=2.0
def my_add(a,b):
''' This is the function for addition of numbers and strings '''
print "value of a is {}".format(a)
print "value of b is {}".format(b)
return a+b
def my_div(a,b):
''' This is the function for division '''
return a/b
def my_sub(a,b):
''' This is the function for substraction '''
if a > b:
return a - b
elif b > a:
return b - a
def my_mul(a,b):
|
# Application code
if __name__ == '__main__':
print "This is a example on understading debugging"
print "Congo, i learned to write a calculator"
pdb.set_trace()
print "summation of two numbers- {}".format(my_add(1,2))
print "multiplication of two numbers- {}".format(my_mul(1,2))
print "substartion of two numbers - {}".format(my_sub(1,2))
print "division of two numbers - {}".format(my_div(4,2))
| ''' This is the function for multiplication '''
return a * b | identifier_body |
first.py | #!/usr/bin/python
# usage: A debugging class
import pdb
version=2.0
def my_add(a,b):
''' This is the function for addition of numbers and strings '''
print "value of a is {}".format(a)
print "value of b is {}".format(b)
return a+b
def my_div(a,b):
''' This is the function for division '''
return a/b
def my_sub(a,b):
''' This is the function for substraction '''
if a > b:
return a - b
elif b > a:
|
def my_mul(a,b):
''' This is the function for multiplication '''
return a * b
# Application code
if __name__ == '__main__':
print "This is a example on understading debugging"
print "Congo, i learned to write a calculator"
pdb.set_trace()
print "summation of two numbers- {}".format(my_add(1,2))
print "multiplication of two numbers- {}".format(my_mul(1,2))
print "substartion of two numbers - {}".format(my_sub(1,2))
print "division of two numbers - {}".format(my_div(4,2))
| return b - a | conditional_block |
custom.js | var baseUrlServer = 'http://luzparatodos.o3midia.com.br/';
$('#btn_cadastrar_usuario').click(function(){
dadosForm = $('#form_login').serialize();
console.log(dadosForm);
});
// Wait for device API libraries to load
//
document.addEventListener("deviceready", onDeviceReady, false);
// device APIs are available
//
function onDeviceReady()
{
$('#btn_login').click(function(){
dadosForm = $('#form_login').serialize();
$.post(baseUrlServer,dadosForm,function(r){
});
});
}
var endereco = 'VAZIO';
// onSuccess Geolocation
//
function onSuccess(position) {
// var element = document.getElementById('geolocation');
// element.innerHTML = 'Latitude: ' + position.coords.latitude + '<br />' +
// var msg = 'Latitude: ' + position.coords.latitude + '<br />' +
// 'Longitude: ' + position.coords.longitude + '<br />' +
// 'Altitude: ' + position.coords.altitude + '<br />' +
// 'Accuracy: ' + position.coords.accuracy + '<br />' +
// 'Altitude Accuracy: ' + position.coords.altitudeAccuracy + '<br />' +
// 'Heading: ' + position.coords.heading + '<br />' +
// 'Speed: ' + position.coords.speed + '<br />' +
// 'Timestamp: ' + position.timestamp + '<br />';
var urlGmaps = 'http://maps.googleapis.com/maps/api/geocode/json?' + 'latlng='+position.coords.latitude+','+position.coords.longitude+'&sensor=false';
$.getJSON(urlGmaps,function(dataJson){
endereco = dataJson.results[0].formatted_address;
$('#paragrafo_tela_estou_aqui').html(endereco);
$('#btn_salvar_endereco_tela_estou_aqui,#observacao_tela_estou_aqui').show();
$.mobile.loading('hide');
});
}
$('#tela-estou-aqui').on('pagehide',function(){
$('#paragrafo_tela_estou_aqui').html('');
$('#btn_salvar_endereco_tela_estou_aqui,#observacao_tela_estou_aqui').hide();
});
$('#tela-estou-aqui').on('pageshow',function(){
$.mobile.loading('show'); | navigator.geolocation.getCurrentPosition(onSuccess, onError);
});
$('#btn_salvar_endereco_tela_estou_aqui').click(function(){
alert(endereco);
});
// onError Callback receives a PositionError object
//
function onError(error) {
alert('code: ' + error.code + '\n' +
'message: ' + error.message + '\n');
} | random_line_split |
|
custom.js |
var baseUrlServer = 'http://luzparatodos.o3midia.com.br/';
$('#btn_cadastrar_usuario').click(function(){
dadosForm = $('#form_login').serialize();
console.log(dadosForm);
});
// Wait for device API libraries to load
//
document.addEventListener("deviceready", onDeviceReady, false);
// device APIs are available
//
function onDeviceReady()
{
$('#btn_login').click(function(){
dadosForm = $('#form_login').serialize();
$.post(baseUrlServer,dadosForm,function(r){
});
});
}
var endereco = 'VAZIO';
// onSuccess Geolocation
//
function onSuccess(position) {
// var element = document.getElementById('geolocation');
// element.innerHTML = 'Latitude: ' + position.coords.latitude + '<br />' +
// var msg = 'Latitude: ' + position.coords.latitude + '<br />' +
// 'Longitude: ' + position.coords.longitude + '<br />' +
// 'Altitude: ' + position.coords.altitude + '<br />' +
// 'Accuracy: ' + position.coords.accuracy + '<br />' +
// 'Altitude Accuracy: ' + position.coords.altitudeAccuracy + '<br />' +
// 'Heading: ' + position.coords.heading + '<br />' +
// 'Speed: ' + position.coords.speed + '<br />' +
// 'Timestamp: ' + position.timestamp + '<br />';
var urlGmaps = 'http://maps.googleapis.com/maps/api/geocode/json?' + 'latlng='+position.coords.latitude+','+position.coords.longitude+'&sensor=false';
$.getJSON(urlGmaps,function(dataJson){
endereco = dataJson.results[0].formatted_address;
$('#paragrafo_tela_estou_aqui').html(endereco);
$('#btn_salvar_endereco_tela_estou_aqui,#observacao_tela_estou_aqui').show();
$.mobile.loading('hide');
});
}
$('#tela-estou-aqui').on('pagehide',function(){
$('#paragrafo_tela_estou_aqui').html('');
$('#btn_salvar_endereco_tela_estou_aqui,#observacao_tela_estou_aqui').hide();
});
$('#tela-estou-aqui').on('pageshow',function(){
$.mobile.loading('show');
navigator.geolocation.getCurrentPosition(onSuccess, onError);
});
$('#btn_salvar_endereco_tela_estou_aqui').click(function(){
alert(endereco);
});
// onError Callback receives a PositionError object
//
function | (error) {
alert('code: ' + error.code + '\n' +
'message: ' + error.message + '\n');
} | onError | identifier_name |
custom.js |
var baseUrlServer = 'http://luzparatodos.o3midia.com.br/';
$('#btn_cadastrar_usuario').click(function(){
dadosForm = $('#form_login').serialize();
console.log(dadosForm);
});
// Wait for device API libraries to load
//
document.addEventListener("deviceready", onDeviceReady, false);
// device APIs are available
//
function onDeviceReady()
{
$('#btn_login').click(function(){
dadosForm = $('#form_login').serialize();
$.post(baseUrlServer,dadosForm,function(r){
});
});
}
var endereco = 'VAZIO';
// onSuccess Geolocation
//
function onSuccess(position) | }
$('#tela-estou-aqui').on('pagehide',function(){
$('#paragrafo_tela_estou_aqui').html('');
$('#btn_salvar_endereco_tela_estou_aqui,#observacao_tela_estou_aqui').hide();
});
$('#tela-estou-aqui').on('pageshow',function(){
$.mobile.loading('show');
navigator.geolocation.getCurrentPosition(onSuccess, onError);
});
$('#btn_salvar_endereco_tela_estou_aqui').click(function(){
alert(endereco);
});
// onError Callback receives a PositionError object
//
function onError(error) {
alert('code: ' + error.code + '\n' +
'message: ' + error.message + '\n');
} | {
// var element = document.getElementById('geolocation');
// element.innerHTML = 'Latitude: ' + position.coords.latitude + '<br />' +
// var msg = 'Latitude: ' + position.coords.latitude + '<br />' +
// 'Longitude: ' + position.coords.longitude + '<br />' +
// 'Altitude: ' + position.coords.altitude + '<br />' +
// 'Accuracy: ' + position.coords.accuracy + '<br />' +
// 'Altitude Accuracy: ' + position.coords.altitudeAccuracy + '<br />' +
// 'Heading: ' + position.coords.heading + '<br />' +
// 'Speed: ' + position.coords.speed + '<br />' +
// 'Timestamp: ' + position.timestamp + '<br />';
var urlGmaps = 'http://maps.googleapis.com/maps/api/geocode/json?' + 'latlng='+position.coords.latitude+','+position.coords.longitude+'&sensor=false';
$.getJSON(urlGmaps,function(dataJson){
endereco = dataJson.results[0].formatted_address;
$('#paragrafo_tela_estou_aqui').html(endereco);
$('#btn_salvar_endereco_tela_estou_aqui,#observacao_tela_estou_aqui').show();
$.mobile.loading('hide');
}); | identifier_body |
loading.tsx | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
import React from "react";
import { Icon } from "./icon";
import { TypedMap } from "../app-framework";
export type Estimate = TypedMap<{
time: number; // Time in seconds
type: "new" | "ready" | "archived";
}>;
export const Estimate = null; // webpack + TS es2020 modules need this
interface Props {
style?: React.CSSProperties;
text?: string;
estimate?: Estimate;
theme?: "medium" | undefined;
delay?: number; // if given, don't show anything until after delay milliseconds. The component could easily unmount by then, and hence never annoyingly flicker on screen.
}
const LOADING_THEMES: { [keys: string]: React.CSSProperties } = {
medium: {
fontSize: "24pt",
textAlign: "center",
marginTop: "15px",
color: "#888",
background: "white",
},
};
export class Loading extends React.Component<Props> {
static defaultProps = { text: "Loading..." };
render_estimate() {
if (this.props.estimate != undefined) {
| }
render() {
let style: React.CSSProperties | undefined = undefined;
if (this.props.style != undefined) {
style = this.props.style;
} else if (this.props.theme != undefined) {
style = LOADING_THEMES[this.props.theme];
}
return (
<div style={style}>
<span>
<Icon name="cocalc-ring" spin /> {this.props.text}
</span>
{this.render_estimate()}
</div>
);
}
}
| return (
<div>
Loading '{this.props.estimate.get("type")}' file.
<br />
Estimated time: {this.props.estimate.get("time")}s
</div>
);
}
| conditional_block |
loading.tsx | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
import React from "react";
import { Icon } from "./icon";
import { TypedMap } from "../app-framework";
export type Estimate = TypedMap<{
time: number; // Time in seconds
type: "new" | "ready" | "archived";
}>;
export const Estimate = null; // webpack + TS es2020 modules need this
interface Props {
style?: React.CSSProperties;
text?: string;
estimate?: Estimate;
theme?: "medium" | undefined;
delay?: number; // if given, don't show anything until after delay milliseconds. The component could easily unmount by then, and hence never annoyingly flicker on screen.
}
const LOADING_THEMES: { [keys: string]: React.CSSProperties } = {
medium: {
fontSize: "24pt",
textAlign: "center",
marginTop: "15px",
color: "#888",
background: "white",
},
};
export class Loading extends React.Component<Props> {
static defaultProps = { text: "Loading..." };
ren | {
if (this.props.estimate != undefined) {
return (
<div>
Loading '{this.props.estimate.get("type")}' file.
<br />
Estimated time: {this.props.estimate.get("time")}s
</div>
);
}
}
render() {
let style: React.CSSProperties | undefined = undefined;
if (this.props.style != undefined) {
style = this.props.style;
} else if (this.props.theme != undefined) {
style = LOADING_THEMES[this.props.theme];
}
return (
<div style={style}>
<span>
<Icon name="cocalc-ring" spin /> {this.props.text}
</span>
{this.render_estimate()}
</div>
);
}
}
| der_estimate() | identifier_name |
loading.tsx | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
import React from "react";
import { Icon } from "./icon";
import { TypedMap } from "../app-framework";
export type Estimate = TypedMap<{
time: number; // Time in seconds
type: "new" | "ready" | "archived";
}>;
export const Estimate = null; // webpack + TS es2020 modules need this
interface Props {
style?: React.CSSProperties;
text?: string;
estimate?: Estimate;
theme?: "medium" | undefined;
delay?: number; // if given, don't show anything until after delay milliseconds. The component could easily unmount by then, and hence never annoyingly flicker on screen.
}
const LOADING_THEMES: { [keys: string]: React.CSSProperties } = {
medium: {
fontSize: "24pt",
textAlign: "center",
marginTop: "15px",
color: "#888",
background: "white",
},
};
export class Loading extends React.Component<Props> {
static defaultProps = { text: "Loading..." };
render_estimate() {
if (this.props.estimate != undefined) {
return (
<div> | Loading '{this.props.estimate.get("type")}' file.
<br />
Estimated time: {this.props.estimate.get("time")}s
</div>
);
}
}
render() {
let style: React.CSSProperties | undefined = undefined;
if (this.props.style != undefined) {
style = this.props.style;
} else if (this.props.theme != undefined) {
style = LOADING_THEMES[this.props.theme];
}
return (
<div style={style}>
<span>
<Icon name="cocalc-ring" spin /> {this.props.text}
</span>
{this.render_estimate()}
</div>
);
}
} | random_line_split |
|
loading.tsx | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
import React from "react";
import { Icon } from "./icon";
import { TypedMap } from "../app-framework";
export type Estimate = TypedMap<{
time: number; // Time in seconds
type: "new" | "ready" | "archived";
}>;
export const Estimate = null; // webpack + TS es2020 modules need this
interface Props {
style?: React.CSSProperties;
text?: string;
estimate?: Estimate;
theme?: "medium" | undefined;
delay?: number; // if given, don't show anything until after delay milliseconds. The component could easily unmount by then, and hence never annoyingly flicker on screen.
}
const LOADING_THEMES: { [keys: string]: React.CSSProperties } = {
medium: {
fontSize: "24pt",
textAlign: "center",
marginTop: "15px",
color: "#888",
background: "white",
},
};
export class Loading extends React.Component<Props> {
static defaultProps = { text: "Loading..." };
render_estimate() {
| render() {
let style: React.CSSProperties | undefined = undefined;
if (this.props.style != undefined) {
style = this.props.style;
} else if (this.props.theme != undefined) {
style = LOADING_THEMES[this.props.theme];
}
return (
<div style={style}>
<span>
<Icon name="cocalc-ring" spin /> {this.props.text}
</span>
{this.render_estimate()}
</div>
);
}
}
| if (this.props.estimate != undefined) {
return (
<div>
Loading '{this.props.estimate.get("type")}' file.
<br />
Estimated time: {this.props.estimate.get("time")}s
</div>
);
}
}
| identifier_body |
index.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* | */
export * from './datasource-view/datasource-view';
export * from './home-view/home-view';
export * from './ingestion-view/ingestion-view';
export * from './load-data-view/load-data-view';
export * from './lookups-view/lookups-view';
export * from './query-view/query-view';
export * from './segments-view/segments-view';
export * from './services-view/services-view'; | * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. | random_line_split |
gallery.component.spec.ts | /* tslint:disable:no-unused-variable */
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DebugElement } from '@angular/core';
import { StoreModule } from '@ngrx/store';
import { RouterTestingModule } from '@angular/router/testing';
import { SharedModule } from './../../shared/shared.module';
import { StoreService } from './../../core/store.service';
import { ScoreService } from './../../core/score.service';
import { rootReducer } from '../../state/root-reducer';
import { AdminGalleryComponent } from './gallery.component';
import { TimerComponent, NextComponent } from './../components';
describe('AdminGalleryComponent', () => {
let component: AdminGalleryComponent;
let fixture: ComponentFixture<AdminGalleryComponent>; | beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ AdminGalleryComponent, TimerComponent, NextComponent ],
imports: [
RouterTestingModule,
StoreModule.provideStore(rootReducer),
SharedModule
],
providers: [ScoreService, StoreService]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(AdminGalleryComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
}); | random_line_split |
|
test.py | # Copyright 2015, Oliver Nagy <[email protected]>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
"""
This module does not contain any tests but utility functions often used in
other tests.
"""
import os
import base64
import subprocess
import numpy as np
import azrael.leonard
from azrael.types import FragMeta, FragDae, FragRaw, FragNone, Template
from azrael.types import CollShapeMeta, CollShapeEmpty, CollShapeSphere
from azrael.types import CollShapeBox, CollShapePlane, RigidBodyData
from azrael.types import Constraint6DofSpring2, ConstraintP2P, ConstraintMeta
def killAzrael():
subprocess.call(['pkill', 'Azreal:'])
# Delete all grids used in this test.
assert azrael.vectorgrid.deleteAllGrids().ok
azrael.database.init()
def getLeonard(LeonardCls=azrael.leonard.LeonardBase):
"""
Return a ``LeonardCls`` instance.
This is a convenience function to reduce code duplication in tests.
:param cls LeonardCls: Leonard class to instantiate.
"""
# Return a Leonard instance.
leo = LeonardCls()
leo.setup()
return leo
def getCSEmpty(pos=[0, 0, 0], rot=[0, 0, 0, 1]):
"""
Convenience function to construct an Empty shape.
"""
return CollShapeMeta('empty', pos, rot, CollShapeEmpty())
def getCSBox(pos=[0, 0, 0], rot=[0, 0, 0, 1], dim=[1, 1, 1]):
"""
Convenience function to construct a Box shape.
"""
return CollShapeMeta('box', pos, rot, CollShapeBox(*dim))
def getCSSphere(pos=[0, 0, 0], rot=[0, 0, 0, 1], radius=1):
"""
Convenience function to construct a Sphere shape.
"""
return CollShapeMeta('sphere', pos, rot, CollShapeSphere(radius))
def getCSPlane(pos=[0, 0, 0], rot=[0, 0, 0, 1], normal=[0, 0, 1], ofs=0):
"""
Convenience function to construct a Plane in the x/y dimension.
"""
return CollShapeMeta('plane', pos, rot, CollShapePlane(normal, ofs))
def getFragNone(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct an empty geometry element.
"""
return FragMeta(fragtype='_del_', scale=scale, position=pos,
rotation=rot, fragdata=FragNone())
def getFragRaw(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Raw geometry.
"""
vert = np.random.randint(0, 100, 9).tolist()
uv = np.random.randint(0, 100, 6).tolist()
rgb = np.random.randint(0, 100, 3).tolist()
geo = FragRaw(vert, uv, rgb)
return FragMeta(fragtype='RAW', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getFragDae(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Collada geometry.
"""
b = os.path.dirname(__file__)
dae_file = open(b + '/cube.dae', 'rb').read()
dae_rgb1 = open(b + '/rgb1.png', 'rb').read()
dae_rgb2 = open(b + '/rgb2.jpg', 'rb').read()
dae_file = base64.b64encode(dae_file).decode('utf8')
dae_rgb1 = base64.b64encode(dae_rgb1).decode('utf8')
dae_rgb2 = base64.b64encode(dae_rgb2).decode('utf8')
geo = FragDae(dae=dae_file,
rgb={'rgb1.png': dae_rgb1,
'rgb2.jpg': dae_rgb2})
return FragMeta(fragtype='DAE', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getP2P(aid='constraint_p2p', rb_a=1, rb_b=2,
pivot_a=(0, 0, -1), pivot_b=(0, 0, 1)):
"""
Return a Point2Point constraint for bodies ``rb_a`` and ``rb_b`.
"""
p2p = ConstraintP2P(pivot_a, pivot_b)
return ConstraintMeta(aid, 'p2p', rb_a, rb_b, p2p)
def get6DofSpring2(aid='constraint_6dofspring2', rb_a=1, rb_b=2):
"""
Return a 6DofSpring2 constraint for bodies ``rb_a`` and ``rb_b`.
"""
dof = Constraint6DofSpring2(
frameInA=(0, 0, 0, 0, 0, 0, 1),
frameInB=(0, 0, 0, 0, 0, 0, 1),
stiffness=(1, 2, 3, 4, 5.5, 6),
damping=(2, 3.5, 4, 5, 6.5, 7),
equilibrium=(-1, -1, -1, 0, 0, 0),
linLimitLo=(-10.5, -10.5, -10.5),
linLimitHi=(10.5, 10.5, 10.5),
rotLimitLo=(-0.1, -0.2, -0.3),
rotLimitHi=(0.1, 0.2, 0.3),
bounce=(1, 1.5, 2),
enableSpring=(True, False, False, False, False, False))
return ConstraintMeta(aid, '6DOFSPRING2', rb_a, rb_b, dof)
def getRigidBody(scale: (int, float)=1,
imass: (int, float)=1,
restitution: (int, float)=0.9,
rotation: (tuple, list)=(0, 0, 0, 1),
position: (tuple, list, np.ndarray)=(0, 0, 0),
velocityLin: (tuple, list, np.ndarray)=(0, 0, 0),
velocityRot: (tuple, list, np.ndarray)=(0, 0, 0),
cshapes: dict={'cssphere': getCSSphere()},
axesLockLin: (tuple, list, np.ndarray)=(1, 1, 1),
axesLockRot: (tuple, list, np.ndarray)=(1, 1, 1),
version: int=0):
return RigidBodyData(scale, imass, restitution, rotation, position,
velocityLin, velocityRot, cshapes, axesLockLin,
axesLockRot, version)
def getTemplate(name='template',
rbs=None,
fragments={},
boosters={},
factories={}):
if rbs is None:
|
return Template(name, rbs, fragments, boosters, factories)
| rbs = getRigidBody(cshapes={'cssphere': getCSSphere()}) | conditional_block |
test.py | # Copyright 2015, Oliver Nagy <[email protected]>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
"""
This module does not contain any tests but utility functions often used in
other tests.
"""
import os
import base64
import subprocess
import numpy as np
import azrael.leonard
from azrael.types import FragMeta, FragDae, FragRaw, FragNone, Template
from azrael.types import CollShapeMeta, CollShapeEmpty, CollShapeSphere
from azrael.types import CollShapeBox, CollShapePlane, RigidBodyData
from azrael.types import Constraint6DofSpring2, ConstraintP2P, ConstraintMeta
def killAzrael():
subprocess.call(['pkill', 'Azreal:'])
# Delete all grids used in this test.
assert azrael.vectorgrid.deleteAllGrids().ok
azrael.database.init()
def getLeonard(LeonardCls=azrael.leonard.LeonardBase):
"""
Return a ``LeonardCls`` instance.
This is a convenience function to reduce code duplication in tests.
:param cls LeonardCls: Leonard class to instantiate.
"""
# Return a Leonard instance.
leo = LeonardCls()
leo.setup()
return leo
def getCSEmpty(pos=[0, 0, 0], rot=[0, 0, 0, 1]):
"""
Convenience function to construct an Empty shape.
"""
return CollShapeMeta('empty', pos, rot, CollShapeEmpty())
def getCSBox(pos=[0, 0, 0], rot=[0, 0, 0, 1], dim=[1, 1, 1]):
|
def getCSSphere(pos=[0, 0, 0], rot=[0, 0, 0, 1], radius=1):
"""
Convenience function to construct a Sphere shape.
"""
return CollShapeMeta('sphere', pos, rot, CollShapeSphere(radius))
def getCSPlane(pos=[0, 0, 0], rot=[0, 0, 0, 1], normal=[0, 0, 1], ofs=0):
"""
Convenience function to construct a Plane in the x/y dimension.
"""
return CollShapeMeta('plane', pos, rot, CollShapePlane(normal, ofs))
def getFragNone(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct an empty geometry element.
"""
return FragMeta(fragtype='_del_', scale=scale, position=pos,
rotation=rot, fragdata=FragNone())
def getFragRaw(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Raw geometry.
"""
vert = np.random.randint(0, 100, 9).tolist()
uv = np.random.randint(0, 100, 6).tolist()
rgb = np.random.randint(0, 100, 3).tolist()
geo = FragRaw(vert, uv, rgb)
return FragMeta(fragtype='RAW', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getFragDae(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Collada geometry.
"""
b = os.path.dirname(__file__)
dae_file = open(b + '/cube.dae', 'rb').read()
dae_rgb1 = open(b + '/rgb1.png', 'rb').read()
dae_rgb2 = open(b + '/rgb2.jpg', 'rb').read()
dae_file = base64.b64encode(dae_file).decode('utf8')
dae_rgb1 = base64.b64encode(dae_rgb1).decode('utf8')
dae_rgb2 = base64.b64encode(dae_rgb2).decode('utf8')
geo = FragDae(dae=dae_file,
rgb={'rgb1.png': dae_rgb1,
'rgb2.jpg': dae_rgb2})
return FragMeta(fragtype='DAE', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getP2P(aid='constraint_p2p', rb_a=1, rb_b=2,
pivot_a=(0, 0, -1), pivot_b=(0, 0, 1)):
"""
Return a Point2Point constraint for bodies ``rb_a`` and ``rb_b`.
"""
p2p = ConstraintP2P(pivot_a, pivot_b)
return ConstraintMeta(aid, 'p2p', rb_a, rb_b, p2p)
def get6DofSpring2(aid='constraint_6dofspring2', rb_a=1, rb_b=2):
"""
Return a 6DofSpring2 constraint for bodies ``rb_a`` and ``rb_b`.
"""
dof = Constraint6DofSpring2(
frameInA=(0, 0, 0, 0, 0, 0, 1),
frameInB=(0, 0, 0, 0, 0, 0, 1),
stiffness=(1, 2, 3, 4, 5.5, 6),
damping=(2, 3.5, 4, 5, 6.5, 7),
equilibrium=(-1, -1, -1, 0, 0, 0),
linLimitLo=(-10.5, -10.5, -10.5),
linLimitHi=(10.5, 10.5, 10.5),
rotLimitLo=(-0.1, -0.2, -0.3),
rotLimitHi=(0.1, 0.2, 0.3),
bounce=(1, 1.5, 2),
enableSpring=(True, False, False, False, False, False))
return ConstraintMeta(aid, '6DOFSPRING2', rb_a, rb_b, dof)
def getRigidBody(scale: (int, float)=1,
imass: (int, float)=1,
restitution: (int, float)=0.9,
rotation: (tuple, list)=(0, 0, 0, 1),
position: (tuple, list, np.ndarray)=(0, 0, 0),
velocityLin: (tuple, list, np.ndarray)=(0, 0, 0),
velocityRot: (tuple, list, np.ndarray)=(0, 0, 0),
cshapes: dict={'cssphere': getCSSphere()},
axesLockLin: (tuple, list, np.ndarray)=(1, 1, 1),
axesLockRot: (tuple, list, np.ndarray)=(1, 1, 1),
version: int=0):
return RigidBodyData(scale, imass, restitution, rotation, position,
velocityLin, velocityRot, cshapes, axesLockLin,
axesLockRot, version)
def getTemplate(name='template',
rbs=None,
fragments={},
boosters={},
factories={}):
if rbs is None:
rbs = getRigidBody(cshapes={'cssphere': getCSSphere()})
return Template(name, rbs, fragments, boosters, factories)
| """
Convenience function to construct a Box shape.
"""
return CollShapeMeta('box', pos, rot, CollShapeBox(*dim)) | identifier_body |
test.py | # Copyright 2015, Oliver Nagy <[email protected]>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the | # Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
"""
This module does not contain any tests but utility functions often used in
other tests.
"""
import os
import base64
import subprocess
import numpy as np
import azrael.leonard
from azrael.types import FragMeta, FragDae, FragRaw, FragNone, Template
from azrael.types import CollShapeMeta, CollShapeEmpty, CollShapeSphere
from azrael.types import CollShapeBox, CollShapePlane, RigidBodyData
from azrael.types import Constraint6DofSpring2, ConstraintP2P, ConstraintMeta
def killAzrael():
subprocess.call(['pkill', 'Azreal:'])
# Delete all grids used in this test.
assert azrael.vectorgrid.deleteAllGrids().ok
azrael.database.init()
def getLeonard(LeonardCls=azrael.leonard.LeonardBase):
"""
Return a ``LeonardCls`` instance.
This is a convenience function to reduce code duplication in tests.
:param cls LeonardCls: Leonard class to instantiate.
"""
# Return a Leonard instance.
leo = LeonardCls()
leo.setup()
return leo
def getCSEmpty(pos=[0, 0, 0], rot=[0, 0, 0, 1]):
"""
Convenience function to construct an Empty shape.
"""
return CollShapeMeta('empty', pos, rot, CollShapeEmpty())
def getCSBox(pos=[0, 0, 0], rot=[0, 0, 0, 1], dim=[1, 1, 1]):
"""
Convenience function to construct a Box shape.
"""
return CollShapeMeta('box', pos, rot, CollShapeBox(*dim))
def getCSSphere(pos=[0, 0, 0], rot=[0, 0, 0, 1], radius=1):
"""
Convenience function to construct a Sphere shape.
"""
return CollShapeMeta('sphere', pos, rot, CollShapeSphere(radius))
def getCSPlane(pos=[0, 0, 0], rot=[0, 0, 0, 1], normal=[0, 0, 1], ofs=0):
"""
Convenience function to construct a Plane in the x/y dimension.
"""
return CollShapeMeta('plane', pos, rot, CollShapePlane(normal, ofs))
def getFragNone(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct an empty geometry element.
"""
return FragMeta(fragtype='_del_', scale=scale, position=pos,
rotation=rot, fragdata=FragNone())
def getFragRaw(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Raw geometry.
"""
vert = np.random.randint(0, 100, 9).tolist()
uv = np.random.randint(0, 100, 6).tolist()
rgb = np.random.randint(0, 100, 3).tolist()
geo = FragRaw(vert, uv, rgb)
return FragMeta(fragtype='RAW', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getFragDae(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Collada geometry.
"""
b = os.path.dirname(__file__)
dae_file = open(b + '/cube.dae', 'rb').read()
dae_rgb1 = open(b + '/rgb1.png', 'rb').read()
dae_rgb2 = open(b + '/rgb2.jpg', 'rb').read()
dae_file = base64.b64encode(dae_file).decode('utf8')
dae_rgb1 = base64.b64encode(dae_rgb1).decode('utf8')
dae_rgb2 = base64.b64encode(dae_rgb2).decode('utf8')
geo = FragDae(dae=dae_file,
rgb={'rgb1.png': dae_rgb1,
'rgb2.jpg': dae_rgb2})
return FragMeta(fragtype='DAE', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getP2P(aid='constraint_p2p', rb_a=1, rb_b=2,
pivot_a=(0, 0, -1), pivot_b=(0, 0, 1)):
"""
Return a Point2Point constraint for bodies ``rb_a`` and ``rb_b`.
"""
p2p = ConstraintP2P(pivot_a, pivot_b)
return ConstraintMeta(aid, 'p2p', rb_a, rb_b, p2p)
def get6DofSpring2(aid='constraint_6dofspring2', rb_a=1, rb_b=2):
"""
Return a 6DofSpring2 constraint for bodies ``rb_a`` and ``rb_b`.
"""
dof = Constraint6DofSpring2(
frameInA=(0, 0, 0, 0, 0, 0, 1),
frameInB=(0, 0, 0, 0, 0, 0, 1),
stiffness=(1, 2, 3, 4, 5.5, 6),
damping=(2, 3.5, 4, 5, 6.5, 7),
equilibrium=(-1, -1, -1, 0, 0, 0),
linLimitLo=(-10.5, -10.5, -10.5),
linLimitHi=(10.5, 10.5, 10.5),
rotLimitLo=(-0.1, -0.2, -0.3),
rotLimitHi=(0.1, 0.2, 0.3),
bounce=(1, 1.5, 2),
enableSpring=(True, False, False, False, False, False))
return ConstraintMeta(aid, '6DOFSPRING2', rb_a, rb_b, dof)
def getRigidBody(scale: (int, float)=1,
imass: (int, float)=1,
restitution: (int, float)=0.9,
rotation: (tuple, list)=(0, 0, 0, 1),
position: (tuple, list, np.ndarray)=(0, 0, 0),
velocityLin: (tuple, list, np.ndarray)=(0, 0, 0),
velocityRot: (tuple, list, np.ndarray)=(0, 0, 0),
cshapes: dict={'cssphere': getCSSphere()},
axesLockLin: (tuple, list, np.ndarray)=(1, 1, 1),
axesLockRot: (tuple, list, np.ndarray)=(1, 1, 1),
version: int=0):
return RigidBodyData(scale, imass, restitution, rotation, position,
velocityLin, velocityRot, cshapes, axesLockLin,
axesLockRot, version)
def getTemplate(name='template',
rbs=None,
fragments={},
boosters={},
factories={}):
if rbs is None:
rbs = getRigidBody(cshapes={'cssphere': getCSSphere()})
return Template(name, rbs, fragments, boosters, factories) | # License, or (at your option) any later version.
# | random_line_split |
test.py | # Copyright 2015, Oliver Nagy <[email protected]>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
"""
This module does not contain any tests but utility functions often used in
other tests.
"""
import os
import base64
import subprocess
import numpy as np
import azrael.leonard
from azrael.types import FragMeta, FragDae, FragRaw, FragNone, Template
from azrael.types import CollShapeMeta, CollShapeEmpty, CollShapeSphere
from azrael.types import CollShapeBox, CollShapePlane, RigidBodyData
from azrael.types import Constraint6DofSpring2, ConstraintP2P, ConstraintMeta
def killAzrael():
subprocess.call(['pkill', 'Azreal:'])
# Delete all grids used in this test.
assert azrael.vectorgrid.deleteAllGrids().ok
azrael.database.init()
def getLeonard(LeonardCls=azrael.leonard.LeonardBase):
"""
Return a ``LeonardCls`` instance.
This is a convenience function to reduce code duplication in tests.
:param cls LeonardCls: Leonard class to instantiate.
"""
# Return a Leonard instance.
leo = LeonardCls()
leo.setup()
return leo
def getCSEmpty(pos=[0, 0, 0], rot=[0, 0, 0, 1]):
"""
Convenience function to construct an Empty shape.
"""
return CollShapeMeta('empty', pos, rot, CollShapeEmpty())
def getCSBox(pos=[0, 0, 0], rot=[0, 0, 0, 1], dim=[1, 1, 1]):
"""
Convenience function to construct a Box shape.
"""
return CollShapeMeta('box', pos, rot, CollShapeBox(*dim))
def getCSSphere(pos=[0, 0, 0], rot=[0, 0, 0, 1], radius=1):
"""
Convenience function to construct a Sphere shape.
"""
return CollShapeMeta('sphere', pos, rot, CollShapeSphere(radius))
def getCSPlane(pos=[0, 0, 0], rot=[0, 0, 0, 1], normal=[0, 0, 1], ofs=0):
"""
Convenience function to construct a Plane in the x/y dimension.
"""
return CollShapeMeta('plane', pos, rot, CollShapePlane(normal, ofs))
def getFragNone(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct an empty geometry element.
"""
return FragMeta(fragtype='_del_', scale=scale, position=pos,
rotation=rot, fragdata=FragNone())
def getFragRaw(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Raw geometry.
"""
vert = np.random.randint(0, 100, 9).tolist()
uv = np.random.randint(0, 100, 6).tolist()
rgb = np.random.randint(0, 100, 3).tolist()
geo = FragRaw(vert, uv, rgb)
return FragMeta(fragtype='RAW', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getFragDae(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
"""
Convenience function to construct a valid Collada geometry.
"""
b = os.path.dirname(__file__)
dae_file = open(b + '/cube.dae', 'rb').read()
dae_rgb1 = open(b + '/rgb1.png', 'rb').read()
dae_rgb2 = open(b + '/rgb2.jpg', 'rb').read()
dae_file = base64.b64encode(dae_file).decode('utf8')
dae_rgb1 = base64.b64encode(dae_rgb1).decode('utf8')
dae_rgb2 = base64.b64encode(dae_rgb2).decode('utf8')
geo = FragDae(dae=dae_file,
rgb={'rgb1.png': dae_rgb1,
'rgb2.jpg': dae_rgb2})
return FragMeta(fragtype='DAE', scale=scale, position=pos,
rotation=rot, fragdata=geo)
def getP2P(aid='constraint_p2p', rb_a=1, rb_b=2,
pivot_a=(0, 0, -1), pivot_b=(0, 0, 1)):
"""
Return a Point2Point constraint for bodies ``rb_a`` and ``rb_b`.
"""
p2p = ConstraintP2P(pivot_a, pivot_b)
return ConstraintMeta(aid, 'p2p', rb_a, rb_b, p2p)
def get6DofSpring2(aid='constraint_6dofspring2', rb_a=1, rb_b=2):
"""
Return a 6DofSpring2 constraint for bodies ``rb_a`` and ``rb_b`.
"""
dof = Constraint6DofSpring2(
frameInA=(0, 0, 0, 0, 0, 0, 1),
frameInB=(0, 0, 0, 0, 0, 0, 1),
stiffness=(1, 2, 3, 4, 5.5, 6),
damping=(2, 3.5, 4, 5, 6.5, 7),
equilibrium=(-1, -1, -1, 0, 0, 0),
linLimitLo=(-10.5, -10.5, -10.5),
linLimitHi=(10.5, 10.5, 10.5),
rotLimitLo=(-0.1, -0.2, -0.3),
rotLimitHi=(0.1, 0.2, 0.3),
bounce=(1, 1.5, 2),
enableSpring=(True, False, False, False, False, False))
return ConstraintMeta(aid, '6DOFSPRING2', rb_a, rb_b, dof)
def | (scale: (int, float)=1,
imass: (int, float)=1,
restitution: (int, float)=0.9,
rotation: (tuple, list)=(0, 0, 0, 1),
position: (tuple, list, np.ndarray)=(0, 0, 0),
velocityLin: (tuple, list, np.ndarray)=(0, 0, 0),
velocityRot: (tuple, list, np.ndarray)=(0, 0, 0),
cshapes: dict={'cssphere': getCSSphere()},
axesLockLin: (tuple, list, np.ndarray)=(1, 1, 1),
axesLockRot: (tuple, list, np.ndarray)=(1, 1, 1),
version: int=0):
return RigidBodyData(scale, imass, restitution, rotation, position,
velocityLin, velocityRot, cshapes, axesLockLin,
axesLockRot, version)
def getTemplate(name='template',
rbs=None,
fragments={},
boosters={},
factories={}):
if rbs is None:
rbs = getRigidBody(cshapes={'cssphere': getCSSphere()})
return Template(name, rbs, fragments, boosters, factories)
| getRigidBody | identifier_name |
classorg_1_1onosproject_1_1provider_1_1lldp_1_1impl_1_1SuppressionConfigTest.js | var classorg_1_1onosproject_1_1provider_1_1lldp_1_1impl_1_1SuppressionConfigTest =
[
[ "setUp", "classorg_1_1onosproject_1_1provider_1_1lldp_1_1impl_1_1SuppressionConfigTest.html#a2dce61fc6869e60bcb0f5be7616b985d", null ],
[ "testDeviceAnnotation", "classorg_1_1onosproject_1_1provider_1_1lldp_1_1impl_1_1SuppressionConfigTest.html#aea5c73ac191d40a0a4986ed2bdb6a751", null ], | [ "testDeviceTypes", "classorg_1_1onosproject_1_1provider_1_1lldp_1_1impl_1_1SuppressionConfigTest.html#add66c76a6bf03715757f058c32cf5e34", null ]
]; | random_line_split |
|
test_pdb_chain.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for `pdb_chain`.
"""
import os
import sys
import unittest
from config import data_dir
from utils import OutputCapture
class TestTool(unittest.TestCase):
"""
Generic class for testing tools.
"""
def setUp(self):
# Dynamically import the module
name = 'pdbtools.pdb_chain'
self.module = __import__(name, fromlist=[''])
def exec_module(self):
"""
Execs module.
"""
with OutputCapture() as output:
try:
self.module.main()
except SystemExit as e:
self.retcode = e.code
self.stdout = output.stdout
self.stderr = output.stderr
return
def test_default(self):
"""$ pdb_chain data/dummy.pdb"""
# Simulate input
sys.argv = ['', os.path.join(data_dir, 'dummy.pdb')]
# Execute the script
self.exec_module()
# Validate results
self.assertEqual(self.retcode, 0) # ensure the program exited OK.
self.assertEqual(len(self.stdout), 204) # no lines deleted
self.assertEqual(len(self.stderr), 0) # no errors
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, [' '])
def test_two_options(self):
"""$ pdb_chain -X data/dummy.pdb"""
sys.argv = ['', '-X', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 0)
self.assertEqual(len(self.stdout), 204)
self.assertEqual(len(self.stderr), 0)
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, ['X'])
def test_file_not_found(self):
"""$ pdb_chain -A not_existing.pdb"""
afile = os.path.join(data_dir, 'not_existing.pdb')
sys.argv = ['', '-A', afile]
self.exec_module()
self.assertEqual(self.retcode, 1) # exit code is 1 (error)
self.assertEqual(len(self.stdout), 0) # nothing written to stdout
self.assertEqual(self.stderr[0][:22],
"ERROR!! File not found") # proper error message
def test_file_missing(self):
"""$ pdb_chain -A"""
sys.argv = ['', '-A']
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr[0],
"ERROR!! No data to process!")
def test_helptext(self):
"""$ pdb_chain"""
sys.argv = ['']
self.exec_module()
self.assertEqual(self.retcode, 1) # ensure the program exited gracefully.
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr, self.module.__doc__.split("\n")[:-1])
def test_invalid_option(self):
"""$ pdb_chain -AH data/dummy.pdb"""
sys.argv = ['', '-AH', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0][:47],
"ERROR!! Chain identifiers must be a single char")
def test_not_an_option(self):
"""$ pdb_chain A data/dummy.pdb"""
sys.argv = ['', 'A', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0],
"ERROR! First argument is not an option: 'A'")
if __name__ == '__main__':
f | rom config import test_dir
mpath = os.path.abspath(os.path.join(test_dir, '..'))
sys.path.insert(0, mpath) # so we load dev files before any installation
unittest.main()
| conditional_block |
|
test_pdb_chain.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for `pdb_chain`.
"""
import os
import sys
import unittest
from config import data_dir
from utils import OutputCapture
class TestTool(unittest.TestCase):
"""
Generic class for testing tools.
"""
def setUp(self):
# Dynamically import the module
name = 'pdbtools.pdb_chain'
self.module = __import__(name, fromlist=[''])
def exec_module(self):
"""
Execs module.
"""
with OutputCapture() as output:
try:
self.module.main()
except SystemExit as e:
self.retcode = e.code
self.stdout = output.stdout
self.stderr = output.stderr
return
def test_default(self):
"""$ pdb_chain data/dummy.pdb"""
# Simulate input
sys.argv = ['', os.path.join(data_dir, 'dummy.pdb')]
# Execute the script
self.exec_module()
# Validate results
self.assertEqual(self.retcode, 0) # ensure the program exited OK.
self.assertEqual(len(self.stdout), 204) # no lines deleted
self.assertEqual(len(self.stderr), 0) # no errors
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, [' '])
def test_two_options(self):
"""$ pdb_chain -X data/dummy.pdb"""
sys.argv = ['', '-X', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 0)
self.assertEqual(len(self.stdout), 204)
self.assertEqual(len(self.stderr), 0)
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, ['X'])
def test_file_not_found(self):
"""$ pdb_chain -A not_existing.pdb"""
afile = os.path.join(data_dir, 'not_existing.pdb')
sys.argv = ['', '-A', afile]
self.exec_module()
self.assertEqual(self.retcode, 1) # exit code is 1 (error)
self.assertEqual(len(self.stdout), 0) # nothing written to stdout
self.assertEqual(self.stderr[0][:22],
"ERROR!! File not found") # proper error message
def test_file_missing(self):
"""$ pdb_chain -A"""
sys.argv = ['', '-A']
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr[0],
"ERROR!! No data to process!")
def test_helptext(self):
"""$ pdb_chain"""
sys.argv = [''] | self.exec_module()
self.assertEqual(self.retcode, 1) # ensure the program exited gracefully.
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr, self.module.__doc__.split("\n")[:-1])
def test_invalid_option(self):
"""$ pdb_chain -AH data/dummy.pdb"""
sys.argv = ['', '-AH', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0][:47],
"ERROR!! Chain identifiers must be a single char")
def test_not_an_option(self):
"""$ pdb_chain A data/dummy.pdb"""
sys.argv = ['', 'A', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0],
"ERROR! First argument is not an option: 'A'")
if __name__ == '__main__':
from config import test_dir
mpath = os.path.abspath(os.path.join(test_dir, '..'))
sys.path.insert(0, mpath) # so we load dev files before any installation
unittest.main() | random_line_split |
|
test_pdb_chain.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for `pdb_chain`.
"""
import os
import sys
import unittest
from config import data_dir
from utils import OutputCapture
class TestTool(unittest.TestCase):
" | self.stdout = output.stdout
self.stderr = output.stderr
return
def test_default(self):
"""$ pdb_chain data/dummy.pdb"""
# Simulate input
sys.argv = ['', os.path.join(data_dir, 'dummy.pdb')]
# Execute the script
self.exec_module()
# Validate results
self.assertEqual(self.retcode, 0) # ensure the program exited OK.
self.assertEqual(len(self.stdout), 204) # no lines deleted
self.assertEqual(len(self.stderr), 0) # no errors
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, [' '])
def test_two_options(self):
"""$ pdb_chain -X data/dummy.pdb"""
sys.argv = ['', '-X', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 0)
self.assertEqual(len(self.stdout), 204)
self.assertEqual(len(self.stderr), 0)
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, ['X'])
def test_file_not_found(self):
"""$ pdb_chain -A not_existing.pdb"""
afile = os.path.join(data_dir, 'not_existing.pdb')
sys.argv = ['', '-A', afile]
self.exec_module()
self.assertEqual(self.retcode, 1) # exit code is 1 (error)
self.assertEqual(len(self.stdout), 0) # nothing written to stdout
self.assertEqual(self.stderr[0][:22],
"ERROR!! File not found") # proper error message
def test_file_missing(self):
"""$ pdb_chain -A"""
sys.argv = ['', '-A']
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr[0],
"ERROR!! No data to process!")
def test_helptext(self):
"""$ pdb_chain"""
sys.argv = ['']
self.exec_module()
self.assertEqual(self.retcode, 1) # ensure the program exited gracefully.
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr, self.module.__doc__.split("\n")[:-1])
def test_invalid_option(self):
"""$ pdb_chain -AH data/dummy.pdb"""
sys.argv = ['', '-AH', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0][:47],
"ERROR!! Chain identifiers must be a single char")
def test_not_an_option(self):
"""$ pdb_chain A data/dummy.pdb"""
sys.argv = ['', 'A', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0],
"ERROR! First argument is not an option: 'A'")
if __name__ == '__main__':
from config import test_dir
mpath = os.path.abspath(os.path.join(test_dir, '..'))
sys.path.insert(0, mpath) # so we load dev files before any installation
unittest.main()
| ""
Generic class for testing tools.
"""
def setUp(self):
# Dynamically import the module
name = 'pdbtools.pdb_chain'
self.module = __import__(name, fromlist=[''])
def exec_module(self):
"""
Execs module.
"""
with OutputCapture() as output:
try:
self.module.main()
except SystemExit as e:
self.retcode = e.code
| identifier_body |
test_pdb_chain.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for `pdb_chain`.
"""
import os
import sys
import unittest
from config import data_dir
from utils import OutputCapture
class T | unittest.TestCase):
"""
Generic class for testing tools.
"""
def setUp(self):
# Dynamically import the module
name = 'pdbtools.pdb_chain'
self.module = __import__(name, fromlist=[''])
def exec_module(self):
"""
Execs module.
"""
with OutputCapture() as output:
try:
self.module.main()
except SystemExit as e:
self.retcode = e.code
self.stdout = output.stdout
self.stderr = output.stderr
return
def test_default(self):
"""$ pdb_chain data/dummy.pdb"""
# Simulate input
sys.argv = ['', os.path.join(data_dir, 'dummy.pdb')]
# Execute the script
self.exec_module()
# Validate results
self.assertEqual(self.retcode, 0) # ensure the program exited OK.
self.assertEqual(len(self.stdout), 204) # no lines deleted
self.assertEqual(len(self.stderr), 0) # no errors
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, [' '])
def test_two_options(self):
"""$ pdb_chain -X data/dummy.pdb"""
sys.argv = ['', '-X', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 0)
self.assertEqual(len(self.stdout), 204)
self.assertEqual(len(self.stderr), 0)
records = (('ATOM', 'HETATM'))
chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
unique_chain_ids = list(set(chain_ids))
self.assertEqual(unique_chain_ids, ['X'])
def test_file_not_found(self):
"""$ pdb_chain -A not_existing.pdb"""
afile = os.path.join(data_dir, 'not_existing.pdb')
sys.argv = ['', '-A', afile]
self.exec_module()
self.assertEqual(self.retcode, 1) # exit code is 1 (error)
self.assertEqual(len(self.stdout), 0) # nothing written to stdout
self.assertEqual(self.stderr[0][:22],
"ERROR!! File not found") # proper error message
def test_file_missing(self):
"""$ pdb_chain -A"""
sys.argv = ['', '-A']
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr[0],
"ERROR!! No data to process!")
def test_helptext(self):
"""$ pdb_chain"""
sys.argv = ['']
self.exec_module()
self.assertEqual(self.retcode, 1) # ensure the program exited gracefully.
self.assertEqual(len(self.stdout), 0) # no output
self.assertEqual(self.stderr, self.module.__doc__.split("\n")[:-1])
def test_invalid_option(self):
"""$ pdb_chain -AH data/dummy.pdb"""
sys.argv = ['', '-AH', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0][:47],
"ERROR!! Chain identifiers must be a single char")
def test_not_an_option(self):
"""$ pdb_chain A data/dummy.pdb"""
sys.argv = ['', 'A', os.path.join(data_dir, 'dummy.pdb')]
self.exec_module()
self.assertEqual(self.retcode, 1)
self.assertEqual(len(self.stdout), 0)
self.assertEqual(self.stderr[0],
"ERROR! First argument is not an option: 'A'")
if __name__ == '__main__':
from config import test_dir
mpath = os.path.abspath(os.path.join(test_dir, '..'))
sys.path.insert(0, mpath) # so we load dev files before any installation
unittest.main()
| estTool( | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.