file_name (large_string, length 4-140) | prefix (large_string, length 0-12.1k) | suffix (large_string, length 0-12k) | middle (large_string, length 0-7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
preloader.js | 'use strict';
angular.module('mpApp')
.factory(
'preloader',
function($q, $rootScope) {
// I manage the preloading of image objects. Accepts an array of image URLs.
function | (imageLocations) {
// I am the image SRC values to preload.
this.imageLocations = imageLocations;
// As the images load, we'll need to keep track of the load/error
// counts when announcing the progress on the loading.
this.imageCount = this.imageLocations.length;
this.loadCount = 0;
this.errorCount = 0;
// I am the possible states that the preloader can be in.
this.states = {
PENDING: 1,
LOADING: 2,
RESOLVED: 3,
REJECTED: 4
};
// I keep track of the current state of the preloader.
this.state = this.states.PENDING;
// When loading the images, a promise will be returned to indicate
// when the loading has completed (and / or progressed).
this.deferred = $q.defer();
this.promise = this.deferred.promise;
}
// ---
// STATIC METHODS.
// ---
// I preload the given images [Array] and return a promise. The promise
// will be resolved with the array of image locations.
Preloader.preloadImages = function(imageLocations) {
var preloader = new Preloader(imageLocations);
return (preloader.load());
};
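// Illustrative usage sketch (editorial addition, not part of the original
// source): the returned $q promise accepts success, error, and notify
// callbacks, so callers can observe progress while the images load.
//
// preloader.preloadImages(['a.png', 'b.png']).then(
//     function handleResolve(imageLocations) { /* All images loaded. */ },
//     function handleReject(imageLocation) { /* One image failed. */ },
//     function handleNotify(progress) { console.log(progress.percent); }
// );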
// ---
// INSTANCE METHODS.
// ---
Preloader.prototype = {
// Best practice for "instanceof" operator.
constructor: Preloader,
// ---
// PUBLIC METHODS.
// ---
// I determine if the preloader has started loading images yet.
isInitiated: function isInitiated() {
return (this.state !== this.states.PENDING);
},
// I determine if the preloader has failed to load all of the images.
isRejected: function isRejected() {
return (this.state === this.states.REJECTED);
},
// I determine if the preloader has successfully loaded all of the images.
isResolved: function isResolved() {
return (this.state === this.states.RESOLVED);
},
// I initiate the preload of the images. Returns a promise.
load: function load() {
// If the images are already loading, return the existing promise.
if (this.isInitiated()) {
return (this.promise);
}
this.state = this.states.LOADING;
for (var i = 0; i < this.imageCount; i++) {
this.loadImageLocation(this.imageLocations[i]);
}
// Return the deferred promise for the load event.
return (this.promise);
},
// ---
// PRIVATE METHODS.
// ---
// I handle the load-failure of the given image location.
handleImageError: function handleImageError(imageLocation) {
this.errorCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
this.state = this.states.REJECTED;
this.deferred.reject(imageLocation);
},
// I handle the load-success of the given image location.
handleImageLoad: function handleImageLoad(imageLocation) {
this.loadCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
// Notify the progress of the overall deferred. This is different
// than Resolving the deferred - you can call notify many times
// before the ultimate resolution (or rejection) of the deferred.
this.deferred.notify({
percent: Math.ceil(this.loadCount / this.imageCount * 100),
imageLocation: imageLocation
});
// If all of the images have loaded, we can resolve the deferred
// value that we returned to the calling context.
if (this.loadCount === this.imageCount) {
this.state = this.states.RESOLVED;
this.deferred.resolve(this.imageLocations);
}
},
// I load the given image location and then wire the load / error
// events back into the preloader instance.
// --
// NOTE: The load/error events trigger a $digest.
loadImageLocation: function loadImageLocation(imageLocation) {
var preloader = this;
// When it comes to creating the image object, it is critical that
// we bind the event handlers BEFORE we actually set the image
// source. Failure to do so will prevent the events from properly
// triggering in some browsers.
var image = angular.element(new Image())
.bind('load', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageLoad(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.bind('error', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageError(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.attr('src', imageLocation);
}
};
// Return the factory instance.
return (Preloader);
}
);
| Preloader | identifier_name |
300-custom-types.rs | #[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct
struct Unit;
// A tuple struct
struct | (i32, f32);
// A struct with two fields
struct Point {
x: f32,
y: f32,
}
// Structs can be reused as fields of another struct
#[allow(dead_code)]
struct Rectangle {
// A rectangle can be specified by where the top left and bottom right
// corners are in space.
top_left: Point,
bottom_right: Point,
}
fn main() {
// Create struct with field init shorthand
let name = String::from("Peter");
let age = 27;
let peter = Person { name, age };
// Print debug struct
println!("{:?}", peter);
// Instantiate a `Point`
let point: Point = Point { x: 10.3, y: 0.4 };
// Access the fields of the point
println!("point coordinates: ({}, {})", point.x, point.y);
// Make a new point by using struct update syntax to use the fields of our
// other one
let bottom_right = Point { x: 5.2, ..point };
// `bottom_right.y` will be the same as `point.y` because we used that field
// from `point`
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// Destructure the point using a `let` binding
let Point {
x: left_edge,
y: top_edge,
} = point;
let _rectangle = Rectangle {
// struct instantiation is an expression too
top_left: Point {
x: left_edge,
y: top_edge,
},
bottom_right,
};
// Instantiate a unit struct
let _unit = Unit;
// Instantiate a tuple struct
let pair = Pair(1, 0.1);
// Access the fields of a tuple struct
println!("pair contains {:?} and {:?}", pair.0, pair.1);
// Destructure a tuple struct
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
}
| Pair | identifier_name |
300-custom-types.rs | #[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct
struct Unit;
// A tuple struct
struct Pair(i32, f32);
// A struct with two fields
struct Point {
x: f32,
y: f32,
}
// Structs can be reused as fields of another struct
#[allow(dead_code)]
struct Rectangle {
// A rectangle can be specified by where the top left and bottom right
// corners are in space.
top_left: Point,
bottom_right: Point,
}
fn main() {
// Create struct with field init shorthand
let name = String::from("Peter");
let age = 27;
let peter = Person { name, age };
// Print debug struct
println!("{:?}", peter);
// Instantiate a `Point` |
// Access the fields of the point
println!("point coordinates: ({}, {})", point.x, point.y);
// Make a new point by using struct update syntax to use the fields of our
// other one
let bottom_right = Point { x: 5.2, ..point };
// `bottom_right.y` will be the same as `point.y` because we used that field
// from `point`
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// Destructure the point using a `let` binding
let Point {
x: left_edge,
y: top_edge,
} = point;
let _rectangle = Rectangle {
// struct instantiation is an expression too
top_left: Point {
x: left_edge,
y: top_edge,
},
bottom_right,
};
// Instantiate a unit struct
let _unit = Unit;
// Instantiate a tuple struct
let pair = Pair(1, 0.1);
// Access the fields of a tuple struct
println!("pair contains {:?} and {:?}", pair.0, pair.1);
// Destructure a tuple struct
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
} | let point: Point = Point { x: 10.3, y: 0.4 }; | random_line_split |
300-custom-types.rs | #[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct
struct Unit;
// A tuple struct
struct Pair(i32, f32);
// A struct with two fields
struct Point {
x: f32,
y: f32,
}
// Structs can be reused as fields of another struct
#[allow(dead_code)]
struct Rectangle {
// A rectangle can be specified by where the top left and bottom right
// corners are in space.
top_left: Point,
bottom_right: Point,
}
fn main() | // from `point`
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// Destructure the point using a `let` binding
let Point {
x: left_edge,
y: top_edge,
} = point;
let _rectangle = Rectangle {
// struct instantiation is an expression too
top_left: Point {
x: left_edge,
y: top_edge,
},
bottom_right,
};
// Instantiate a unit struct
let _unit = Unit;
// Instantiate a tuple struct
let pair = Pair(1, 0.1);
// Access the fields of a tuple struct
println!("pair contains {:?} and {:?}", pair.0, pair.1);
// Destructure a tuple struct
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
}
| {
// Create struct with field init shorthand
let name = String::from("Peter");
let age = 27;
let peter = Person { name, age };
// Print debug struct
println!("{:?}", peter);
// Instantiate a `Point`
let point: Point = Point { x: 10.3, y: 0.4 };
// Access the fields of the point
println!("point coordinates: ({}, {})", point.x, point.y);
// Make a new point by using struct update syntax to use the fields of our
// other one
let bottom_right = Point { x: 5.2, ..point };
// `bottom_right.y` will be the same as `point.y` because we used that field | identifier_body |
views.py | # -*- coding: utf-8 -*-
"""
flask.views
~~~~~~~~~~~
This module provides class-based views inspired by the ones in Django.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from .globals import request
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
'delete', 'put', 'trace', 'patch'])
class View(object):
"""Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET']
def dispatch_request(self, name):
return 'Hello %s!' % name
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function!
"""
#: A list of methods this pluggable view can handle.
methods = None
#: The canonical way to decorate class-based views is to decorate the
#: return value of as_view(). However since this moves parts of the
#: logic from the class declaration to the place where it's hooked
#: into the routing system.
#:
#: You can place one or more decorators in this list and whenever the
#: view function is created the result is automatically decorated.
#:
#: .. versionadded:: 0.8
decorators = []
def dispatch_request(self):
"""Subclasses have to override this method to implement the
actual view function code. This method is called with all
the arguments from the URL rule.
"""
raise NotImplementedError()
@classmethod
def as_view(cls, name, *class_args, **class_kwargs):
"""Converts the class into an actual view function that can be used
with the routing system. Internally this generates a function on the
fly which will instantiate the :class:`View` on each request and call
the :meth:`dispatch_request` method on it.
The arguments passed to :meth:`as_view` are forwarded to the
constructor of the class.
"""
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
view.__name__ = name
view.__module__ = cls.__module__
for decorator in cls.decorators:
view = decorator(view)
# we attach the view class to the view function for two reasons:
# first of all it allows us to easily figure out what class-based
# view this thing came from, secondly it's also used for instantiating
# the view class so you can actually replace it with something else
# for testing purposes and debugging.
view.view_class = cls
view.__name__ = name
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.methods = cls.methods
return view
class MethodViewType(type):
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
if 'methods' not in d:
methods = set(rv.methods or [])
for key, value in d.iteritems():
if key in http_method_funcs:
methods.add(key.upper())
# if we have no method at all in there we don't want to
# add a method list. (This is for instance the case for
# the baseclass or another subclass of a base method view
# that does not introduce new methods).
if methods:
rv.methods = sorted(methods)
return rv
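# Illustrative sketch (editorial addition, not part of the original
# module): with the metaclass above, a subclass that defines handler
# methods gets its ``methods`` list derived automatically:
#
#     class ItemAPI(MethodView):
#         def get(self):
#             ...
#         def post(self):
#             ...
#
#     # ItemAPI.methods == ['GET', 'POST']  (upper-cased and sorted)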
class MethodView(View):
| def dispatch_request(self, *args, **kwargs):
meth = getattr(self, request.method.lower(), None)
# if the request method is HEAD and we don't have a handler for it
# retry with GET
if meth is None and request.method == 'HEAD':
meth = getattr(self, 'get', None)
assert meth is not None, 'Unimplemented method %r' % request.method
return meth(*args, **kwargs)
| """Like a regular class-based view but that dispatches requests to
particular methods. For instance if you implement a method called
:meth:`get` it means you will respond to ``'GET'`` requests and
the :meth:`dispatch_request` implementation will automatically
forward your request to that. Also :attr:`options` is set for you
automatically::
class CounterAPI(MethodView):
def get(self):
return session.get('counter', 0)
def post(self):
session['counter'] = session.get('counter', 0) + 1
return 'OK'
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
"""
__metaclass__ = MethodViewType
| identifier_body |
views.py | # -*- coding: utf-8 -*-
"""
flask.views
~~~~~~~~~~~
This module provides class-based views inspired by the ones in Django.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from .globals import request
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
'delete', 'put', 'trace', 'patch'])
class View(object):
"""Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET'] |
def dispatch_request(self, name):
return 'Hello %s!' % name
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function!
"""
#: A list of methods this pluggable view can handle.
methods = None
#: The canonical way to decorate class-based views is to decorate the
#: return value of as_view(). However since this moves parts of the
#: logic from the class declaration to the place where it's hooked
#: into the routing system.
#:
#: You can place one or more decorators in this list and whenever the
#: view function is created the result is automatically decorated.
#:
#: .. versionadded:: 0.8
decorators = []
def dispatch_request(self):
"""Subclasses have to override this method to implement the
actual view function code. This method is called with all
the arguments from the URL rule.
"""
raise NotImplementedError()
@classmethod
def as_view(cls, name, *class_args, **class_kwargs):
"""Converts the class into an actual view function that can be used
with the routing system. Internally this generates a function on the
fly which will instantiate the :class:`View` on each request and call
the :meth:`dispatch_request` method on it.
The arguments passed to :meth:`as_view` are forwarded to the
constructor of the class.
"""
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
view.__name__ = name
view.__module__ = cls.__module__
for decorator in cls.decorators:
view = decorator(view)
# we attach the view class to the view function for two reasons:
# first of all it allows us to easily figure out what class-based
# view this thing came from, secondly it's also used for instantiating
# the view class so you can actually replace it with something else
# for testing purposes and debugging.
view.view_class = cls
view.__name__ = name
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.methods = cls.methods
return view
class MethodViewType(type):
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
if 'methods' not in d:
methods = set(rv.methods or [])
for key, value in d.iteritems():
if key in http_method_funcs:
methods.add(key.upper())
# if we have no method at all in there we don't want to
# add a method list. (This is for instance the case for
# the baseclass or another subclass of a base method view
# that does not introduce new methods).
if methods:
rv.methods = sorted(methods)
return rv
class MethodView(View):
"""Like a regular class-based view but that dispatches requests to
particular methods. For instance if you implement a method called
:meth:`get` it means you will respond to ``'GET'`` requests and
the :meth:`dispatch_request` implementation will automatically
forward your request to that. Also :attr:`options` is set for you
automatically::
class CounterAPI(MethodView):
def get(self):
return session.get('counter', 0)
def post(self):
session['counter'] = session.get('counter', 0) + 1
return 'OK'
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
"""
__metaclass__ = MethodViewType
def dispatch_request(self, *args, **kwargs):
meth = getattr(self, request.method.lower(), None)
# if the request method is HEAD and we don't have a handler for it
# retry with GET
if meth is None and request.method == 'HEAD':
meth = getattr(self, 'get', None)
assert meth is not None, 'Unimplemented method %r' % request.method
return meth(*args, **kwargs) | random_line_split |
|
views.py | # -*- coding: utf-8 -*-
"""
flask.views
~~~~~~~~~~~
This module provides class-based views inspired by the ones in Django.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from .globals import request
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
'delete', 'put', 'trace', 'patch'])
class View(object):
"""Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET']
def dispatch_request(self, name):
return 'Hello %s!' % name
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function!
"""
#: A list of methods this pluggable view can handle.
methods = None
#: The canonical way to decorate class-based views is to decorate the
#: return value of as_view(). However since this moves parts of the
#: logic from the class declaration to the place where it's hooked
#: into the routing system.
#:
#: You can place one or more decorators in this list and whenever the
#: view function is created the result is automatically decorated.
#:
#: .. versionadded:: 0.8
decorators = []
def dispatch_request(self):
"""Subclasses have to override this method to implement the
actual view function code. This method is called with all
the arguments from the URL rule.
"""
raise NotImplementedError()
@classmethod
def as_view(cls, name, *class_args, **class_kwargs):
"""Converts the class into an actual view function that can be used
with the routing system. Internally this generates a function on the
fly which will instantiate the :class:`View` on each request and call
the :meth:`dispatch_request` method on it.
The arguments passed to :meth:`as_view` are forwarded to the
constructor of the class.
"""
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
|
# we attach the view class to the view function for two reasons:
# first of all it allows us to easily figure out what class-based
# view this thing came from, secondly it's also used for instantiating
# the view class so you can actually replace it with something else
# for testing purposes and debugging.
view.view_class = cls
view.__name__ = name
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.methods = cls.methods
return view
class MethodViewType(type):
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
if 'methods' not in d:
methods = set(rv.methods or [])
for key, value in d.iteritems():
if key in http_method_funcs:
methods.add(key.upper())
# if we have no method at all in there we don't want to
# add a method list. (This is for instance the case for
# the baseclass or another subclass of a base method view
# that does not introduce new methods).
if methods:
rv.methods = sorted(methods)
return rv
class MethodView(View):
"""Like a regular class-based view but that dispatches requests to
particular methods. For instance if you implement a method called
:meth:`get` it means you will response to ``'GET'`` requests and
the :meth:`dispatch_request` implementation will automatically
forward your request to that. Also :attr:`options` is set for you
automatically::
class CounterAPI(MethodView):
def get(self):
return session.get('counter', 0)
def post(self):
session['counter'] = session.get('counter', 0) + 1
return 'OK'
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
"""
__metaclass__ = MethodViewType
def dispatch_request(self, *args, **kwargs):
meth = getattr(self, request.method.lower(), None)
# if the request method is HEAD and we don't have a handler for it
# retry with GET
if meth is None and request.method == 'HEAD':
meth = getattr(self, 'get', None)
assert meth is not None, 'Unimplemented method %r' % request.method
return meth(*args, **kwargs)
| view.__name__ = name
view.__module__ = cls.__module__
for decorator in cls.decorators:
view = decorator(view) | conditional_block |
views.py | # -*- coding: utf-8 -*-
"""
flask.views
~~~~~~~~~~~
This module provides class-based views inspired by the ones in Django.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from .globals import request
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
'delete', 'put', 'trace', 'patch'])
class View(object):
"""Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET']
def dispatch_request(self, name):
return 'Hello %s!' % name
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function!
"""
#: A list of methods this pluggable view can handle.
methods = None
#: The canonical way to decorate class-based views is to decorate the
#: return value of as_view(). However since this moves parts of the
#: logic from the class declaration to the place where it's hooked
#: into the routing system.
#:
#: You can place one or more decorators in this list and whenever the
#: view function is created the result is automatically decorated.
#:
#: .. versionadded:: 0.8
decorators = []
def | (self):
"""Subclasses have to override this method to implement the
actual view function code. This method is called with all
the arguments from the URL rule.
"""
raise NotImplementedError()
@classmethod
def as_view(cls, name, *class_args, **class_kwargs):
"""Converts the class into an actual view function that can be used
with the routing system. Internally this generates a function on the
fly which will instantiate the :class:`View` on each request and call
the :meth:`dispatch_request` method on it.
The arguments passed to :meth:`as_view` are forwarded to the
constructor of the class.
"""
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
view.__name__ = name
view.__module__ = cls.__module__
for decorator in cls.decorators:
view = decorator(view)
# we attach the view class to the view function for two reasons:
# first of all it allows us to easily figure out what class-based
# view this thing came from, secondly it's also used for instantiating
# the view class so you can actually replace it with something else
# for testing purposes and debugging.
view.view_class = cls
view.__name__ = name
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.methods = cls.methods
return view
class MethodViewType(type):
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
if 'methods' not in d:
methods = set(rv.methods or [])
for key, value in d.iteritems():
if key in http_method_funcs:
methods.add(key.upper())
# if we have no method at all in there we don't want to
# add a method list. (This is for instance the case for
# the baseclass or another subclass of a base method view
# that does not introduce new methods).
if methods:
rv.methods = sorted(methods)
return rv
class MethodView(View):
"""Like a regular class-based view but that dispatches requests to
particular methods. For instance if you implement a method called
:meth:`get` it means you will respond to ``'GET'`` requests and
the :meth:`dispatch_request` implementation will automatically
forward your request to that. Also :attr:`options` is set for you
automatically::
class CounterAPI(MethodView):
def get(self):
return session.get('counter', 0)
def post(self):
session['counter'] = session.get('counter', 0) + 1
return 'OK'
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
"""
__metaclass__ = MethodViewType
def dispatch_request(self, *args, **kwargs):
meth = getattr(self, request.method.lower(), None)
# if the request method is HEAD and we don't have a handler for it
# retry with GET
if meth is None and request.method == 'HEAD':
meth = getattr(self, 'get', None)
assert meth is not None, 'Unimplemented method %r' % request.method
return meth(*args, **kwargs)
| dispatch_request | identifier_name |
test-amp-fresh.js | /**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as sinon from 'sinon';
import {AmpFresh} from '../amp-fresh';
import {
ampFreshManagerForDoc,
installAmpFreshManagerForDoc,
} from '../amp-fresh-manager';
import {resetServiceForTesting} from '../../../../src/service';
import {toggleExperiment} from '../../../../src/experiments';
describe('amp-fresh', () => {
let sandbox;
let fresh;
let elem;
let manager;
beforeEach(() => {
toggleExperiment(window, 'amp-fresh', true);
elem = document.createElement('div');
elem.setAttribute('id', 'amp-fresh-1');
document.body.appendChild(elem);
const span = document.createElement('span');
span.textContent = 'hello';
elem.appendChild(span);
installAmpFreshManagerForDoc(window.document);
manager = ampFreshManagerForDoc(window.document);
fresh = new AmpFresh(elem);
sandbox = sinon.sandbox.create();
fresh.mutateElement = function(cb) {
cb();
};
});
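// Editorial note (assumption): stubbing mutateElement to invoke its
// callback synchronously lets the assertions below observe DOM changes
// immediately, instead of waiting for AMP's scheduled mutate phase.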
| afterEach(() => {
toggleExperiment(window, 'amp-fresh', false);
resetServiceForTesting(window, 'ampFreshManager');
sandbox.restore();
if (elem.parentNode) {
elem.parentNode.removeChild(elem);
}
});
it('should register to manager', () => {
const registerSpy = sandbox.spy(manager, 'register');
expect(registerSpy).to.have.not.been.called;
fresh.buildCallback();
expect(registerSpy).to.be.calledOnce;
});
it('should replace its subtree', () => {
fresh.buildCallback();
expect(fresh.element.innerHTML).to.equal('<span>hello</span>');
const doc = {
getElementById: function(id) {
const el = document.createElement('amp-fresh');
el.innerHTML = '<span>hello</span><div>world</div>!';
el.setAttribute('id', id);
return el;
},
};
manager.update_(doc);
expect(fresh.element.innerHTML).to.equal(
'<span>hello</span><div>world</div>!');
});
it('should have aria-live=polite by default', () => {
fresh.buildCallback();
expect(fresh.element.getAttribute('aria-live')).to.equal('polite');
});
it('should use explicitly defined aria-live attribute value', () => {
elem.setAttribute('aria-live', 'assertive');
fresh.buildCallback();
expect(fresh.element.getAttribute('aria-live')).to.equal('assertive');
});
}); | random_line_split |
|
test-amp-fresh.js | /**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as sinon from 'sinon';
import {AmpFresh} from '../amp-fresh';
import {
ampFreshManagerForDoc,
installAmpFreshManagerForDoc,
} from '../amp-fresh-manager';
import {resetServiceForTesting} from '../../../../src/service';
import {toggleExperiment} from '../../../../src/experiments';
describe('amp-fresh', () => {
let sandbox;
let fresh;
let elem;
let manager;
beforeEach(() => {
toggleExperiment(window, 'amp-fresh', true);
elem = document.createElement('div');
elem.setAttribute('id', 'amp-fresh-1');
document.body.appendChild(elem);
const span = document.createElement('span');
span.textContent = 'hello';
elem.appendChild(span);
installAmpFreshManagerForDoc(window.document);
manager = ampFreshManagerForDoc(window.document);
fresh = new AmpFresh(elem);
sandbox = sinon.sandbox.create();
fresh.mutateElement = function(cb) {
cb();
};
});
afterEach(() => {
toggleExperiment(window, 'amp-fresh', false);
resetServiceForTesting(window, 'ampFreshManager');
sandbox.restore();
if (elem.parentNode) |
});
it('should register to manager', () => {
const registerSpy = sandbox.spy(manager, 'register');
expect(registerSpy).to.have.not.been.called;
fresh.buildCallback();
expect(registerSpy).to.be.calledOnce;
});
it('should replace its subtree', () => {
fresh.buildCallback();
expect(fresh.element.innerHTML).to.equal('<span>hello</span>');
const doc = {
getElementById: function(id) {
const el = document.createElement('amp-fresh');
el.innerHTML = '<span>hello</span><div>world</div>!';
el.setAttribute('id', id);
return el;
},
};
manager.update_(doc);
expect(fresh.element.innerHTML).to.equal(
'<span>hello</span><div>world</div>!');
});
it('should have aria-live=polite by default', () => {
fresh.buildCallback();
expect(fresh.element.getAttribute('aria-live')).to.equal('polite');
});
it('should use explicitly defined aria-live attribute value', () => {
elem.setAttribute('aria-live', 'assertive');
fresh.buildCallback();
expect(fresh.element.getAttribute('aria-live')).to.equal('assertive');
});
});
| {
elem.parentNode.removeChild(elem);
} | conditional_block |
find_dependencies.py | this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.core import util
from telemetry.page import cloud_storage
from telemetry.util import bootstrap
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def _InDirectory(subdirectory, directory):
subdirectory = os.path.realpath(subdirectory)
directory = os.path.realpath(directory)
common_prefix = os.path.commonprefix([subdirectory, directory])
return common_prefix == directory
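# Editorial note (assumption): os.path.commonprefix compares strings
# character by character, so sibling paths sharing a name prefix (e.g.
# '/src/foo' vs '/src/foobar') would be misjudged; the callers below pass
# realpath'd paths within a known tree, where this is tolerable.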
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(
os.path.realpath(os.path.join(util.GetChromiumSrcDir(), os.pardir, path))
for path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not _InDirectory(module_path, util.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
util.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
for page in page_set:
if page.is_file:
yield page.serving_dir
def FindExcludedFiles(files, options):
def MatchesConditions(path, conditions):
|
# Define some filters for files.
def IsHidden(path):
for pathname_component in path.split(os.sep):
if pathname_component.startswith('.'):
return True
return False
def IsPyc(path):
return os.path.splitext(path)[1] == '.pyc'
def IsInCloudStorage(path):
return os.path.exists(path + '.sha1')
def MatchesExcludeOptions(path):
for pattern in options.exclude:
if (fnmatch.fnmatch(path, pattern) or
fnmatch.fnmatch(os.path.basename(path), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for path in files:
if MatchesConditions(path, exclude_conditions):
yield path
def FindDependencies(paths, options):
# Verify arguments.
for path in paths:
if not os.path.exists(path):
raise ValueError('Path does not exist: %s' % path)
dependencies = path_set.PathSet()
# Including __init__.py will include Telemetry and its dependencies.
# If the user doesn't pass any arguments, we just have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(util.GetTelemetryDir(), 'telemetry', '__init__.py')))
dependencies |= FindBootstrapDependencies(util.GetTelemetryDir())
# Add dependencies.
for path in paths:
base_dir = os.path.dirname(os.path.realpath(path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
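# Illustrative usage sketch (editorial addition; the script path is
# hypothetical):
#
#     deps = FindDependencies(['tools/perf/run_benchmark'], options)
#     for path in sorted(deps):
#       print path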
def ZipDependencies(paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(util.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path, base_dir))
zip_file.write(path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for path in paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(path)))
link_info.create_system = 3 # Unix attributes.
# 010 is regular file, 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
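# Editorial note (assumption): 0100777 octal combines the regular-file
# type bits (S_IFREG, 0100000) with permission mode 0777; shifting left
# by 16 stores this st_mode value in the high bytes of external_attr,
# where Unix-aware zip tools expect it.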
relative_path = os.path.relpath(path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(util.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(path, gsutil_base_dir))
zip_file.write(path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
paths = args.positional_args
dependencies = FindDependencies(paths, args)
| for condition in conditions:
if condition(path):
return True
return False | identifier_body |
find_dependencies.py | this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.core import util
from telemetry.page import cloud_storage
from telemetry.util import bootstrap
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def | (subdirectory, directory):
subdirectory = os.path.realpath(subdirectory)
directory = os.path.realpath(directory)
common_prefix = os.path.commonprefix([subdirectory, directory])
return common_prefix == directory
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(
os.path.realpath(os.path.join(util.GetChromiumSrcDir(), os.pardir, path))
for path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not _InDirectory(module_path, util.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
util.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
for page in page_set:
if page.is_file:
yield page.serving_dir
def FindExcludedFiles(files, options):
def MatchesConditions(path, conditions):
for condition in conditions:
if condition(path):
return True
return False
# Define some filters for files.
def IsHidden(path):
for pathname_component in path.split(os.sep):
if pathname_component.startswith('.'):
return True
return False
def IsPyc(path):
return os.path.splitext(path)[1] == '.pyc'
def IsInCloudStorage(path):
return os.path.exists(path + '.sha1')
def MatchesExcludeOptions(path):
for pattern in options.exclude:
if (fnmatch.fnmatch(path, pattern) or
fnmatch.fnmatch(os.path.basename(path), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for path in files:
if MatchesConditions(path, exclude_conditions):
yield path
def FindDependencies(paths, options):
# Verify arguments.
for path in paths:
if not os.path.exists(path):
raise ValueError('Path does not exist: %s' % path)
dependencies = path_set.PathSet()
# Including __init__.py will include Telemetry and its dependencies.
# If the user doesn't pass any arguments, we just have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(util.GetTelemetryDir(), 'telemetry', '__init__.py')))
dependencies |= FindBootstrapDependencies(util.GetTelemetryDir())
# Add dependencies.
for path in paths:
base_dir = os.path.dirname(os.path.realpath(path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(util.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path, base_dir))
zip_file.write(path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for path in paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(path)))
link_info.create_system = 3 # Unix attributes.
# 010 is regular file, 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(util.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(path, gsutil_base_dir))
zip_file.write(path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
paths = args.positional_args
dependencies = FindDependencies(paths, args)
| _InDirectory | identifier_name |
find_dependencies.py | this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.core import util
from telemetry.page import cloud_storage
from telemetry.util import bootstrap
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def _InDirectory(subdirectory, directory):
subdirectory = os.path.realpath(subdirectory)
directory = os.path.realpath(directory)
common_prefix = os.path.commonprefix([subdirectory, directory])
return common_prefix == directory
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(
os.path.realpath(os.path.join(util.GetChromiumSrcDir(), os.pardir, path))
for path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not _InDirectory(module_path, util.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
util.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
for page in page_set:
if page.is_file:
yield page.serving_dir
def FindExcludedFiles(files, options):
def MatchesConditions(path, conditions):
for condition in conditions:
if condition(path):
return True
return False
# Define some filters for files.
def IsHidden(path):
for pathname_component in path.split(os.sep):
|
return False
def IsPyc(path):
return os.path.splitext(path)[1] == '.pyc'
def IsInCloudStorage(path):
return os.path.exists(path + '.sha1')
def MatchesExcludeOptions(path):
for pattern in options.exclude:
if (fnmatch.fnmatch(path, pattern) or
fnmatch.fnmatch(os.path.basename(path), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for path in files:
if MatchesConditions(path, exclude_conditions):
yield path
def FindDependencies(paths, options):
# Verify arguments.
for path in paths:
if not os.path.exists(path):
raise ValueError('Path does not exist: %s' % path)
dependencies = path_set.PathSet()
# Including __init__.py will include Telemetry and its dependencies.
# If the user doesn't pass any arguments, we just have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(util.GetTelemetryDir(), 'telemetry', '__init__.py')))
dependencies |= FindBootstrapDependencies(util.GetTelemetryDir())
# Add dependencies.
for path in paths:
base_dir = os.path.dirname(os.path.realpath(path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(util.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path, base_dir))
zip_file.write(path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for path in paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(path)))
link_info.create_system = 3 # Unix attributes.
# 010 is regular file, 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(util.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(path, gsutil_base_dir))
zip_file.write(path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
paths = args.positional_args
dependencies = FindDependencies(paths, args)
| if pathname_component.startswith('.'):
return True | conditional_block |
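The rows in this dump follow a fill-in-the-middle layout: a file name, a prefix, a suffix, the held-out middle, and a `fim_type` label, delimited by `|` markers as seen above. A minimal sketch of how such a row reassembles into the original file (the dict fields here are illustrative; the real dataset may store the columns separately rather than pipe-delimited):

```python
# Reassemble a fill-in-the-middle (FIM) example into the original source file.
# Field names are assumptions based on the column order visible in this dump.

def reconstruct(record):
    """Concatenate prefix + middle + suffix to recover the full file."""
    return record["prefix"] + record["middle"] + record["suffix"]

example = {
    "file_name": "find_dependencies.py",
    "prefix": "def IsHidden(path):\n  for pathname_component in path.split(os.sep):\n    ",
    "middle": "if pathname_component.startswith('.'):\n      return True",
    "suffix": "\n  return False\n",
    "fim_type": "conditional_block",
}

source = reconstruct(example)
assert "startswith('.')" in source
print(source)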
find_dependencies.py | this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import imp
import logging
import modulefinder
import optparse
import os
import sys
import zipfile
from telemetry import benchmark
from telemetry.core import command_line
from telemetry.core import discover
from telemetry.core import util
from telemetry.page import cloud_storage
from telemetry.util import bootstrap
from telemetry.util import path_set
DEPS_FILE = 'bootstrap_deps'
def _InDirectory(subdirectory, directory):
subdirectory = os.path.realpath(subdirectory)
directory = os.path.realpath(directory)
common_prefix = os.path.commonprefix([subdirectory, directory])
return common_prefix == directory
def FindBootstrapDependencies(base_dir):
deps_file = os.path.join(base_dir, DEPS_FILE)
if not os.path.exists(deps_file):
return []
deps_paths = bootstrap.ListAllDepsPaths(deps_file)
return set(
os.path.realpath(os.path.join(util.GetChromiumSrcDir(), os.pardir, path))
for path in deps_paths)
def FindPythonDependencies(module_path):
logging.info('Finding Python dependencies of %s' % module_path)
# Load the module to inherit its sys.path modifications.
imp.load_source(
os.path.splitext(os.path.basename(module_path))[0], module_path)
# Analyze the module for its imports.
finder = modulefinder.ModuleFinder()
finder.run_script(module_path)
# Filter for only imports in Chromium.
for module in finder.modules.itervalues():
# If it's an __init__.py, module.__path__ gives the package's folder.
module_path = module.__path__[0] if module.__path__ else module.__file__
if not module_path:
continue
module_path = os.path.realpath(module_path)
if not _InDirectory(module_path, util.GetChromiumSrcDir()):
continue
yield module_path
def FindPageSetDependencies(base_dir):
logging.info('Finding page sets in %s' % base_dir)
# Add base_dir to path so our imports relative to base_dir will work.
sys.path.append(base_dir)
tests = discover.DiscoverClasses(base_dir, base_dir, benchmark.Benchmark,
index_by_class_name=True)
for test_class in tests.itervalues():
test_obj = test_class()
# Ensure the test's default options are set if needed.
parser = optparse.OptionParser()
test_obj.AddCommandLineArgs(parser)
options = optparse.Values()
for k, v in parser.get_default_values().__dict__.iteritems():
options.ensure_value(k, v)
# Page set paths are relative to their runner script, not relative to us.
util.GetBaseDir = lambda: base_dir
# TODO: Loading the page set will automatically download its Cloud Storage
# deps. This is really expensive, and we don't want to do this by default.
page_set = test_obj.CreatePageSet(options)
# Add all of its serving_dirs as dependencies.
for serving_dir in page_set.serving_dirs:
yield serving_dir
for page in page_set:
if page.is_file:
yield page.serving_dir
|
def FindExcludedFiles(files, options):
def MatchesConditions(path, conditions):
for condition in conditions:
if condition(path):
return True
return False
# Define some filters for files.
def IsHidden(path):
for pathname_component in path.split(os.sep):
if pathname_component.startswith('.'):
return True
return False
def IsPyc(path):
return os.path.splitext(path)[1] == '.pyc'
def IsInCloudStorage(path):
return os.path.exists(path + '.sha1')
def MatchesExcludeOptions(path):
for pattern in options.exclude:
if (fnmatch.fnmatch(path, pattern) or
fnmatch.fnmatch(os.path.basename(path), pattern)):
return True
return False
# Collect filters we're going to use to exclude files.
exclude_conditions = [
IsHidden,
IsPyc,
IsInCloudStorage,
MatchesExcludeOptions,
]
# Check all the files against the filters.
for path in files:
if MatchesConditions(path, exclude_conditions):
yield path
def FindDependencies(paths, options):
# Verify arguments.
for path in paths:
if not os.path.exists(path):
raise ValueError('Path does not exist: %s' % path)
dependencies = path_set.PathSet()
# Including __init__.py will include Telemetry and its dependencies.
# If the user doesn't pass any arguments, we just have Telemetry.
dependencies |= FindPythonDependencies(os.path.realpath(
os.path.join(util.GetTelemetryDir(), 'telemetry', '__init__.py')))
dependencies |= FindBootstrapDependencies(util.GetTelemetryDir())
# Add dependencies.
for path in paths:
base_dir = os.path.dirname(os.path.realpath(path))
dependencies.add(base_dir)
dependencies |= FindBootstrapDependencies(base_dir)
dependencies |= FindPythonDependencies(path)
if options.include_page_set_data:
dependencies |= FindPageSetDependencies(base_dir)
# Remove excluded files.
dependencies -= FindExcludedFiles(set(dependencies), options)
return dependencies
def ZipDependencies(paths, dependencies, options):
base_dir = os.path.dirname(os.path.realpath(util.GetChromiumSrcDir()))
with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Add dependencies to archive.
for path in dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(path, base_dir))
zip_file.write(path, path_in_archive)
# Add symlinks to executable paths, for ease of use.
for path in paths:
link_info = zipfile.ZipInfo(
os.path.join('telemetry', os.path.basename(path)))
link_info.create_system = 3 # Unix attributes.
# 010 (S_IFREG) marks a regular file; 0777 is the permission bits rwxrwxrwx.
link_info.external_attr = 0100777 << 16 # Octal.
relative_path = os.path.relpath(path, base_dir)
link_script = (
'#!/usr/bin/env python\n\n'
'import os\n'
'import sys\n\n\n'
'script = os.path.join(os.path.dirname(__file__), \'%s\')\n'
'os.execv(sys.executable, [sys.executable, script] + sys.argv[1:])'
% relative_path)
zip_file.writestr(link_info, link_script)
# Add gsutil to the archive, if it's available. The gsutil in
# depot_tools is modified to allow authentication using prodaccess.
# TODO: If there's a gsutil in telemetry/third_party/, bootstrap_deps
# will include it. Then there will be two copies of gsutil at the same
# location in the archive. This can be confusing for users.
gsutil_path = os.path.realpath(cloud_storage.FindGsutil())
if cloud_storage.SupportsProdaccess(gsutil_path):
gsutil_base_dir = os.path.join(os.path.dirname(gsutil_path), os.pardir)
gsutil_dependencies = path_set.PathSet()
gsutil_dependencies.add(os.path.dirname(gsutil_path))
# Also add modules from depot_tools that are needed by gsutil.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'boto'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'fancy_urllib'))
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'retry_decorator'))
gsutil_dependencies -= FindExcludedFiles(
set(gsutil_dependencies), options)
# Also add upload.py to the archive from depot_tools, if it is available.
# This allows us to post patches without requiring a full depot_tools
# install. There's no real point in including upload.py if we do not
# also have gsutil, which is why this is inside the gsutil block.
gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))
for path in gsutil_dependencies:
path_in_archive = os.path.join(
'telemetry', os.path.relpath(util.GetTelemetryDir(), base_dir),
'third_party', os.path.relpath(path, gsutil_base_dir))
zip_file.write(path, path_in_archive)
class FindDependenciesCommand(command_line.OptparseCommand):
"""Prints all dependencies"""
@classmethod
def AddCommandLineArgs(cls, parser):
parser.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed).')
parser.add_option(
'-p', '--include-page-set-data', action='store_true', default=False,
help='Scan tests for page set data and include them.')
parser.add_option(
'-e', '--exclude', action='append', default=[],
help='Exclude paths matching EXCLUDE. Can be used multiple times.')
parser.add_option(
'-z', '--zip',
help='Store files in a zip archive at ZIP.')
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
if args.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
def Run(self, args):
paths = args.positional_args
dependencies = FindDependencies(paths, args)
| random_line_split |
|
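`FindPythonDependencies` in the row above leans on the standard-library `modulefinder`: it loads the script first (so any `sys.path` tweaks take effect), walks its imports, and keeps only modules under the Chromium tree. A stripped-down sketch of the same technique — written for modern Python 3 (the original is Python 2: `imp`, `itervalues`) and skipping the load-source step:

```python
# Minimal sketch of dependency discovery with modulefinder, mirroring
# FindPythonDependencies above. Illustrative only, not the Telemetry code.
import modulefinder
import os

def find_python_dependencies(script_path, root_dir):
    """Yield real paths of imported modules that live under root_dir."""
    root_dir = os.path.realpath(root_dir)
    finder = modulefinder.ModuleFinder()
    finder.run_script(script_path)  # statically walks the script's imports
    for module in finder.modules.values():
        # For a package's __init__.py, __path__ holds the package directory.
        path = module.__path__[0] if module.__path__ else module.__file__
        if not path:
            continue  # built-in modules have no file on disk
        path = os.path.realpath(path)
        # Keep only modules under root_dir (cf. _InDirectory above).
        if os.path.commonprefix([path, root_dir]) == root_dir:
            yield path
```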
as_unsigned.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
#[test]
fn as_unsigned_test1() |
}
| {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned();
assert_eq!(as_unsigned[0], 18446744073709551615);
} | identifier_body |
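The `as_unsigned()` call exercised above is a zero-cost transmute between `&[i64]` and `&[u64]`. Python has no transmute, but the same bit reinterpretation can be sketched with `struct` packing (purely illustrative; it copies bytes rather than reinterpreting in place):

```python
# Reinterpret a 64-bit value's bits between signed and unsigned,
# analogous to as_unsigned()/as_signed() above (but via a byte copy).
import struct

def as_unsigned_u64(value):
    return struct.unpack("<Q", struct.pack("<q", value))[0]

def as_signed_i64(value):
    return struct.unpack("<q", struct.pack("<Q", value))[0]

assert as_unsigned_u64(-1) == 18446744073709551615  # same constant as the test
assert as_signed_i64(0xFFFFFFFFFFFFFFFF) == -1
```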
as_unsigned.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
#[test]
fn | () {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned();
assert_eq!(as_unsigned[0], 18446744073709551615);
}
}
| as_unsigned_test1 | identifier_name |
as_unsigned.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
|
assert_eq!(as_unsigned[0], 18446744073709551615);
}
} | #[test]
fn as_unsigned_test1() {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned(); | random_line_split |
strconv.rs |
match sign {
SignNeg | SignAll if neg => { f('-' as u8); }
SignAll => { f('+' as u8); }
_ => ()
}
// We built the number in reverse order, so un-reverse it here
while cur > 0 {
cur -= 1;
f(buf[cur]);
}
}
/**
* Converts a number to its string representation as a byte vector.
* This is meant to be a common base implementation for all numeric string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `negative_zero` - Whether to treat the special value `-0` as
* `-0` or as `+0`.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `digits` - The amount of digits to use for emitting the
* fractional part, if any. Options are:
* - `DigAll`: All calculatable digits. Beware of bignums or
* fractions!
* - `DigMax(uint)`: Maximum N digits, truncating any trailing zeros.
* - `DigExact(uint)`: Exactly N digits.
*
* # Return value
* A tuple containing the byte vector, and a boolean flag indicating
* whether it represents a special value like `inf`, `-inf`, `NaN` or not.
* It returns a tuple because there can be ambiguity between a special value
* and a number representation at higher bases.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn float_to_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~[u8], bool) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let _1: T = One::one();
match num.classify() {
FPNaN => { return ("NaN".as_bytes().to_owned(), true); }
FPInfinite if num > _0 => {
return match sign {
SignAll => ("+inf".as_bytes().to_owned(), true),
_ => ("inf".as_bytes().to_owned(), true)
};
}
FPInfinite if num < _0 => {
return match sign {
SignNone => ("inf".as_bytes().to_owned(), true),
_ => ("-inf".as_bytes().to_owned(), true),
};
}
_ => {}
}
let neg = num < _0 || (negative_zero && _1 / num == Float::neg_infinity());
let mut buf: ~[u8] = ~[];
let radix_gen: T = cast(radix as int);
// First emit the non-fractional part, looping at least once to make
// sure at least a `0` gets emitted.
let mut deccum = num.trunc();
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
// representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (see two's complement). But we assume that for the
// numbers [-35 .. 0] we always have [0 .. 35].
let current_digit = (deccum % radix_gen).abs();
// Decrease the deccumulator one digit at a time
deccum = deccum / radix_gen;
deccum = deccum.trunc();
buf.push(char::from_digit(current_digit.to_int() as uint, radix)
.unwrap() as u8);
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// If limited digits, calculate one digit more for rounding.
let (limit_digits, digit_count, exact) = match digits {
DigAll => (false, 0u, false),
DigMax(count) => (true, count+1, false),
DigExact(count) => (true, count+1, true)
};
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => {
buf.push('-' as u8);
}
SignAll => {
buf.push('+' as u8);
}
_ => ()
}
buf.reverse();
// Remember start of the fractional digits.
// Points one beyond end of buf if none get generated,
// or at the '.' otherwise.
let start_fractional_digits = buf.len();
// Now emit the fractional part, if any
deccum = num.fract();
if deccum != _0 || (limit_digits && exact && digit_count > 0) {
buf.push('.' as u8);
let mut dig = 0u;
// calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
while (!limit_digits && deccum != _0)
|| (limit_digits && dig < digit_count && (
exact
|| (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
buf.push(char::from_digit(
current_digit.to_int() as uint, radix).unwrap() as u8);
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1u;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
char::to_digit(chr as char, radix).unwrap() as uint
};
let value2ascii = |val: uint| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf.pop());
if extra_digit >= radix / 2 { // -> need to round
let mut i: int = buf.len() as int - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i] == '-' as u8
|| buf[i] == '+' as u8 {
buf.insert((i + 1) as uint, value2ascii(1));
break;
}
// Skip the '.'
if buf[i] == '.' as u8 { i -= 1; loop; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i]);
if current_digit < (radix - 1) {
buf[i] = value2ascii(current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false) | }
/**
* Converts a number to its string representation. This is a wrapper for | random_line_split |
|
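`float_to_str_bytes_common` in the row above emits the integer digits by repeated divmod and the fractional digits by repeated multiply-and-truncate. A compact Python analogue of that digit loop (not the Rust implementation; it omits rounding, sign handling, and exponent support):

```python
# Sketch of radix float formatting: integer part via divmod, fractional
# part via multiply-and-truncate, as in the Rust code above.
import string

def float_to_str(num, radix, max_frac_digits=6):
    assert 2 <= radix <= 36
    digits = string.digits + string.ascii_lowercase
    int_part = int(num)
    frac = num - int_part
    out = []
    while True:  # always emit at least one integer digit
        int_part, d = divmod(int_part, radix)
        out.append(digits[d])
        if int_part == 0:
            break
    out.reverse()
    if frac:
        out.append(".")
        for _ in range(max_frac_digits):
            frac *= radix
            d = int(frac)
            out.append(digits[d])
            frac -= d
            if frac == 0:
                break
    return "".join(out)

assert float_to_str(10.5, 2) == "1010.1"
```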
strconv.rs | 2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
'-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
if last_accum != _0 {
if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
// Detect overflow by reversing the shift-and-add process
if accum_positive &&
(last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
return NumStrConv::inf();
}
if !accum_positive &&
(last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => {
i += 1u; // skip the '.'
break; // start of fractional part
}
_ => return None // invalid number
}
}
i += 1u;
}
// Parse fractional part of number
// Skip if already reached start of exponent
if !exp_found {
let mut power = _1.clone();
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// Decrease power one order of magnitude
power = power / radix_gen;
let digit_t: T = cast(digit);
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + digit_t * power;
} else {
accum = accum - digit_t * power;
}
// Detect overflow by comparing to last value
if accum_positive && accum < last_accum { return NumStrConv::inf(); }
if !accum_positive && accum > last_accum { return NumStrConv::neg_inf(); }
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
_ => return None // invalid number
}
}
i += 1u;
}
}
// Special case: buf not empty, but does not contain any digit in front
// of the exponent sign -> number is empty string
if i == start {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
let mut multiplier = _1.clone();
if exp_found {
let c = buf[i] as char;
let base = match (c, exponent) {
// c is never _ so don't need to handle specially
('e', ExpDec) | ('E', ExpDec) => 10u,
('p', ExpBin) | ('P', ExpBin) => 2u,
_ => return None // char doesn't fit given exponent format
};
// parse remaining bytes as decimal integer,
// skipping the exponent char
let exp: Option<int> = from_str_bytes_common(
buf.slice(i+1, len), 10, true, false, false, ExpNone, false,
ignore_underscores);
match exp {
Some(exp_pow) => {
multiplier = if exp_pow < 0 {
_1 / pow_with_uint::<T>(base, (-exp_pow.to_int()) as uint)
} else {
pow_with_uint::<T>(base, exp_pow.to_int() as uint)
}
}
None => return None // invalid exponent -> invalid number
}
}
Some(accum * multiplier)
}
/**
* Parses a string as a number. This is a wrapper for
* `from_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn from_str_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+Mul<T,T>+
Sub<T,T>+Neg<T>+Add<T,T>+NumStrConv+Clone>(
buf: &str, radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
from_str_bytes_common(buf.as_bytes(), radix, negative,
fractional, special, exponent, empty_zero,
ignore_underscores)
}
#[cfg(test)]
mod test {
use super::*;
use option::*;
#[test]
fn from_str_ignore_underscores() {
let s : Option<u8> = from_str_common("__1__", 2, false, false, false,
ExpNone, false, true);
assert_eq!(s, Some(1u8));
let n : Option<u8> = from_str_common("__1__", 2, false, false, false,
ExpNone, false, false);
assert_eq!(n, None);
let f : Option<f32> = from_str_common("_1_._5_e_1_", 10, false, true, false,
ExpDec, false, true);
assert_eq!(f, Some(1.5e1f32));
}
#[test]
fn from_str_issue5770() {
// try to parse 0b1_1111_1111 = 511 as a u8. Caused problems
// since 255*2+1 == 255 (mod 256) so the overflow wasn't
// detected.
let n : Option<u8> = from_str_common("111111111", 2, false, false, false,
ExpNone, false, false);
assert_eq!(n, None);
}
#[test]
fn from_str_issue7588() {
let u : Option<u8> = from_str_common("1000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(u, None);
let s : Option<i16> = from_str_common("80000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(s, None);
let f : Option<f32> = from_str_common(
"10000000000000000000000000000000000000000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(f, NumStrConv::inf());
let fe : Option<f32> = from_str_common("1e40", 10, false, false, false,
ExpDec, false, false);
assert_eq!(fe, NumStrConv::inf());
}
}
#[cfg(test)]
mod bench {
use extra::test::BenchHarness;
use rand::{XorShiftRng, Rng};
use float;
use to_str::ToStr;
#[bench]
fn uint_to_str_rand(bh: &mut BenchHarness) | {
let mut rng = XorShiftRng::new();
do bh.iter {
rng.gen::<uint>().to_str();
}
} | identifier_body |
|
strconv.rs | (current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false)
}
/**
* Converts a number to its string representation. This is a wrapper for
* `to_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn float_to_str_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits);
(str::from_utf8(bytes), special)
}
// Some constants for from_str_bytes_common's input validation,
// they define minimum radix values for which the character is a valid digit.
static DIGIT_P_RADIX: uint = ('p' as uint) - ('a' as uint) + 11u;
static DIGIT_I_RADIX: uint = ('i' as uint) - ('a' as uint) + 11u;
static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
/**
* Parses a byte slice as a number. This is meant to
* be a common base implementation for all numeric string conversion
* functions like `from_str()` or `from_str_radix()`.
*
* # Arguments
* - `buf` - The byte slice to parse.
* - `radix` - Which base to parse the number as. Accepts 2-36.
* - `negative` - Whether to accept negative numbers.
* - `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `empty_zero` - Whether to accept a empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
=> fail!("from_str_bytes_common: radix %? to low, \
must lie in the range [2, 36]", radix),
_ if (radix as int) > 36
=> fail!("from_str_bytes_common: radix %? to high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
'-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
if last_accum != _0 {
if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
// Detect overflow by reversing the shift-and-add process
if accum_positive &&
(last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
return NumStrConv::inf();
}
if !accum_positive &&
(last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => | {
i += 1u; // skip the '.'
break; // start of fractional part
} | conditional_block |
|
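The overflow checks in the parser above are worth a second look: after each `accum = accum * radix + digit` step, the code both compares against the previous accumulator and undoes the step (`(accum - digit) / radix`); in a fixed-width type a failed round-trip means the multiply-add wrapped even when the result kept growing. A small Python illustration using simulated 8-bit wrapping arithmetic (Python ints never overflow, so a `0xFF` mask stands in for a `u8`):

```python
# Demonstrate the parser's two overflow checks with 8-bit wrapping math.
MASK = 0xFF

def parse_u8(text, radix=10):
    accum = 0
    for ch in text:
        digit = int(ch, radix)
        last = accum
        accum = (accum * radix + digit) & MASK  # wrapping shift-and-add
        if last != 0:
            if accum <= last:  # value wrapped past the top (cf. issue 5770)
                raise OverflowError(text)
            # Reverse the shift-and-add; a failed round-trip means the
            # multiply-add wrapped even though the result kept growing.
            if ((accum - digit) & MASK) // radix != last:
                raise OverflowError(text)
    return accum

assert parse_u8("255") == 255
assert parse_u8("11111111", radix=2) == 255
for bad, radix in (("300", 10), ("111111111", 2)):
    try:
        parse_u8(bad, radix)
    except OverflowError:
        pass
    else:
        raise AssertionError("expected overflow for %r" % bad)
```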
strconv.rs | `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `empty_zero` - Whether to accept a empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
=> fail!("from_str_bytes_common: radix %? to low, \
must lie in the range [2, 36]", radix),
_ if (radix as int) > 36
=> fail!("from_str_bytes_common: radix %? to high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
'-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
if last_accum != _0 {
if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
// Detect overflow by reversing the shift-and-add process
if accum_positive &&
(last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
return NumStrConv::inf();
}
if !accum_positive &&
(last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => {
i += 1u; // skip the '.'
break; // start of fractional part
}
_ => return None // invalid number
}
}
i += 1u;
}
// Parse fractional part of number
// Skip if already reached start of exponent
if !exp_found {
let mut power = _1.clone();
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// Decrease power one order of magnitude
power = power / radix_gen;
let digit_t: T = cast(digit);
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + digit_t * power;
} else {
accum = accum - digit_t * power;
}
// Detect overflow by comparing to last value
if accum_positive && accum < last_accum { return NumStrConv::inf(); }
if !accum_positive && accum > last_accum { return NumStrConv::neg_inf(); }
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
_ => return None // invalid number
}
}
i += 1u;
}
}
// Special case: buf not empty, but does not contain any digit in front
// of the exponent sign -> number is empty string
if i == start {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
let mut multiplier = _1.clone();
if exp_found {
let c = buf[i] as char;
let base = match (c, exponent) {
// c is never _ so don't need to handle specially
('e', ExpDec) | ('E', ExpDec) => 10u,
('p', ExpBin) | ('P', ExpBin) => 2u,
_ => return None // char doesn't fit given exponent format
};
// parse remaining bytes as decimal integer,
// skipping the exponent char
let exp: Option<int> = from_str_bytes_common(
buf.slice(i+1, len), 10, true, false, false, ExpNone, false,
ignore_underscores);
match exp {
Some(exp_pow) => {
multiplier = if exp_pow < 0 {
_1 / pow_with_uint::<T>(base, (-exp_pow.to_int()) as uint)
} else {
pow_with_uint::<T>(base, exp_pow.to_int() as uint)
}
}
None => return None // invalid exponent -> invalid number
}
}
Some(accum * multiplier)
}
/**
* Parses a string as a number. This is a wrapper for
* `from_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn | from_str_common | identifier_name |
|
sequence.ts | import { Applicative, Traversable } from '../ramda/dist/src/$types';
import * as R_sequence from '../ramda/dist/src/sequence';
declare const any_applicative: Applicative<any>;
declare const number_applicative: Applicative<number>;
declare const any_applicative_traverable: Traversable<Applicative<any>>;
| R_sequence(number_applicative.of, [number_applicative]);
// @dts-jest:pass
R_sequence(any_applicative.of, [any_applicative]);
// @dts-jest:pass
R_sequence<number>(any_applicative.of, [any_applicative]);
// @dts-jest:pass
R_sequence(number_applicative.of, any_applicative_traverable);
// @dts-jest:pass
R_sequence(any_applicative.of, any_applicative_traverable);
// @dts-jest:pass
R_sequence<number>(any_applicative.of, any_applicative_traverable); | // @dts-jest:pass | random_line_split |
lightBaseTheme.js | import Colors from '../colors';
import ColorManipulator from '../../utils/color-manipulator';
import Spacing from '../spacing';
/*
* Light Theme is the default theme used in material-ui. It is guaranteed to
* have all theme variables needed for every component. Variables not defined
* in a custom theme will default to these values.
*/
export default {
spacing: Spacing,
fontFamily: 'Roboto, sans-serif',
palette: {
primary1Color: Colors.cyan500,
primary2Color: Colors.cyan700,
primary3Color: Colors.grey400,
accent1Color: Colors.pinkA200,
accent2Color: Colors.grey100,
accent3Color: Colors.grey500, | borderColor: Colors.grey300,
disabledColor: ColorManipulator.fade(Colors.darkBlack, 0.3),
pickerHeaderColor: Colors.cyan500,
clockCircleColor: ColorManipulator.fade(Colors.darkBlack, 0.07),
shadowColor: Colors.fullBlack,
},
}; | textColor: Colors.darkBlack,
alternateTextColor: Colors.white,
canvasColor: Colors.white, | random_line_split |
yaml.js | "use strict";
const { ParserError } = require("../util/errors");
const yaml = require("js-yaml");
module.exports = {
/**
* The order that this parser will run, in relation to other parsers.
*
* @type {number}
*/
order: 200,
/**
* Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
*
* @type {boolean}
*/
allowEmpty: true,
/**
* Determines whether this parser can parse a given file reference.
* Parsers that match will be tried, in order, until one successfully parses the file.
* Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
* every parser will be tried.
*
* @type {RegExp|string[]|function}
*/
canParse: [".yaml", ".yml", ".json"], // JSON is valid YAML
/**
* Parses the given file as YAML
*
* @param {object} file - An object containing information about the referenced file
* @param {string} file.url - The full URL of the referenced file
* @param {string} file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
* @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver
* @returns {Promise}
*/
async parse (file) { // eslint-disable-line require-await
let data = file.data;
if (Buffer.isBuffer(data)) {
data = data.toString();
}
if (typeof data === "string") {
try {
return yaml.load(data);
}
catch (e) {
throw new ParserError(e.message, file.url);
}
}
else |
}
};
| {
// data is already a JavaScript value (object, array, number, null, NaN, etc.)
return data;
} | conditional_block |
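The `parse()` hook above normalizes its input first (Buffer to string), parses strings as YAML, and passes anything already parsed through untouched. A rough Python analogue using PyYAML (assumed installed via `pip install pyyaml`; `yaml.safe_load` is used here, whereas the JS code calls js-yaml's `load`):

```python
# Rough Python analogue of the parse() hook above: decode bytes,
# YAML-parse strings, pass other values through unchanged.
import yaml

class ParserError(ValueError):
    def __init__(self, message, url):
        super().__init__("%s (%s)" % (message, url))

def parse(data, url):
    if isinstance(data, bytes):
        data = data.decode("utf-8")
    if isinstance(data, str):
        try:
            return yaml.safe_load(data)
        except yaml.YAMLError as e:
            raise ParserError(str(e), url)
    return data  # already a parsed value (dict, list, number, None, ...)

assert parse(b"order: 200", "file:///demo.yml") == {"order": 200}
assert parse({"already": "parsed"}, "file:///demo.json") == {"already": "parsed"}
```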
yaml.js | "use strict";
const { ParserError } = require("../util/errors");
const yaml = require("js-yaml"); |
module.exports = {
/**
* The order that this parser will run, in relation to other parsers.
*
* @type {number}
*/
order: 200,
/**
* Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
*
* @type {boolean}
*/
allowEmpty: true,
/**
* Determines whether this parser can parse a given file reference.
* Parsers that match will be tried, in order, until one successfully parses the file.
* Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
* every parser will be tried.
*
* @type {RegExp|string[]|function}
*/
canParse: [".yaml", ".yml", ".json"], // JSON is valid YAML
/**
* Parses the given file as YAML
*
* @param {object} file - An object containing information about the referenced file
* @param {string} file.url - The full URL of the referenced file
* @param {string} file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
* @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver
* @returns {Promise}
*/
async parse (file) { // eslint-disable-line require-await
let data = file.data;
if (Buffer.isBuffer(data)) {
data = data.toString();
}
if (typeof data === "string") {
try {
return yaml.load(data);
}
catch (e) {
throw new ParserError(e.message, file.url);
}
}
else {
// data is already a JavaScript value (object, array, number, null, NaN, etc.)
return data;
}
}
}; | random_line_split |
|
yaml.js | "use strict";
const { ParserError } = require("../util/errors");
const yaml = require("js-yaml");
module.exports = {
/**
* The order that this parser will run, in relation to other parsers.
*
* @type {number}
*/
order: 200,
/**
* Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
*
* @type {boolean}
*/
allowEmpty: true,
/**
* Determines whether this parser can parse a given file reference.
* Parsers that match will be tried, in order, until one successfully parses the file.
* Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
* every parser will be tried.
*
* @type {RegExp|string[]|function}
*/
canParse: [".yaml", ".yml", ".json"], // JSON is valid YAML
/**
* Parses the given file as YAML
*
* @param {object} file - An object containing information about the referenced file
* @param {string} file.url - The full URL of the referenced file
* @param {string} file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
* @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver
* @returns {Promise}
*/
async parse (file) |
};
| { // eslint-disable-line require-await
let data = file.data;
if (Buffer.isBuffer(data)) {
data = data.toString();
}
if (typeof data === "string") {
try {
return yaml.load(data);
}
catch (e) {
throw new ParserError(e.message, file.url);
}
}
else {
// data is already a JavaScript value (object, array, number, null, NaN, etc.)
return data;
}
} | identifier_body |
yaml.js | "use strict";
const { ParserError } = require("../util/errors");
const yaml = require("js-yaml");
module.exports = {
/**
* The order that this parser will run, in relation to other parsers.
*
* @type {number}
*/
order: 200,
/**
* Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
*
* @type {boolean}
*/
allowEmpty: true,
/**
* Determines whether this parser can parse a given file reference.
* Parsers that match will be tried, in order, until one successfully parses the file.
* Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
* every parser will be tried.
*
* @type {RegExp|string[]|function}
*/
canParse: [".yaml", ".yml", ".json"], // JSON is valid YAML
/**
* Parses the given file as YAML
*
* @param {object} file - An object containing information about the referenced file
* @param {string} file.url - The full URL of the referenced file
* @param {string} file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
* @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver
* @returns {Promise}
*/
async | (file) { // eslint-disable-line require-await
let data = file.data;
if (Buffer.isBuffer(data)) {
data = data.toString();
}
if (typeof data === "string") {
try {
return yaml.load(data);
}
catch (e) {
throw new ParserError(e.message, file.url);
}
}
else {
// data is already a JavaScript value (object, array, number, null, NaN, etc.)
return data;
}
}
};
| parse | identifier_name |
settings.py | """
Django settings for cui project.
Generated by 'django-admin startproject' using Django 1.10.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
""" |
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%g1*8#gej6qsrz@*psc1t=#nh)ym#$)i=rio)eqk8im3)iyi7-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'channels',
'home.apps.HomeConfig',
'data.apps.DataConfig',
'bootstrap3',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cui.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cui.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'es-ar'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL="/data/"
LOGIN_URL="/home/login/"
LOGOUT_REDIRECT_URL="/home/login/" | random_line_split |
|
score_board_test.py | import unittest
import datetime
from classes.score_board import ScoreBoard
class DateTimeStub(object):
def now(self):
return "test_date_time"
class ScoreBoardTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(ScoreBoardTest, self).__init__(*args, **kwargs)
self.score_board = ScoreBoard("testdatabase.db")
def seedScores(self):
self.score_board.clear()
dummy_scores = [
("player1", 5, datetime.datetime.now() - datetime.timedelta(days=3)),
("player2", 6, datetime.datetime.now()),
("player3", 3, datetime.datetime.now() - datetime.timedelta(days=2))
]
self.score_board.cursor.executemany(
"INSERT INTO scores(name, score, recorded_at) VALUES(?,?,?)",
dummy_scores
)
self.score_board.db.commit()
def testRecorderAt(self):
self.assertEqual(type(self.score_board.recordedAt()), datetime.datetime)
def testCount(self):
self.seedScores()
self.assertEqual(self.score_board.count(), 3)
def testClear(self):
|
def testAdd(self):
self.seedScores()
self.score_board.add("player4", 15)
self.assertEqual(self.score_board.count(), 4)
def testHighest(self):
self.seedScores()
scores = self.score_board.highest()
self.assertEqual(scores[0][0], "player2")
self.assertEqual(scores[2][0], "player3")
def testRecent(self):
self.seedScores()
scores = self.score_board.recent()
self.assertEqual(scores[0][0], "player2")
self.assertEqual(scores[2][0], "player1") | self.seedScores()
self.score_board.clear()
self.assertEqual(self.score_board.count(), 0) | identifier_body |
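The `ScoreBoard` class under test is not shown in this file. Below is a minimal sqlite3-backed sketch consistent with the calls the tests exercise (`add`, `count`, `clear`, `highest`, `recent`, `recordedAt`, plus the `cursor`/`db` attributes used for seeding) — hypothetical, not the project's actual `classes/score_board.py`:

```python
# Hypothetical ScoreBoard implementation inferred from the tests above;
# the real classes/score_board.py may differ.
import datetime
import sqlite3

class ScoreBoard(object):
    def __init__(self, db_path):
        self.db = sqlite3.connect(db_path)
        self.cursor = self.db.cursor()
        self.cursor.execute(
            "CREATE TABLE IF NOT EXISTS scores("
            "name TEXT, score INTEGER, recorded_at TIMESTAMP)")
        self.db.commit()

    def recordedAt(self):
        return datetime.datetime.now()

    def add(self, name, score):
        self.cursor.execute(
            "INSERT INTO scores(name, score, recorded_at) VALUES(?,?,?)",
            (name, score, self.recordedAt()))
        self.db.commit()

    def count(self):
        return self.cursor.execute("SELECT COUNT(*) FROM scores").fetchone()[0]

    def clear(self):
        self.cursor.execute("DELETE FROM scores")
        self.db.commit()

    def highest(self, limit=10):
        return self.cursor.execute(
            "SELECT name, score FROM scores ORDER BY score DESC LIMIT ?",
            (limit,)).fetchall()

    def recent(self, limit=10):
        return self.cursor.execute(
            "SELECT name, score FROM scores ORDER BY recorded_at DESC LIMIT ?",
            (limit,)).fetchall()
```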
score_board_test.py | import unittest | from classes.score_board import ScoreBoard
class DateTimeStub(object):
def now(self):
return "test_date_time"
class ScoreBoardTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(ScoreBoardTest, self).__init__(*args, **kwargs)
self.score_board = ScoreBoard("testdatabase.db")
def seedScores(self):
self.score_board.clear()
dummy_scores = [
("player1", 5, datetime.datetime.now() - datetime.timedelta(days=3)),
("player2", 6, datetime.datetime.now()),
("player3", 3, datetime.datetime.now() - datetime.timedelta(days=2))
]
self.score_board.cursor.executemany(
"INSERT INTO scores(name, score, recorded_at) VALUES(?,?,?)",
dummy_scores
)
self.score_board.db.commit()
def testRecorderAt(self):
self.assertEqual(type(self.score_board.recordedAt()), datetime.datetime)
def testCount(self):
self.seedScores()
self.assertEqual(self.score_board.count(), 3)
def testClear(self):
self.seedScores()
self.score_board.clear()
self.assertEqual(self.score_board.count(), 0)
def testAdd(self):
self.seedScores()
self.score_board.add("player4", 15)
self.assertEqual(self.score_board.count(), 4)
def testHighest(self):
self.seedScores()
scores = self.score_board.highest()
self.assertEqual(scores[0][0], "player2")
self.assertEqual(scores[2][0], "player3")
def testRecent(self):
self.seedScores()
scores = self.score_board.recent()
self.assertEqual(scores[0][0], "player2")
self.assertEqual(scores[2][0], "player1") | import datetime | random_line_split |
score_board_test.py | import unittest
import datetime
from classes.score_board import ScoreBoard
class DateTimeStub(object):
def now(self):
return "test_date_time"
class | (unittest.TestCase):
def __init__(self, *args, **kwargs):
super(ScoreBoardTest, self).__init__(*args, **kwargs)
self.score_board = ScoreBoard("testdatabase.db")
def seedScores(self):
self.score_board.clear()
dummy_scores = [
("player1", 5, datetime.datetime.now() - datetime.timedelta(days=3)),
("player2", 6, datetime.datetime.now()),
("player3", 3, datetime.datetime.now() - datetime.timedelta(days=2))
]
self.score_board.cursor.executemany(
"INSERT INTO scores(name, score, recorded_at) VALUES(?,?,?)",
dummy_scores
)
self.score_board.db.commit()
def testRecorderAt(self):
self.assertEqual(type(self.score_board.recordedAt()), datetime.datetime)
def testCount(self):
self.seedScores()
self.assertEqual(self.score_board.count(), 3)
def testClear(self):
self.seedScores()
self.score_board.clear()
self.assertEqual(self.score_board.count(), 0)
def testAdd(self):
self.seedScores()
self.score_board.add("player4", 15)
self.assertEqual(self.score_board.count(), 4)
def testHighest(self):
self.seedScores()
scores = self.score_board.highest()
self.assertEqual(scores[0][0], "player2")
self.assertEqual(scores[2][0], "player3")
def testRecent(self):
self.seedScores()
scores = self.score_board.recent()
self.assertEqual(scores[0][0], "player2")
self.assertEqual(scores[2][0], "player1") | ScoreBoardTest | identifier_name |
produtos.js | $(document).ready(function(){
var $nomeInput = $('#nomeInput');
var $descricaoInput = $('#descricaoInput');
var $precoInput = $('#precoInput');
var $categoriaSelect = $('#categoriaSelect');
var $novidadeSelect = $('#novidadeSelect');
var $carregandoImg = $('#carregandoImg');
$carregandoImg.hide();
var $btnSalvar = $('#salvar');
var $btnFechar = $('#fechar');
var $listaProdutos = $('#listaProdutos');
$listaProdutos.hide();
  function obterValoresdeProdutos(){
return {'nome' : $nomeInput.val() , 'descricao' : $descricaoInput.val(), 'preco': $precoInput.val() , 'categoria' : $categoriaSelect.val(), 'novidade' : $novidadeSelect.val()}
}
function limparValores(){
$('input[type="text"]').val('');
$novidadeSelect.val('');
}
$.get('/produtos/admin/rest/listar').success(function(produtos){
$.each(produtos,function(index, p){
adicionarProduto(p);
})
});
function adicionarProduto (produto) {
var msg = '<tr id="tr-produto_'+produto.id+'"><td>'+produto.id+'</td><td>'+produto.nome+'</td><td>'+produto.categoria+'</td><td>'+produto.descricao+'</td><td>'+produto.preco+'</td><td>'+(produto.novidade == "1" ? 'Sim' : 'Não')+'</td><td><a href="/produtos/admin/editar/'+produto.id+'" class="btn btn-warning glyphicon glyphicon-pencil"></a></td><td><button id="btn-deletar_'+produto.id+'" class="btn btn-danger"><i class="glyphicon glyphicon-trash"></i></button></td></tr>';
$listaProdutos.show();
$listaProdutos.append(msg);
$('#btn-deletar_'+produto.id).click(function(){
if (confirm("Deseja apagar esse registro? "))
{
var resp = $.post('/produtos/admin/rest/deletar' , {produto_id : produto.id});
resp.success(function(){
$('#tr-produto_'+produto.id).remove()
});
}
});
}
$btnSalvar.click(function(){
$('.has-error').removeClass('has-error');
$('.help-block').empty();
$btnSalvar.attr('disabled' , 'disabled');
$carregandoImg.fadeIn('fast');
var resp = $.post('/produtos/admin/rest/salvar', obterValoresdeProdutos());
resp.success(function(produto){
limparValores();
$btnFechar.click();
adicionarProduto(produto);
})
resp.error(function(erros){
      for(var campos in erros.responseJSON)
{
$('#'+campos+'Div').addClass('has-error');
$('#'+campos+'Span').text(erros.responseJSON[campos]);
}
}).always(function(){
$btnSalvar.removeAttr('disabled','disabled');
$carregandoImg.hide();
});
});
});
profile.js | Template.profile.helpers({
info: function() {
if(Meteor.userId()) {
var target_id = Router.current().data();
var profile = Profiles.findOne({ user_id: target_id });
return profile;
}
},
isCurrentUser: function() {
var userId = Meteor.userId();
    if(userId) {
      /* determine if the profile belongs to currently logged in user */
      var targetId = Router.current().data();
      if(userId==targetId)
        return true;
    }
},
editProfile: function() {
var editing = Session.get('editingProfile');
if(editing)
return true;
},
editState: function() {
var editing = Session.get('editingProfile');
if(editing)
return 'Done';
else
return 'Edit Profile';
}
});
Template.profile.events({
'click #editProfile': function(evt) {
var editingState = Session.get('editingProfile');
if(editingState==true) {
var name = $('#name').val();
var surname = $('#surname').val();
var company = $('#company').val();
var phone = $('#phone').val();
var website = $('#website').val();
var description = $('#description').val();
Meteor.call('editProfile', name, surname, company, phone, website, description, function(err, data) {
if(!err) {
$('#name').val('');
$('#surname').val('');
$('#company').val('');
$('#phone').val('');
$('#website').val('');
$('#description').val('');
Session.set('editingProfile', false);
}
else {
console.log(String(err));
}
});
}
else {
Session.set('editingProfile', true);
}
}
});
mod.rs | /*
use std::fmt;
use std::io::{self, Write};
use std::marker::PhantomData;
use std::sync::mpsc;
use url::Url;
use tick;
use time::now_utc;
use header::{self, Headers};
use http::{self, conn};
use method::Method;
use net::{Fresh, Streaming};
use status::StatusCode;
use version::HttpVersion;
*/
pub use self::decode::Decoder;
pub use self::encode::Encoder;
pub use self::parse::parse;
mod decode;
mod encode;
mod parse;
/*
fn should_have_response_body(method: &Method, status: u16) -> bool {
trace!("should_have_response_body({:?}, {})", method, status);
match (method, status) {
(&Method::Head, _) |
(_, 100...199) |
(_, 204) |
(_, 304) |
(&Method::Connect, 200...299) => false,
_ => true
}
}
*/
/*
const MAX_INVALID_RESPONSE_BYTES: usize = 1024 * 128;
impl HttpMessage for Http11Message {
fn get_incoming(&mut self) -> ::Result<ResponseHead> {
unimplemented!();
/*
try!(self.flush_outgoing());
let stream = match self.stream.take() {
Some(stream) => stream,
None => {
// The message was already in the reading state...
// TODO Decide what happens in case we try to get a new incoming at that point
return Err(From::from(
io::Error::new(io::ErrorKind::Other,
"Read already in progress")));
}
};
let expected_no_content = stream.previous_response_expected_no_content();
trace!("previous_response_expected_no_content = {}", expected_no_content);
let mut stream = BufReader::new(stream);
let mut invalid_bytes_read = 0;
let head;
loop {
head = match parse_response(&mut stream) {
Ok(head) => head,
Err(::Error::Version)
if expected_no_content && invalid_bytes_read < MAX_INVALID_RESPONSE_BYTES => {
trace!("expected_no_content, found content");
invalid_bytes_read += 1;
stream.consume(1);
continue;
}
Err(e) => {
self.stream = Some(stream.into_inner());
return Err(e);
}
};
break;
}
let raw_status = head.subject;
let headers = head.headers;
let method = self.method.take().unwrap_or(Method::Get);
let is_empty = !should_have_response_body(&method, raw_status.0);
stream.get_mut().set_previous_response_expected_no_content(is_empty);
// According to https://tools.ietf.org/html/rfc7230#section-3.3.3
        // 1. HEAD responses, and Status 1xx, 204, and 304 cannot have a body.
// 2. Status 2xx to a CONNECT cannot have a body.
// 3. Transfer-Encoding: chunked has a chunked body.
// 4. If multiple differing Content-Length headers or invalid, close connection.
// 5. Content-Length header has a sized body.
// 6. Not Client.
// 7. Read till EOF.
self.reader = Some(if is_empty {
SizedReader(stream, 0)
} else {
if let Some(&TransferEncoding(ref codings)) = headers.get() {
if codings.last() == Some(&Chunked) {
ChunkedReader(stream, None)
} else {
trace!("not chuncked. read till eof");
EofReader(stream)
}
} else if let Some(&ContentLength(len)) = headers.get() {
SizedReader(stream, len)
} else if headers.has::<ContentLength>() {
trace!("illegal Content-Length: {:?}", headers.get_raw("Content-Length"));
return Err(Error::Header);
} else {
trace!("neither Transfer-Encoding nor Content-Length");
EofReader(stream)
}
});
trace!("Http11Message.reader = {:?}", self.reader);
        Ok(ResponseHead {
            headers: headers,
            raw_status: raw_status,
            version: head.version,
        })
        */
    }
}
*/
repo.py | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ["BaseRepo", "TacticRepo"]
import os, sys, re
from pyasm.common import Environment, System
from pyasm.biz import File
from pyasm.search import FileUndo
from .checkin import CheckinException
class BaseRepo(object):
'''abstract class defining repositories'''
def has_file_codes(self):
return True
def handle_system_commands(self, snapshot, files, file_objects, mode, md5s, source_paths=[], commit=False):
pass
class TacticRepo(BaseRepo):
def handle_system_commands(self, snapshot, files, file_objects, mode, md5s, source_paths=[], file_sizes=[], commit=False):
'''move the tmp files in the appropriate directory'''
# if mode is local then nothing happens here
if mode == 'local':
return
if commit in ['false', False]:
commit = False
else:
commit = True
# inplace mode does not move the file. It just registers the file
# object
if mode == 'inplace':
for i, file in enumerate(files):
file_object = file_objects[i]
to_name = file_object.get_full_file_name()
to_path = file
# This is handled in create_file_types
#file_type = snapshot.get_type_by_file_name(to_name)
#file_object.set_value('type', file_type)
if not os.path.isdir(to_path):
md5_checksum = None
if md5s:
md5_checksum = md5s[i]
if not md5_checksum:
md5_checksum = File.get_md5(to_path)
if md5_checksum:
file_object.set_value("md5", md5_checksum)
if commit:
file_object.commit(triggers="none")
return
for i, file in enumerate(files):
file_object = file_objects[i]
to_name = file_object.get_full_file_name()
file_type = snapshot.get_type_by_file_name(to_name)
if mode == 'preallocate':
to_path = file
else:
lib_dir = snapshot.get_lib_dir(file_type=file_type, file_object=file_object)
# it should have been created in postprocess_snapshot
System().makedirs(lib_dir)
to_path = "%s/%s" % (lib_dir, to_name )
#print "path: ", i, files[i]
#print to_path, os.path.exists(to_path)
# first make sure that the to path does not exist, if so, just skip
if os.path.exists(to_path) and mode not in ['inplace','preallocate']:
raise CheckinException('This path [%s] already exists'%to_path)
# add the file
try:
# inplace undo used to not touch the file,
# now it will be moved to cache on undo
io_action = True
if mode in ['preallocate']:
io_action = False
if mode == 'move':
FileUndo.move( source_paths[i], to_path )
#elif mode == 'copy': # was free_copy
#FileUndo.create( source_paths[i], to_path, io_action=io_action )
# make it look like the files was created in the repository
else: # mode ='create'
md5 = file_object.get_value("md5")
st_size = file_object.get_value("st_size")
rel_dir = file_object.get_value("relative_dir")
if mode == 'copy':
io_action = 'copy'
src_path = source_paths[i]
else:
src_path = files[i]
file_name = to_name
rel_path = "%s/%s" % (rel_dir, file_name)
FileUndo.create( src_path, to_path, io_action=io_action, extra={ "md5": md5, "st_size": st_size, "rel_path": rel_path } )
except IOError as e:
raise CheckinException('IO Error occurred. %s' %e.__str__())
# check to see that the file exists.
if not os.path.exists( to_path ):
if mode in ["inplace", "preallocate"]:
raise CheckinException("File not found in repo at [%s]" % to_path )
else:
raise CheckinException("Failed move [%s] to [%s]" % \
(files[i], to_path) )
file_object.set_value('type', file_type)
if md5s != "ignore" and not os.path.isdir(to_path):
md5_checksum = None
if md5s:
md5_checksum = md5s[i]
if not md5_checksum:
md5_checksum = File.get_md5(to_path)
#md5_checksum = ""
if md5_checksum:
file_object.set_value("md5", md5_checksum)
if commit:
file_object.commit(triggers="none")
__all__.append("S3Repo")
class S3Repo(BaseRepo):
'''This uploads the files to s3 directly'''
    def handle_system_commands(self, snapshot, files, file_objects, mode, md5s, source_paths=[], file_sizes=[], commit=False):
try:
import boto3
from botocore.exceptions import ClientError
        except ImportError:
            raise Exception("Python [boto3] module not installed")
session = boto3.Session()
s3_client = session.client('s3')
        s3_resource = session.resource('s3')
sobject = snapshot.get_parent()
from pyasm.security import Site
site = Site.get_site()
project_code = sobject.get_project_code()
#!!! TEST
bucket = "tactic01"
for i, file in enumerate(files):
file_object = file_objects[i]
to_name = file_object.get_full_file_name()
file_type = snapshot.get_type_by_file_name(to_name)
web_dir = snapshot.get_relative_dir(file_type=file_type, file_object=file_object)
object_name = "%s/%s/%s" % (site, web_dir, to_name)
            print("object: ", object_name)
# push these files to s3
try:
s3_client.upload_file(source_paths[i], bucket, object_name)
            except ClientError as e:
                raise
package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class DhpmmF(MakefilePackage):
"""DHPMM_P:High-precision Matrix Multiplication with Faithful Rounding"""
homepage = "http://www.math.twcu.ac.jp/ogita/post-k/"
url = "http://www.math.twcu.ac.jp/ogita/post-k/software/DHPMM_F/DHPMM_F_alpha.tar.gz"
version('alpha', sha256='35321ecbc749f2682775ffcd27833afc8c8eb4fa7753ce769727c9d1fe097848')
depends_on('blas', type='link')
depends_on('lapack', type='link')
    def patch(self):
math_libs = self.spec['lapack'].libs + self.spec['blas'].libs
makefile = FileFilter('Makefile')
if self.spec.satisfies('%gcc'):
makefile.filter(r'^MKL\s+=\s1', 'MKL=0')
makefile.filter(r'^CC\s+=\sgcc',
'CC={0}'.format(spack_cc))
makefile.filter(r'^CXX\s+=\sg\+\+',
'CXX={0}'.format(spack_cxx))
makefile.filter(r'^BLASLIBS\s+=\s-llapack\s-lblas',
'BLASLIBS={0}'.format(math_libs.ld_flags))
elif self.spec.satisfies('%fj'):
makefile.filter(r'^#ENV\s+=\sFX100', 'ENV=FX100')
makefile.filter(r'^ENV\s+=\sGCC', '#ENV=GCC')
makefile.filter(r'^MKL\s+=\s1', 'MKL=0')
makefile.filter(r'^CC\s+=\sfccpx',
'CC={0}'.format(spack_cc))
makefile.filter(r'^CXX\s+=\sFCCpx',
'CXX={0}'.format(spack_cxx))
makefile.filter(r'^BLASLIBS\s+=\s-llapack\s-lblas',
'BLASLIBS={0}'.format(math_libs.ld_flags))
elif self.spec.satisfies('%intel'):
makefile.filter(r'^ENV\s+=\sGCC', '#ENV=GCC')
makefile.filter(r'^ENV\s+=\sICC', 'ENV=ICC')
makefile.filter(r'^CC\s+=\sicc',
'CC={0}'.format(spack_cc))
makefile.filter(r'^CXX\s+=\sicc',
'CXX={0}'.format(spack_cxx))
def install(self, spec, prefix):
mkdirp(prefix.bin)
        install('test/source4_SpMV', prefix.bin)
DirectionalLightHelper.js | /**
* @author alteredq / http://alteredqualia.com/
* @author mrdoob / http://mrdoob.com/
* @author WestLangley / http://github.com/WestLangley
*/
THREE.DirectionalLightHelper = function ( light, size ) {
THREE.Object3D.call( this );
this.light = light;
this.light.updateMatrixWorld();
this.matrixWorld = light.matrixWorld;
this.matrixAutoUpdate = false;
size = size || 1;
var geometry = new THREE.PlaneGeometry( size, size );
var material = new THREE.MeshBasicMaterial( { wireframe: true, fog: false } );
material.color.copy( this.light.color ).multiplyScalar( this.light.intensity );
this.lightPlane = new THREE.Mesh( geometry, material );
this.add( this.lightPlane );
geometry = new THREE.Geometry();
geometry.vertices.push( new THREE.Vector3() );
geometry.vertices.push( new THREE.Vector3() );
material = new THREE.LineBasicMaterial( { fog: false } ); |
this.update();
};
THREE.DirectionalLightHelper.prototype = Object.create( THREE.Object3D.prototype );
THREE.DirectionalLightHelper.prototype.dispose = function () {
this.lightPlane.geometry.dispose();
this.lightPlane.material.dispose();
this.targetLine.geometry.dispose();
this.targetLine.material.dispose();
};
THREE.DirectionalLightHelper.prototype.update = function () {
var v1 = new THREE.Vector3();
var v2 = new THREE.Vector3();
var v3 = new THREE.Vector3();
return function () {
v1.setFromMatrixPosition( this.light.matrixWorld );
v2.setFromMatrixPosition( this.light.target.matrixWorld );
v3.subVectors( v2, v1 );
this.lightPlane.lookAt( v3 );
this.lightPlane.material.color.copy( this.light.color ).multiplyScalar( this.light.intensity );
this.targetLine.geometry.vertices[ 1 ].copy( v3 );
this.targetLine.geometry.verticesNeedUpdate = true;
this.targetLine.material.color.copy( this.lightPlane.material.color );
}
}(); | material.color.copy( this.light.color ).multiplyScalar( this.light.intensity );
this.targetLine = new THREE.Line( geometry, material );
this.add( this.targetLine ); | random_line_split |
setup.py | # -*- coding: utf-8 *-*
import os
import subprocess
import sys
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
from distutils.cmd import Command

__version__ = '0.0+'
with open('README.rst') as f:
readme_content = f.read()
class DocCommand(Command):
description = "generate or test documentation"
user_options = [("test", "t",
"run doctests instead of generating documentation")]
boolean_options = ["test"]
    def initialize_options(self):
self.test = False
def finalize_options(self):
pass
def run(self):
if self.test:
path = "docs/_build/doctest"
mode = "doctest"
else:
path = "docs/_build/%s" % __version__
mode = "html"
try:
os.makedirs(path)
        except OSError:
pass
status = subprocess.call(["sphinx-build", "-E",
"-b", mode, "docs", path])
if status:
raise RuntimeError("documentation step '%s' failed" % (mode,))
sys.stdout.write("\nDocumentation step '%s' performed, results here:\n"
" %s/\n" % (mode, path))
setup(
name='asyncflux',
    version=__version__,
url='https://github.com/puentesarrin/asyncflux',
description='Asynchronous client for InfluxDB and Tornado.',
long_description=readme_content,
author='Jorge Puente-Sarrín',
author_email='[email protected]',
packages=['asyncflux'],
keywords=['asyncflux', 'tornado', 'influxdb', 'influx', 'async'],
install_requires=['tornado>=3.0'],
license='Apache License, Version 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy'],
test_suite='tests.runtests',
cmdclass={"doc": DocCommand}
)
test_token_api.py | # coding: utf-8
"""
Nordigen Account Information Services API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2.0 (v2)
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import nordigen
from nordigen.api.token_api import TokenApi # noqa: E501
from nordigen.rest import ApiException
class TestTokenApi(unittest.TestCase):
"""TokenApi unit test stubs"""
    def setUp(self):
self.api = TokenApi() # noqa: E501
def tearDown(self):
pass
def test_j_wt_obtain(self):
"""Test case for j_wt_obtain
"""
pass
def test_j_wt_refresh(self):
"""Test case for j_wt_refresh
"""
pass
if __name__ == '__main__':
    unittest.main()
S15.1.2.2_A8.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* parseInt may interpret only a leading portion of the string as
* a number value; it ignores any characters that cannot be interpreted as part
* of the notation of an decimal literal, and no indication is given that any such
* characters were ignored.
*
* @path ch15/15.1/15.1.2/15.1.2.2/S15.1.2.2_A8.js
* @description Complex test without eval
*/
//CHECK
var errorCount = 0;
var count = 0;
var indexP;
var indexO = 0;
for (var index = 0; index <= 65535; index++) {
if ((index < 0x0030) || (index > 0x0039) &&
(index < 0x0041) || (index > 0x005A) &&
(index < 0x0061) || (index > 0x007A)) {
var hex = decimalToHexString(index);
if (parseInt("1Z" + String.fromCharCode(index), 36) !== 71) {
if (indexO === 0) {
indexO = index;
} else {
if ((index - indexP) !== 1) {
if ((indexP - indexO) !== 0) {
var hexP = decimalToHexString(indexP);
var hexO = decimalToHexString(indexO);
$ERROR('#' + hexO + '-' + hexP + ' ');
}
else {
var hexP = decimalToHexString(indexP);
$ERROR('#' + hexP + ' ');
}
indexO = index;
}
}
indexP = index;
errorCount++;
}
count++;
}
}
if (errorCount > 0) {
if ((indexP - indexO) !== 0) {
var hexP = decimalToHexString(indexP);
var hexO = decimalToHexString(indexO);
$ERROR('#' + hexO + '-' + hexP + ' ');
} else {
var hexP = decimalToHexString(indexP);
$ERROR('#' + hexP + ' ');
}
$ERROR('Total error: ' + errorCount + ' bad Unicode character in ' + count + ' ');
}
function | (n) {
n = Number(n);
var h = "";
for (var i = 3; i >= 0; i--) {
if (n >= Math.pow(16, i)) {
var t = Math.floor(n / Math.pow(16, i));
n -= t * Math.pow(16, i);
if ( t >= 10 ) {
if ( t == 10 ) { h += "A"; }
if ( t == 11 ) { h += "B"; }
if ( t == 12 ) { h += "C"; }
if ( t == 13 ) { h += "D"; }
if ( t == 14 ) { h += "E"; }
if ( t == 15 ) { h += "F"; }
} else {
h += String(t);
}
} else {
h += "0";
}
}
return h;
}
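// Usage sketch (illustrative): decimalToHexString(255) returns "00FF" -- a
// fixed-width, four-digit, uppercase hexadecimal string.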
| decimalToHexString | identifier_name |
S15.1.2.2_A8.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* parseInt may interpret only a leading portion of the string as
* a number value; it ignores any characters that cannot be interpreted as part
* of the notation of a decimal literal, and no indication is given that any such
* characters were ignored.
*
* @path ch15/15.1/15.1.2/15.1.2.2/S15.1.2.2_A8.js
* @description Complex test without eval
*/
//CHECK
var errorCount = 0;
var count = 0;
var indexP;
var indexO = 0;
for (var index = 0; index <= 65535; index++) {
if ((index < 0x0030) || (index > 0x0039) &&
(index < 0x0041) || (index > 0x005A) &&
(index < 0x0061) || (index > 0x007A)) {
var hex = decimalToHexString(index);
if (parseInt("1Z" + String.fromCharCode(index), 36) !== 71) {
if (indexO === 0) {
indexO = index;
} else {
if ((index - indexP) !== 1) {
if ((indexP - indexO) !== 0) {
var hexP = decimalToHexString(indexP);
var hexO = decimalToHexString(indexO);
$ERROR('#' + hexO + '-' + hexP + ' ');
}
else {
var hexP = decimalToHexString(indexP);
$ERROR('#' + hexP + ' ');
}
indexO = index;
}
}
indexP = index;
errorCount++;
}
count++;
}
}
if (errorCount > 0) {
if ((indexP - indexO) !== 0) {
var hexP = decimalToHexString(indexP);
var hexO = decimalToHexString(indexO);
$ERROR('#' + hexO + '-' + hexP + ' ');
} else {
var hexP = decimalToHexString(indexP);
$ERROR('#' + hexP + ' ');
}
$ERROR('Total error: ' + errorCount + ' bad Unicode character in ' + count + ' ');
}
function decimalToHexString(n) | }
return h;
}
| {
n = Number(n);
var h = "";
for (var i = 3; i >= 0; i--) {
if (n >= Math.pow(16, i)) {
var t = Math.floor(n / Math.pow(16, i));
n -= t * Math.pow(16, i);
if ( t >= 10 ) {
if ( t == 10 ) { h += "A"; }
if ( t == 11 ) { h += "B"; }
if ( t == 12 ) { h += "C"; }
if ( t == 13 ) { h += "D"; }
if ( t == 14 ) { h += "E"; }
if ( t == 15 ) { h += "F"; }
} else {
h += String(t);
}
} else {
h += "0";
} | identifier_body |
S15.1.2.2_A8.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* parseInt may interpret only a leading portion of the string as
* a number value; it ignores any characters that cannot be interpreted as part
* of the notation of a decimal literal, and no indication is given that any such
* characters were ignored.
*
* @path ch15/15.1/15.1.2/15.1.2.2/S15.1.2.2_A8.js
* @description Complex test without eval
*/
//CHECK
var errorCount = 0;
var count = 0;
var indexP;
var indexO = 0;
for (var index = 0; index <= 65535; index++) {
if ((index < 0x0030) || (index > 0x0039) &&
(index < 0x0041) || (index > 0x005A) &&
(index < 0x0061) || (index > 0x007A)) {
var hex = decimalToHexString(index);
if (parseInt("1Z" + String.fromCharCode(index), 36) !== 71) {
if (indexO === 0) {
indexO = index;
} else {
if ((index - indexP) !== 1) {
if ((indexP - indexO) !== 0) {
var hexP = decimalToHexString(indexP);
var hexO = decimalToHexString(indexO);
$ERROR('#' + hexO + '-' + hexP + ' ');
}
else {
var hexP = decimalToHexString(indexP);
$ERROR('#' + hexP + ' ');
}
indexO = index;
}
}
indexP = index;
errorCount++;
}
count++;
}
}
if (errorCount > 0) {
if ((indexP - indexO) !== 0) {
var hexP = decimalToHexString(indexP);
var hexO = decimalToHexString(indexO);
$ERROR('#' + hexO + '-' + hexP + ' ');
} else {
var hexP = decimalToHexString(indexP);
$ERROR('#' + hexP + ' ');
}
$ERROR('Total error: ' + errorCount + ' bad Unicode character in ' + count + ' ');
}
function decimalToHexString(n) {
n = Number(n);
var h = "";
for (var i = 3; i >= 0; i--) |
return h;
}
| {
if (n >= Math.pow(16, i)) {
var t = Math.floor(n / Math.pow(16, i));
n -= t * Math.pow(16, i);
if ( t >= 10 ) {
if ( t == 10 ) { h += "A"; }
if ( t == 11 ) { h += "B"; }
if ( t == 12 ) { h += "C"; }
if ( t == 13 ) { h += "D"; }
if ( t == 14 ) { h += "E"; }
if ( t == 15 ) { h += "F"; }
} else {
h += String(t);
}
} else {
h += "0";
}
} | conditional_block |
service_type_info.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceTypeInfo(Model):
"""Information about a service type that is defined in a service manifest of a | :type service_type_description: :class:`ServiceTypeDescription
<azure.servicefabric.models.ServiceTypeDescription>`
:param service_manifest_name:
:type service_manifest_name: str
:param service_manifest_version: The version of the service manifest in
which this service type is defined.
:type service_manifest_version: str
:param is_service_group: Indicates whether the service is a service group.
If it is, the property value is true, otherwise false.
:type is_service_group: bool
"""
_attribute_map = {
'service_type_description': {'key': 'ServiceTypeDescription', 'type': 'ServiceTypeDescription'},
'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
'service_manifest_version': {'key': 'ServiceManifestVersion', 'type': 'str'},
'is_service_group': {'key': 'IsServiceGroup', 'type': 'bool'},
}
def __init__(self, service_type_description=None, service_manifest_name=None, service_manifest_version=None, is_service_group=None):
self.service_type_description = service_type_description
self.service_manifest_name = service_manifest_name
self.service_manifest_version = service_manifest_version
self.is_service_group = is_service_group | provisioned application type.
:param service_type_description: | random_line_split |
service_type_info.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class | (Model):
"""Information about a service type that is defined in a service manifest of a
provisioned application type.
:param service_type_description:
:type service_type_description: :class:`ServiceTypeDescription
<azure.servicefabric.models.ServiceTypeDescription>`
:param service_manifest_name:
:type service_manifest_name: str
:param service_manifest_version: The version of the service manifest in
which this service type is defined.
:type service_manifest_version: str
:param is_service_group: Indicates whether the service is a service group.
If it is, the property value is true, otherwise false.
:type is_service_group: bool
"""
_attribute_map = {
'service_type_description': {'key': 'ServiceTypeDescription', 'type': 'ServiceTypeDescription'},
'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
'service_manifest_version': {'key': 'ServiceManifestVersion', 'type': 'str'},
'is_service_group': {'key': 'IsServiceGroup', 'type': 'bool'},
}
def __init__(self, service_type_description=None, service_manifest_name=None, service_manifest_version=None, is_service_group=None):
self.service_type_description = service_type_description
self.service_manifest_name = service_manifest_name
self.service_manifest_version = service_manifest_version
self.is_service_group = is_service_group
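# Usage sketch (illustrative; the field values below are assumptions, not
# taken from any real cluster):
# info = ServiceTypeInfo(service_manifest_name='MyManifest',
#                        service_manifest_version='1.0.0',
#                        is_service_group=False)
# assert info.is_service_group is False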
| ServiceTypeInfo | identifier_name |
service_type_info.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceTypeInfo(Model):
"""Information about a service type that is defined in a service manifest of a
provisioned application type.
:param service_type_description:
:type service_type_description: :class:`ServiceTypeDescription
<azure.servicefabric.models.ServiceTypeDescription>`
:param service_manifest_name:
:type service_manifest_name: str
:param service_manifest_version: The version of the service manifest in
which this service type is defined.
:type service_manifest_version: str
:param is_service_group: Indicates whether the service is a service group.
If it is, the property value is true, otherwise false.
:type is_service_group: bool
"""
_attribute_map = {
'service_type_description': {'key': 'ServiceTypeDescription', 'type': 'ServiceTypeDescription'},
'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
'service_manifest_version': {'key': 'ServiceManifestVersion', 'type': 'str'},
'is_service_group': {'key': 'IsServiceGroup', 'type': 'bool'},
}
def __init__(self, service_type_description=None, service_manifest_name=None, service_manifest_version=None, is_service_group=None):
| self.service_type_description = service_type_description
self.service_manifest_name = service_manifest_name
self.service_manifest_version = service_manifest_version
self.is_service_group = is_service_group | identifier_body |
|
App.tsx | IPT reserves all rights not expressly granted.
*
* The security implementation provided is DEMO only and is NOT intended for production purposes.
* It is exclusively your responsibility to seek advice from security professionals
* in order to secure the REST API implementation properly.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* IPT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import AppBar from '@material-ui/core/AppBar';
import Badge from '@material-ui/core/Badge';
// import Box from '@material-ui/core/Box';
import Container from '@material-ui/core/Container';
import CssBaseline from '@material-ui/core/CssBaseline';
import Divider from '@material-ui/core/Divider';
import Drawer from '@material-ui/core/Drawer';
import Grid from '@material-ui/core/Grid';
import IconButton from '@material-ui/core/IconButton';
import Link from '@material-ui/core/Link';
import List from '@material-ui/core/List';
import Paper from '@material-ui/core/Paper';
import { makeStyles } from '@material-ui/core/styles';
import Toolbar from '@material-ui/core/Toolbar';
import Typography from '@material-ui/core/Typography';
import ChevronLeftIcon from '@material-ui/icons/ChevronLeft';
import MenuIcon from '@material-ui/icons/Menu';
import NotificationsIcon from '@material-ui/icons/Notifications';
import clsx from 'clsx';
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Redirect, Route, Switch, useHistory } from 'react-router-dom';
import Alert from '../components/Alert/Alert';
import { PostForm } from '../components/PostForm/PostForm';
import { PostList } from '../components/PostList/PostList';
import { deletePost, fetchPosts } from '../features/posts/postsSlice';
import { mainListItems, secondaryListItems } from '../listitems';
import { PostCallback } from '../shared/shared-types';
import { RootState } from './rootReducer';
import Login from '../components/Login/Login';
// import Chart from './Chart';
// import Deposits from './Deposits';
// import Orders from './Orders';
import ProtectedRoute from '../components/ProtectedRoute/ProtectedRoute';
import { FaceRecognition } from '../components/FaceRecognition/FaceRecognition';
function Copyright() | {'.'}
</Typography>
);
}
const drawerWidth = 240;
const useStyles = makeStyles((theme) => ({
root: {
display: 'flex',
},
toolbar: {
paddingRight: 24, // keep right padding when drawer closed
},
toolbarIcon: {
display: 'flex',
alignItems: 'center',
justifyContent: 'flex-end',
padding: '0 8px',
...theme.mixins.toolbar,
},
appBar: {
zIndex: theme.zIndex.drawer + 1,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
},
appBarShift: {
marginLeft: drawerWidth,
width: `calc(100% - ${drawerWidth}px)`,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
menuButton: {
marginRight: 36,
},
menuButtonHidden: {
display: 'none',
},
title: {
flexGrow: 1,
},
drawerPaper: {
position: 'relative',
whiteSpace: 'nowrap',
width: drawerWidth,
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
drawerPaperClose: {
overflowX: 'hidden',
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
width: theme.spacing(7),
[theme.breakpoints.up('sm')]: {
width: theme.spacing(9),
},
},
appBarSpacer: theme.mixins.toolbar,
content: {
flexGrow: 1,
height: '100vh',
overflow: 'auto',
},
container: {
paddingTop: theme.spacing(4),
paddingBottom: theme.spacing(4),
},
paper: {
padding: theme.spacing(2),
display: 'flex',
overflow: 'auto',
flexDirection: 'column',
},
fixedHeight: {
height: 240,
},
menuItems: {
"& a": {
display: 'block',
color: 'rgba(0,0,0,0.87)',
},
"& a.active": {
background: 'rgb(56,247,242) linear-gradient(90deg, rgba(50,50,242,1) 0%, rgba(70,80,255,0.5) 100%)',
color: 'white'
}
},
}));
export default function Dashboard() {
const classes = useStyles();
const [open, setOpen] = React.useState(true);
const history = useHistory();
const dispatch = useDispatch();
useEffect(() => {
dispatch(fetchPosts());
}, [dispatch]);
const posts = useSelector((state: RootState) => state.posts.posts);
const errors = useSelector((state: RootState) => {
return state.resources.error;
});
const messages = useSelector((state: RootState) => {
return state.resources.message;
});
const loggedUser = useSelector((state: RootState) => {
return state.auth.loggedUser;
});
const handleEditPost: PostCallback = (post) => {
history.push(`/edit-post/${post.id}`);
};
const handleDeletePost: PostCallback = (post) => {
dispatch(deletePost(post.id));
};
const handleDrawerOpen = () => {
setOpen(true);
};
const handleDrawerClose = () => {
setOpen(false);
};
const fixedHeightPaper = clsx(classes.paper, classes.fixedHeight);
return (
<div className={classes.root}>
<CssBaseline />
<AppBar position="absolute" className={clsx(classes.appBar, open && classes.appBarShift)}>
<Toolbar className={classes.toolbar}>
<IconButton
edge="start"
color="inherit"
aria-label="open drawer"
onClick={handleDrawerOpen}
className={clsx(classes.menuButton, open && classes.menuButtonHidden)}
>
<MenuIcon />
</IconButton>
<Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
Dashboard
</Typography>
{loggedUser && <Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
Welcome, {loggedUser.firstName}
</Typography>}
<IconButton color="inherit">
<Badge badgeContent={4} color="secondary">
<NotificationsIcon />
</Badge>
</IconButton>
</Toolbar>
</AppBar>
<Drawer
variant="permanent"
classes={{
paper: clsx(classes.drawerPaper, !open && classes.drawerPaperClose),
}}
open={open}
>
<div className={classes.toolbarIcon}>
<IconButton onClick={handleDrawerClose}>
<ChevronLeftIcon />
</IconButton>
</div>
<Divider />
<List className={classes.menuItems}>{mainListItems}</List>
<Divider />
<List className={classes.menuItems}>{secondaryListItems}</List>
</Drawer>
<main className={classes.content}>
<div className={classes.appBarSpacer} />
<Container maxWidth="lg" className={classes.container}>
<Switch>
<Route exact path="/">
<Redirect to="/face-recognition" />
</Route>
<Route exact path="/face-recognition">
<FaceRecognition />
</Route>
<Route exact path="/posts">
<PostList posts={posts} onEditPost={handleEditPost} onDeletePost={handleDeletePost} />
</Route>
<ProtectedRoute exact path="/add-post">
<PostForm />
</ProtectedRoute>
<Route exact path="/edit-post/:postId">
<PostForm />
</Route>
<Route exact path="/login">
<Login />
</Route>
</Switch>
<Grid container spacing={3}>
{/* Chart */}
<Grid item xs={12} md={8} lg={9}>
<Paper className={fixedHeightPaper}>
{/* <Chart />*/}
</Paper>
</Grid>
{/* Recent Deposits */}
<Grid item xs={12} md={4} lg={3}>
<Paper className={fixedHeightPaper}>
{/* <Deposits /> */}
</Paper>
</Grid>
{/* Recent Orders */}
<Grid item xs={1 | {
return (
<Typography variant="body2" color="textSecondary" align="center">
{'Copyright © '}
<Link color="inherit" href="https://github.com/iproduct">
Trayan Iliev, IPT - Intellectual Products & Technologies
</Link>{' '}
{new Date().getFullYear()}
| identifier_body |
App.tsx | IPT reserves all rights not expressly granted.
*
* The security implementation provided is DEMO only and is NOT intended for production purposes.
* It is exclusively your responsibility to seek advice from security professionals
* in order to secure the REST API implementation properly.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* IPT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import AppBar from '@material-ui/core/AppBar';
import Badge from '@material-ui/core/Badge';
// import Box from '@material-ui/core/Box';
import Container from '@material-ui/core/Container';
import CssBaseline from '@material-ui/core/CssBaseline';
import Divider from '@material-ui/core/Divider';
import Drawer from '@material-ui/core/Drawer';
import Grid from '@material-ui/core/Grid';
import IconButton from '@material-ui/core/IconButton';
import Link from '@material-ui/core/Link';
import List from '@material-ui/core/List';
import Paper from '@material-ui/core/Paper';
import { makeStyles } from '@material-ui/core/styles';
import Toolbar from '@material-ui/core/Toolbar';
import Typography from '@material-ui/core/Typography';
import ChevronLeftIcon from '@material-ui/icons/ChevronLeft';
import MenuIcon from '@material-ui/icons/Menu';
import NotificationsIcon from '@material-ui/icons/Notifications';
import clsx from 'clsx';
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Redirect, Route, Switch, useHistory } from 'react-router-dom';
import Alert from '../components/Alert/Alert';
import { PostForm } from '../components/PostForm/PostForm';
import { PostList } from '../components/PostList/PostList';
import { deletePost, fetchPosts } from '../features/posts/postsSlice';
import { mainListItems, secondaryListItems } from '../listitems';
import { PostCallback } from '../shared/shared-types';
import { RootState } from './rootReducer';
import Login from '../components/Login/Login';
// import Chart from './Chart';
// import Deposits from './Deposits';
// import Orders from './Orders';
import ProtectedRoute from '../components/ProtectedRoute/ProtectedRoute';
import { FaceRecognition } from '../components/FaceRecognition/FaceRecognition';
function | () {
return (
<Typography variant="body2" color="textSecondary" align="center">
{'Copyright © '}
<Link color="inherit" href="https://github.com/iproduct">
Trayan Iliev, IPT - Intellectual Products & Technologies
</Link>{' '}
{new Date().getFullYear()}
{'.'}
</Typography>
);
}
const drawerWidth = 240;
const useStyles = makeStyles((theme) => ({
root: {
display: 'flex',
},
toolbar: {
paddingRight: 24, // keep right padding when drawer closed
},
toolbarIcon: {
display: 'flex',
alignItems: 'center',
justifyContent: 'flex-end',
padding: '0 8px',
...theme.mixins.toolbar,
},
appBar: {
zIndex: theme.zIndex.drawer + 1,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
},
appBarShift: {
marginLeft: drawerWidth,
width: `calc(100% - ${drawerWidth}px)`,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
menuButton: {
marginRight: 36,
},
menuButtonHidden: {
display: 'none',
},
title: {
flexGrow: 1,
},
drawerPaper: {
position: 'relative',
whiteSpace: 'nowrap',
width: drawerWidth,
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
drawerPaperClose: {
overflowX: 'hidden',
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
width: theme.spacing(7),
[theme.breakpoints.up('sm')]: {
width: theme.spacing(9),
},
},
appBarSpacer: theme.mixins.toolbar,
content: {
flexGrow: 1,
height: '100vh',
overflow: 'auto',
},
container: {
paddingTop: theme.spacing(4),
paddingBottom: theme.spacing(4),
},
paper: {
padding: theme.spacing(2),
display: 'flex',
overflow: 'auto',
flexDirection: 'column',
},
fixedHeight: {
height: 240,
},
menuItems: {
"& a": {
display: 'block',
color: 'rgba(0,0,0,0.87)',
},
"& a.active": {
background: 'rgb(56,247,242) linear-gradient(90deg, rgba(50,50,242,1) 0%, rgba(70,80,255,0.5) 100%)',
color: 'white'
}
},
}));
export default function Dashboard() {
const classes = useStyles();
const [open, setOpen] = React.useState(true);
const history = useHistory();
const dispatch = useDispatch();
useEffect(() => {
dispatch(fetchPosts());
}, [dispatch]);
const posts = useSelector((state: RootState) => state.posts.posts);
const errors = useSelector((state: RootState) => {
return state.resources.error;
});
const messages = useSelector((state: RootState) => {
return state.resources.message;
});
const loggedUser = useSelector((state: RootState) => {
return state.auth.loggedUser;
});
const handleEditPost: PostCallback = (post) => {
history.push(`/edit-post/${post.id}`);
};
const handleDeletePost: PostCallback = (post) => {
dispatch(deletePost(post.id));
};
const handleDrawerOpen = () => {
setOpen(true);
};
const handleDrawerClose = () => {
setOpen(false);
};
const fixedHeightPaper = clsx(classes.paper, classes.fixedHeight);
return (
<div className={classes.root}>
<CssBaseline />
<AppBar position="absolute" className={clsx(classes.appBar, open && classes.appBarShift)}>
<Toolbar className={classes.toolbar}>
<IconButton
edge="start"
color="inherit"
aria-label="open drawer"
onClick={handleDrawerOpen}
className={clsx(classes.menuButton, open && classes.menuButtonHidden)}
>
<MenuIcon />
</IconButton>
<Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
Dashboard
</Typography>
{loggedUser && <Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
Welcome, {loggedUser.firstName}
</Typography>}
<IconButton color="inherit">
<Badge badgeContent={4} color="secondary">
<NotificationsIcon />
</Badge>
</IconButton>
</Toolbar>
</AppBar>
<Drawer
variant="permanent"
classes={{
paper: clsx(classes.drawerPaper, !open && classes.drawerPaperClose),
}}
open={open}
>
<div className={classes.toolbarIcon}>
<IconButton onClick={handleDrawerClose}>
<ChevronLeftIcon />
</IconButton>
</div>
<Divider />
<List className={classes.menuItems}>{mainListItems}</List>
<Divider />
<List className={classes.menuItems}>{secondaryListItems}</List>
</Drawer>
<main className={classes.content}>
<div className={classes.appBarSpacer} />
<Container maxWidth="lg" className={classes.container}>
<Switch>
<Route exact path="/">
<Redirect to="/face-recognition" />
</Route>
<Route exact path="/face-recognition">
<FaceRecognition />
</Route>
<Route exact path="/posts">
<PostList posts={posts} onEditPost={handleEditPost} onDeletePost={handleDeletePost} />
</Route>
<ProtectedRoute exact path="/add-post">
<PostForm />
</ProtectedRoute>
<Route exact path="/edit-post/:postId">
<PostForm />
</Route>
<Route exact path="/login">
<Login />
</Route>
</Switch>
<Grid container spacing={3}>
{/* Chart */}
<Grid item xs={12} md={8} lg={9}>
<Paper className={fixedHeightPaper}>
{/* <Chart />*/}
</Paper>
</Grid>
{/* Recent Deposits */}
<Grid item xs={12} md={4} lg={3}>
<Paper className={fixedHeightPaper}>
{/* <Deposits /> */}
</Paper>
</Grid>
{/* Recent Orders */}
<Grid item xs={1 | Copyright | identifier_name |
App.tsx | NO EVENT SHALL
* IPT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import AppBar from '@material-ui/core/AppBar';
import Badge from '@material-ui/core/Badge';
// import Box from '@material-ui/core/Box';
import Container from '@material-ui/core/Container';
import CssBaseline from '@material-ui/core/CssBaseline';
import Divider from '@material-ui/core/Divider';
import Drawer from '@material-ui/core/Drawer';
import Grid from '@material-ui/core/Grid';
import IconButton from '@material-ui/core/IconButton';
import Link from '@material-ui/core/Link';
import List from '@material-ui/core/List';
import Paper from '@material-ui/core/Paper';
import { makeStyles } from '@material-ui/core/styles';
import Toolbar from '@material-ui/core/Toolbar';
import Typography from '@material-ui/core/Typography';
import ChevronLeftIcon from '@material-ui/icons/ChevronLeft';
import MenuIcon from '@material-ui/icons/Menu';
import NotificationsIcon from '@material-ui/icons/Notifications';
import clsx from 'clsx';
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Redirect, Route, Switch, useHistory } from 'react-router-dom';
import Alert from '../components/Alert/Alert';
import { PostForm } from '../components/PostForm/PostForm';
import { PostList } from '../components/PostList/PostList';
import { deletePost, fetchPosts } from '../features/posts/postsSlice';
import { mainListItems, secondaryListItems } from '../listitems';
import { PostCallback } from '../shared/shared-types';
import { RootState } from './rootReducer';
import Login from '../components/Login/Login';
// import Chart from './Chart';
// import Deposits from './Deposits';
// import Orders from './Orders';
import ProtectedRoute from '../components/ProtectedRoute/ProtectedRoute';
import { FaceRecognition } from '../components/FaceRecognition/FaceRecognition';
function Copyright() {
return (
<Typography variant="body2" color="textSecondary" align="center">
{'Copyright © '}
<Link color="inherit" href="https://github.com/iproduct">
Trayan Iliev, IPT - Intellectual Products & Technologies
</Link>{' '}
{new Date().getFullYear()}
{'.'}
</Typography>
);
}
const drawerWidth = 240;
const useStyles = makeStyles((theme) => ({
root: {
display: 'flex',
},
toolbar: {
paddingRight: 24, // keep right padding when drawer closed
},
toolbarIcon: {
display: 'flex',
alignItems: 'center',
justifyContent: 'flex-end',
padding: '0 8px',
...theme.mixins.toolbar,
},
appBar: {
zIndex: theme.zIndex.drawer + 1,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
},
appBarShift: {
marginLeft: drawerWidth,
width: `calc(100% - ${drawerWidth}px)`,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
menuButton: {
marginRight: 36,
},
menuButtonHidden: {
display: 'none',
},
title: {
flexGrow: 1,
},
drawerPaper: {
position: 'relative',
whiteSpace: 'nowrap',
width: drawerWidth,
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
drawerPaperClose: {
overflowX: 'hidden',
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
width: theme.spacing(7),
[theme.breakpoints.up('sm')]: {
width: theme.spacing(9),
},
},
appBarSpacer: theme.mixins.toolbar,
content: {
flexGrow: 1,
height: '100vh',
overflow: 'auto',
},
container: {
paddingTop: theme.spacing(4),
paddingBottom: theme.spacing(4),
},
paper: {
padding: theme.spacing(2),
display: 'flex',
overflow: 'auto',
flexDirection: 'column',
},
fixedHeight: {
height: 240,
},
menuItems: {
"& a": {
display: 'block',
color: 'rgba(0,0,0,0.87)',
},
"& a.active": {
background: 'rgb(56,247,242) linear-gradient(90deg, rgba(50,50,242,1) 0%, rgba(70,80,255,0.5) 100%)',
color: 'white'
}
},
}));
export default function Dashboard() {
const classes = useStyles();
const [open, setOpen] = React.useState(true);
const history = useHistory();
const dispatch = useDispatch();
useEffect(() => {
dispatch(fetchPosts());
}, [dispatch]);
const posts = useSelector((state: RootState) => state.posts.posts);
const errors = useSelector((state: RootState) => {
return state.resources.error;
});
const messages = useSelector((state: RootState) => {
return state.resources.message;
});
const loggedUser = useSelector((state: RootState) => {
return state.auth.loggedUser;
});
const handleEditPost: PostCallback = (post) => {
history.push(`/edit-post/${post.id}`);
};
const handleDeletePost: PostCallback = (post) => {
dispatch(deletePost(post.id));
};
const handleDrawerOpen = () => {
setOpen(true);
};
const handleDrawerClose = () => {
setOpen(false);
};
const fixedHeightPaper = clsx(classes.paper, classes.fixedHeight);
return (
<div className={classes.root}>
<CssBaseline />
<AppBar position="absolute" className={clsx(classes.appBar, open && classes.appBarShift)}>
<Toolbar className={classes.toolbar}>
<IconButton
edge="start"
color="inherit"
aria-label="open drawer"
onClick={handleDrawerOpen}
className={clsx(classes.menuButton, open && classes.menuButtonHidden)}
>
<MenuIcon />
</IconButton>
<Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
Dashboard
</Typography>
{loggedUser && <Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
Welcome, {loggedUser.firstName}
</Typography>}
<IconButton color="inherit">
<Badge badgeContent={4} color="secondary">
<NotificationsIcon />
</Badge>
</IconButton>
</Toolbar>
</AppBar>
<Drawer
variant="permanent"
classes={{
paper: clsx(classes.drawerPaper, !open && classes.drawerPaperClose),
}}
open={open}
>
<div className={classes.toolbarIcon}>
<IconButton onClick={handleDrawerClose}>
<ChevronLeftIcon />
</IconButton>
</div>
<Divider />
<List className={classes.menuItems}>{mainListItems}</List>
<Divider />
<List className={classes.menuItems}>{secondaryListItems}</List>
</Drawer>
<main className={classes.content}>
<div className={classes.appBarSpacer} />
<Container maxWidth="lg" className={classes.container}>
<Switch>
<Route exact path="/">
<Redirect to="/face-recognition" />
</Route>
<Route exact path="/face-recognition">
<FaceRecognition />
</Route>
<Route exact path="/posts">
<PostList posts={posts} onEditPost={handleEditPost} onDeletePost={handleDeletePost} />
</Route>
<ProtectedRoute exact path="/add-post">
<PostForm />
</ProtectedRoute>
<Route exact path="/edit-post/:postId">
<PostForm />
</Route>
<Route exact path="/login">
<Login />
</Route>
</Switch>
<Grid container spacing={3}>
{/* Chart */}
<Grid item xs={12} md={8} lg={9}>
<Paper className={fixedHeightPaper}>
{/* <Chart />*/}
</Paper>
</Grid>
{/* Recent Deposits */}
<Grid item xs={12} md={4} lg={3}>
<Paper className={fixedHeightPaper}>
{/* <Deposits /> */}
</Paper>
</Grid>
{/* Recent Orders */}
<Grid item xs={12}>
<Paper className={classes.paper}>
{/* <Orders /> */}
</Paper>
</Grid>
</Grid>
<Grid item xs={12}>
<Copyright />
</Grid>
</Container>
</main> | {errors && (<Alert key={errors} severity="error">{errors}</Alert>)}
{messages && (<Alert key={messages} severity="success">{messages}</Alert>)}
</div>
);
} | random_line_split |
|
vis_eye_video_overlay.py | function to find most recent valid timestamp in the future
"""
if idx == len(timestamps)-1:
# if at the end, we can't go further into the future.
return get_past_timestamp(idx,timestamps)
elif timestamps[idx]:
return timestamps[idx][0]
else:
idx = min(len(timestamps),idx+1)
return get_future_timestamp(idx,timestamps)
def get_nearest_timestamp(past_timestamp,future_timestamp,world_timestamp):
dt_past = abs(past_timestamp-world_timestamp)
dt_future = abs(future_timestamp-world_timestamp) # abs prob not necessary here, but just for sanity
if dt_past < dt_future:
return past_timestamp
else:
return future_timestamp
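# e.g. get_nearest_timestamp(0.98, 1.06, 1.00) returns 0.98,
# since |0.98 - 1.00| < |1.06 - 1.00| (illustrative values).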
def correlate_eye_world(eye_timestamps,world_timestamps):
"""
This function takes a list of eye timestamps and world timestamps
and correlates one eye frame per world frame
Returns a mapping that correlates a single eye frame index with each world frame index.
Up and downsampling is used to achieve this mapping.
"""
# return framewise mapping as a list
e_ts = eye_timestamps
w_ts = list(world_timestamps)
eye_frames_by_timestamp = dict(zip(e_ts,range(len(e_ts))))
eye_timestamps_by_world_index = [[] for i in world_timestamps]
frame_idx = 0
try:
current_e_ts = e_ts.pop(0)
except:
logger.warning("No eye timestamps found.")
return eye_timestamps_by_world_index
while e_ts:
# if the current eye timestamp is before the mean of the current world frame timestamp and the next worldframe timestamp
try:
t_between_frames = ( w_ts[frame_idx]+w_ts[frame_idx+1] ) / 2.
except IndexError:
break
if current_e_ts <= t_between_frames:
eye_timestamps_by_world_index[frame_idx].append(current_e_ts)
current_e_ts = e_ts.pop(0)
else:
frame_idx+=1
idx = 0
eye_world_frame_map = []
# some entries in the `eye_timestamps_by_world_index` might be empty -- no correlated eye timestamp
# so we will either show the previous frame or next frame - whichever is temporally closest
for candidate,world_ts in zip(eye_timestamps_by_world_index,w_ts):
# if there is no candidate, then assign it to the closest timestamp
if not candidate:
# get most recent timestamp, either in the past or future
e_past_ts = get_past_timestamp(idx,eye_timestamps_by_world_index)
e_future_ts = get_future_timestamp(idx,eye_timestamps_by_world_index)
eye_world_frame_map.append(eye_frames_by_timestamp[get_nearest_timestamp(e_past_ts,e_future_ts,world_ts)])
else:
# TODO - if there is a list of len > 1 - then we should check which is the temporally closest timestamp
eye_world_frame_map.append(eye_frames_by_timestamp[eye_timestamps_by_world_index[idx][-1]])
idx += 1
return eye_world_frame_map
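# Worked example (illustrative values): with world timestamps [0.0, 0.1, 0.2]
# and eye timestamps [0.00, 0.06, 0.11, 0.19] (roughly twice the rate), this
# implementation yields the mapping [0, 2, 2]: world frame 1 takes the last eye
# frame falling before the midpoint to the next world frame, and world frame 2,
# which has no correlated eye timestamp, falls back to the nearest neighbor.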
class Vis_Eye_Video_Overlay(Plugin):
"""docstring This plugin allows the user to overlay the eye recording on the recording of his field of vision
Features: flip video across horiz/vert axes, click and drag around interface, scale video size from 20% to 100%,
show only 1 or 2 or both eyes
features updated by Andrew June 2015
"""
def __init__(self,g_pool,alpha=0.6,eye_scale_factor=.5,move_around=0,mirror={'0':False,'1':False}, flip={'0':False,'1':False},pos=[(640,10),(10,10)]):
super().__init__(g_pool)
self.order = .6
self.menu = None
# user controls
self.alpha = alpha #opacity level of eyes
self.eye_scale_factor = eye_scale_factor #scale
self.showeyes = 0,1 #modes: a tuple containing both indices means both eyes are shown; a single index if only one eye was recorded
self.move_around = move_around #boolean whether allow to move clip around screen or not
self.video_size = [0,0] #video_size of recording (bc scaling)
#variables specific to each eye
self.eye_frames = []
self.eye_world_frame_map = []
self.eye_cap = []
self.mirror = mirror #do we horiz flip first eye
self.flip = flip #do we vert flip first eye
self.pos = [list(pos[0]),list(pos[1])] #positions of 2 eyes | if VersionFormat(self.g_pool.meta_info['Capture Software Version'][1:]) < VersionFormat('0.4'):
eye_video_path = os.path.join(g_pool.rec_dir,'eye.avi'),'None'
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye_timestamps.npy'),'None'
else:
eye_video_path = os.path.join(g_pool.rec_dir,'eye0.*'),os.path.join(g_pool.rec_dir,'eye1.*')
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye0_timestamps.npy'),os.path.join(g_pool.rec_dir,'eye1_timestamps.npy')
#try to load eye video and ts for each eye.
for video,ts in zip(eye_video_path,eye_timestamps_path):
try:
self.eye_cap.append(File_Source(self.g_pool,source_path=glob(video)[0],timestamps=np.load(ts)))
except(IndexError,FileCaptureError):
pass
else:
self.eye_frames.append(self.eye_cap[-1].get_frame())
try:
eye_timestamps = list(np.load(ts))
except:
pass
else:
self.eye_world_frame_map.append(correlate_eye_world(eye_timestamps,g_pool.timestamps))
if len(self.eye_cap) == 2:
logger.debug("Loaded binocular eye video data.")
elif len(self.eye_cap) == 1:
logger.debug("Loaded monocular eye video data")
self.showeyes = (0,)
else:
logger.error("Could not load eye video.")
self.alive = False
return
def unset_alive(self):
self.alive = False
def init_gui(self):
# initialize the menu
self.menu = ui.Scrolling_Menu('Eye Video Overlay')
self.update_gui()
self.g_pool.gui.append(self.menu)
def update_gui(self):
self.menu.elements[:] = []
self.menu.append(ui.Button('Close',self.unset_alive))
self.menu.append(ui.Info_Text('Show the eye video overlaid on top of the world video. Eye1 is usually the right eye'))
self.menu.append(ui.Slider('alpha',self,min=0.0,step=0.05,max=1.0,label='Opacity'))
self.menu.append(ui.Slider('eye_scale_factor',self,min=0.2,step=0.1,max=1.0,label='Video Scale'))
self.menu.append(ui.Switch('move_around',self,label="Move Overlay"))
if len(self.eye_cap) == 2:
self.menu.append(ui.Selector('showeyes',self,label='Show',selection=[(0,),(1,),(0,1)],labels= ['eye 1','eye 2','both'],setter=self.set_showeyes))
if 0 in self.showeyes:
self.menu.append(ui.Switch('0',self.mirror,label="Eye 1: Horiz. Flip"))
self.menu.append(ui.Switch('0',self.flip,label="Eye 1: Vert. Flip"))
if 1 in self.showeyes:
self.menu.append(ui.Switch('1',self.mirror,label="Eye 2: Horiz Flip"))
self.menu.append(ui.Switch('1',self.flip,label="Eye 2: Vert Flip"))
def set_showeyes(self,new_mode):
#every time an eye setting is chosen (eye 1, eye 2, or both), update the GUI menu to remove certain options from the list
self.showeyes = new_mode
self.update_gui()
def deinit_gui(self):
if self.menu:
self.g_pool.gui.remove(self.menu)
self.menu = None
def update(self,frame,events):
for eye_index in self.showeyes:
requested_eye_frame_idx = self.eye_world_frame_map[eye_index][frame.index]
#1. do we need a new frame?
if requested_eye_frame_idx != self.eye_frames[eye_index].index:
# do we need to seek?
if requested_eye_frame_idx == self.eye_cap[eye_index].get_frame_index()+1:
# if we just need to seek by one frame, it's faster to just read one and throw it away.
_ = self.eye_cap[eye_index].get_frame()
if requested_eye_frame_idx != self.eye_cap[eye_index].get_frame_index():
# only now do I need to seek
self.eye_cap[eye_index].seek_to_frame(requested_eye_frame_idx)
# read the new eye frame
try:
self.eye_frames[eye_index] = self.eye_cap[eye_index].get_frame()
except EndofVideoFileError:
logger.warning("Reached the end of the eye video for eye video {}.".format(eye_index))
| self.drag_offset = [None,None]
# load eye videos and eye timestamps | random_line_split |
vis_eye_video_overlay.py | function to find most recent valid timestamp in the future
"""
if idx == len(timestamps)-1:
# if at the end, we can't go further into the future.
return get_past_timestamp(idx,timestamps)
elif timestamps[idx]:
return timestamps[idx][0]
else:
idx = min(len(timestamps),idx+1)
return get_future_timestamp(idx,timestamps)
def get_nearest_timestamp(past_timestamp,future_timestamp,world_timestamp):
dt_past = abs(past_timestamp-world_timestamp)
dt_future = abs(future_timestamp-world_timestamp) # abs prob not necessary here, but just for sanity
if dt_past < dt_future:
return past_timestamp
else:
return future_timestamp
def correlate_eye_world(eye_timestamps,world_timestamps):
"""
This function takes a list of eye timestamps and world timestamps
and correlates one eye frame per world frame
Returns a mapping that correlates a single eye frame index with each world frame index.
Up and downsampling is used to achieve this mapping.
"""
# return framewise mapping as a list
e_ts = eye_timestamps
w_ts = list(world_timestamps)
eye_frames_by_timestamp = dict(zip(e_ts,range(len(e_ts))))
eye_timestamps_by_world_index = [[] for i in world_timestamps]
frame_idx = 0
try:
current_e_ts = e_ts.pop(0)
except:
logger.warning("No eye timestamps found.")
return eye_timestamps_by_world_index
while e_ts:
# if the current eye timestamp is before the mean of the current world frame timestamp and the next worldframe timestamp
try:
t_between_frames = ( w_ts[frame_idx]+w_ts[frame_idx+1] ) / 2.
except IndexError:
break
if current_e_ts <= t_between_frames:
eye_timestamps_by_world_index[frame_idx].append(current_e_ts)
current_e_ts = e_ts.pop(0)
else:
frame_idx+=1
idx = 0
eye_world_frame_map = []
# some entries in the `eye_timestamps_by_world_index` might be empty -- no correlated eye timestamp
# so we will either show the previous frame or next frame - whichever is temporally closest
for candidate,world_ts in zip(eye_timestamps_by_world_index,w_ts):
# if there is no candidate, then assign it to the closest timestamp
if not candidate:
# get most recent timestamp, either in the past or future
e_past_ts = get_past_timestamp(idx,eye_timestamps_by_world_index)
e_future_ts = get_future_timestamp(idx,eye_timestamps_by_world_index)
eye_world_frame_map.append(eye_frames_by_timestamp[get_nearest_timestamp(e_past_ts,e_future_ts,world_ts)])
else:
# TODO - if there is a list of len > 1 - then we should check which is the temporally closest timestamp
eye_world_frame_map.append(eye_frames_by_timestamp[eye_timestamps_by_world_index[idx][-1]])
idx += 1
return eye_world_frame_map
class Vis_Eye_Video_Overlay(Plugin):
"""docstring This plugin allows the user to overlay the eye recording on the recording of his field of vision
Features: flip video across horiz/vert axes, click and drag around interface, scale video size from 20% to 100%,
show only 1 or 2 or both eyes
features updated by Andrew June 2015
"""
def __init__(self,g_pool,alpha=0.6,eye_scale_factor=.5,move_around=0,mirror={'0':False,'1':False}, flip={'0':False,'1':False},pos=[(640,10),(10,10)]):
super().__init__(g_pool)
self.order = .6
self.menu = None
# user controls
self.alpha = alpha #opacity level of eyes
self.eye_scale_factor = eye_scale_factor #scale
self.showeyes = 0,1 #modes: a tuple containing both indices means both eyes are shown; a single index if only one eye was recorded
self.move_around = move_around #boolean whether allow to move clip around screen or not
self.video_size = [0,0] #video_size of recording (bc scaling)
#variables specific to each eye
self.eye_frames = []
self.eye_world_frame_map = []
self.eye_cap = []
self.mirror = mirror #do we horiz flip first eye
self.flip = flip #do we vert flip first eye
self.pos = [list(pos[0]),list(pos[1])] #positions of 2 eyes
self.drag_offset = [None,None]
# load eye videos and eye timestamps
if VersionFormat(self.g_pool.meta_info['Capture Software Version'][1:]) < VersionFormat('0.4'):
eye_video_path = os.path.join(g_pool.rec_dir,'eye.avi'),'None'
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye_timestamps.npy'),'None'
else:
eye_video_path = os.path.join(g_pool.rec_dir,'eye0.*'),os.path.join(g_pool.rec_dir,'eye1.*')
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye0_timestamps.npy'),os.path.join(g_pool.rec_dir,'eye1_timestamps.npy')
#try to load eye video and ts for each eye.
for video,ts in zip(eye_video_path,eye_timestamps_path):
try:
self.eye_cap.append(File_Source(self.g_pool,source_path=glob(video)[0],timestamps=np.load(ts)))
except(IndexError,FileCaptureError):
pass
else:
self.eye_frames.append(self.eye_cap[-1].get_frame())
try:
eye_timestamps = list(np.load(ts))
except:
pass
else:
self.eye_world_frame_map.append(correlate_eye_world(eye_timestamps,g_pool.timestamps))
if len(self.eye_cap) == 2:
logger.debug("Loaded binocular eye video data.")
elif len(self.eye_cap) == 1:
logger.debug("Loaded monocular eye video data")
self.showeyes = (0,)
else:
logger.error("Could not load eye video.")
self.alive = False
return
def | (self):
self.alive = False
def init_gui(self):
# initialize the menu
self.menu = ui.Scrolling_Menu('Eye Video Overlay')
self.update_gui()
self.g_pool.gui.append(self.menu)
def update_gui(self):
self.menu.elements[:] = []
self.menu.append(ui.Button('Close',self.unset_alive))
self.menu.append(ui.Info_Text('Show the eye video overlaid on top of the world video. Eye1 is usually the right eye'))
self.menu.append(ui.Slider('alpha',self,min=0.0,step=0.05,max=1.0,label='Opacity'))
self.menu.append(ui.Slider('eye_scale_factor',self,min=0.2,step=0.1,max=1.0,label='Video Scale'))
self.menu.append(ui.Switch('move_around',self,label="Move Overlay"))
if len(self.eye_cap) == 2:
self.menu.append(ui.Selector('showeyes',self,label='Show',selection=[(0,),(1,),(0,1)],labels= ['eye 1','eye 2','both'],setter=self.set_showeyes))
if 0 in self.showeyes:
self.menu.append(ui.Switch('0',self.mirror,label="Eye 1: Horiz. Flip"))
self.menu.append(ui.Switch('0',self.flip,label="Eye 1: Vert. Flip"))
if 1 in self.showeyes:
self.menu.append(ui.Switch('1',self.mirror,label="Eye 2: Horiz Flip"))
self.menu.append(ui.Switch('1',self.flip,label="Eye 2: Vert Flip"))
def set_showeyes(self,new_mode):
#every time an eye setting is chosen (eye 1, eye 2, or both), update the GUI menu to remove certain options from the list
self.showeyes = new_mode
self.update_gui()
def deinit_gui(self):
if self.menu:
self.g_pool.gui.remove(self.menu)
self.menu = None
def update(self,frame,events):
for eye_index in self.showeyes:
requested_eye_frame_idx = self.eye_world_frame_map[eye_index][frame.index]
#1. do we need a new frame?
if requested_eye_frame_idx != self.eye_frames[eye_index].index:
# do we need to seek?
if requested_eye_frame_idx == self.eye_cap[eye_index].get_frame_index()+1:
# if we just need to seek by one frame, it's faster to just read one and throw it away.
_ = self.eye_cap[eye_index].get_frame()
if requested_eye_frame_idx != self.eye_cap[eye_index].get_frame_index():
# only now do I need to seek
self.eye_cap[eye_index].seek_to_frame(requested_eye_frame_idx)
# read the new eye frame
try:
self.eye_frames[eye_index] = self.eye_cap[eye_index].get_frame()
except EndofVideoFileError:
logger.warning("Reached the end of the eye video for eye video {}.".format(eye | unset_alive | identifier_name |
vis_eye_video_overlay.py | function to find most recent valid timestamp in the future
"""
if idx == len(timestamps)-1:
# if at the end, we can't go further into the future.
return get_past_timestamp(idx,timestamps)
elif timestamps[idx]:
return timestamps[idx][0]
else:
idx = min(len(timestamps),idx+1)
return get_future_timestamp(idx,timestamps)
def get_nearest_timestamp(past_timestamp,future_timestamp,world_timestamp):
dt_past = abs(past_timestamp-world_timestamp)
dt_future = abs(future_timestamp-world_timestamp) # abs prob not necessary here, but just for sanity
if dt_past < dt_future:
return past_timestamp
else:
return future_timestamp
def correlate_eye_world(eye_timestamps,world_timestamps):
"""
This function takes a list of eye timestamps and world timestamps
and correlates one eye frame per world frame
Returns a mapping that correlates a single eye frame index with each world frame index.
Up and downsampling is used to achieve this mapping.
"""
# return framewise mapping as a list
e_ts = eye_timestamps
w_ts = list(world_timestamps)
eye_frames_by_timestamp = dict(zip(e_ts,range(len(e_ts))))
eye_timestamps_by_world_index = [[] for i in world_timestamps]
frame_idx = 0
try:
current_e_ts = e_ts.pop(0)
except:
logger.warning("No eye timestamps found.")
return eye_timestamps_by_world_index
while e_ts:
# if the current eye timestamp is before the mean of the current world frame timestamp and the next worldframe timestamp
try:
t_between_frames = ( w_ts[frame_idx]+w_ts[frame_idx+1] ) / 2.
except IndexError:
break
if current_e_ts <= t_between_frames:
eye_timestamps_by_world_index[frame_idx].append(current_e_ts)
current_e_ts = e_ts.pop(0)
else:
frame_idx+=1
idx = 0
eye_world_frame_map = []
# some entries in the `eye_timestamps_by_world_index` might be empty -- no correlated eye timestamp
# so we will either show the previous frame or next frame - whichever is temporally closest
for candidate,world_ts in zip(eye_timestamps_by_world_index,w_ts):
# if there is no candidate, then assign it to the closest timestamp
if not candidate:
# get most recent timestamp, either in the past or future
e_past_ts = get_past_timestamp(idx,eye_timestamps_by_world_index)
e_future_ts = get_future_timestamp(idx,eye_timestamps_by_world_index)
eye_world_frame_map.append(eye_frames_by_timestamp[get_nearest_timestamp(e_past_ts,e_future_ts,world_ts)])
else:
# TODO - if there is a list of len > 1 - then we should check which is the temporally closest timestamp
eye_world_frame_map.append(eye_frames_by_timestamp[eye_timestamps_by_world_index[idx][-1]])
idx += 1
return eye_world_frame_map
class Vis_Eye_Video_Overlay(Plugin):
| self.eye_cap = []
self.mirror = mirror #do we horiz flip first eye
self.flip = flip #do we vert flip first eye
self.pos = [list(pos[0]),list(pos[1])] #positions of 2 eyes
self.drag_offset = [None,None]
# load eye videos and eye timestamps
if VersionFormat(self.g_pool.meta_info['Capture Software Version'][1:]) < VersionFormat('0.4'):
eye_video_path = os.path.join(g_pool.rec_dir,'eye.avi'),'None'
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye_timestamps.npy'),'None'
else:
eye_video_path = os.path.join(g_pool.rec_dir,'eye0.*'),os.path.join(g_pool.rec_dir,'eye1.*')
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye0_timestamps.npy'),os.path.join(g_pool.rec_dir,'eye1_timestamps.npy')
#try to load eye video and ts for each eye.
for video,ts in zip(eye_video_path,eye_timestamps_path):
try:
self.eye_cap.append(File_Source(self.g_pool,source_path=glob(video)[0],timestamps=np.load(ts)))
except(IndexError,FileCaptureError):
pass
else:
self.eye_frames.append(self.eye_cap[-1].get_frame())
try:
eye_timestamps = list(np.load(ts))
except:
pass
else:
self.eye_world_frame_map.append(correlate_eye_world(eye_timestamps,g_pool.timestamps))
if len(self.eye_cap) == 2:
logger.debug("Loaded binocular eye video data.")
elif len(self.eye_cap) == 1:
logger.debug("Loaded monocular eye video data")
self.showeyes = (0,)
else:
logger.error("Could not load eye video.")
self.alive = False
return
def unset_alive(self):
self.alive = False
def init_gui(self):
# initialize the menu
self.menu = ui.Scrolling_Menu('Eye Video Overlay')
self.update_gui()
self.g_pool.gui.append(self.menu)
def update_gui(self):
self.menu.elements[:] = []
self.menu.append(ui.Button('Close',self.unset_alive))
self.menu.append(ui.Info_Text('Show the eye video overlaid on top of the world video. Eye1 is usually the right eye'))
self.menu.append(ui.Slider('alpha',self,min=0.0,step=0.05,max=1.0,label='Opacity'))
self.menu.append(ui.Slider('eye_scale_factor',self,min=0.2,step=0.1,max=1.0,label='Video Scale'))
self.menu.append(ui.Switch('move_around',self,label="Move Overlay"))
if len(self.eye_cap) == 2:
self.menu.append(ui.Selector('showeyes',self,label='Show',selection=[(0,),(1,),(0,1)],labels= ['eye 1','eye 2','both'],setter=self.set_showeyes))
if 0 in self.showeyes:
self.menu.append(ui.Switch('0',self.mirror,label="Eye 1: Horiz. Flip"))
self.menu.append(ui.Switch('0',self.flip,label="Eye 1: Vert. Flip"))
if 1 in self.showeyes:
self.menu.append(ui.Switch('1',self.mirror,label="Eye 2: Horiz Flip"))
self.menu.append(ui.Switch('1',self.flip,label="Eye 2: Vert Flip"))
def set_showeyes(self,new_mode):
#every time an eye setting is chosen (eye 1, eye 2, or both), update the GUI menu to remove certain options from the list
self.showeyes = new_mode
self.update_gui()
def deinit_gui(self):
if self.menu:
self.g_pool.gui.remove(self.menu)
self.menu = None
def update(self,frame,events):
for eye_index in self.showeyes:
requested_eye_frame_idx = self.eye_world_frame_map[eye_index][frame.index]
#1. do we need a new frame?
if requested_eye_frame_idx != self.eye_frames[eye_index].index:
# do we need to seek?
if requested_eye_frame_idx == self.eye_cap[eye_index].get_frame_index()+1:
# if we just need to seek by one frame, it's faster to just read one and throw it away.
_ = self.eye_cap[eye_index].get_frame()
if requested_eye_frame_idx != self.eye_cap[eye_index].get_frame_index():
# only now do I need to seek
self.eye_cap[eye_index].seek_to_frame(requested_eye_frame_idx)
# read the new eye frame
try:
self.eye_frames[eye_index] = self.eye_cap[eye_index].get_frame()
except EndofVideoFileError:
logger.warning("Reached the end of the eye video for eye video {}.".format(eye_index | """docstring This plugin allows the user to overlay the eye recording on the recording of his field of vision
Features: flip video across horiz/vert axes, click and drag around interface, scale video size from 20% to 100%,
show only 1 or 2 or both eyes
features updated by Andrew June 2015
"""
def __init__(self,g_pool,alpha=0.6,eye_scale_factor=.5,move_around=0,mirror={'0':False,'1':False}, flip={'0':False,'1':False},pos=[(640,10),(10,10)]):
super().__init__(g_pool)
self.order = .6
self.menu = None
# user controls
self.alpha = alpha #opacity level of eyes
self.eye_scale_factor = eye_scale_factor #scale
self.showeyes = 0,1 #modes: a tuple containing both indices means both eyes are shown; a single index if only one eye was recorded
self.move_around = move_around #boolean whether allow to move clip around screen or not
self.video_size = [0,0] #video_size of recording (bc scaling)
#variables specific to each eye
self.eye_frames = []
self.eye_world_frame_map = [] | identifier_body |
vis_eye_video_overlay.py |
if timestamps[idx]:
res = timestamps[idx][-1]
return res
else:
return get_past_timestamp(idx-1,timestamps)
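# Illustrative example: with timestamps [[0.1], [], [0.3]] and idx=1,
# get_past_timestamp returns 0.1 (it walks back to the last non-empty entry)
# and get_future_timestamp returns 0.3 (it walks forward).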
def get_future_timestamp(idx,timestamps):
"""
recursive function to find most recent valid timestamp in the future
"""
if idx == len(timestamps)-1:
# if at the end, we can't go further into the future.
return get_past_timestamp(idx,timestamps)
elif timestamps[idx]:
return timestamps[idx][0]
else:
idx = min(len(timestamps),idx+1)
return get_future_timestamp(idx,timestamps)
def get_nearest_timestamp(past_timestamp,future_timestamp,world_timestamp):
dt_past = abs(past_timestamp-world_timestamp)
dt_future = abs(future_timestamp-world_timestamp) # abs prob not necessary here, but just for sanity
if dt_past < dt_future:
return past_timestamp
else:
return future_timestamp
def correlate_eye_world(eye_timestamps,world_timestamps):
"""
This function takes a list of eye timestamps and world timestamps
and correlates one eye frame per world frame
Returns a mapping that correlates a single eye frame index with each world frame index.
Up and downsampling is used to achieve this mapping.
"""
# return framewise mapping as a list
e_ts = eye_timestamps
w_ts = list(world_timestamps)
eye_frames_by_timestamp = dict(zip(e_ts,range(len(e_ts))))
eye_timestamps_by_world_index = [[] for i in world_timestamps]
frame_idx = 0
try:
current_e_ts = e_ts.pop(0)
except:
logger.warning("No eye timestamps found.")
return eye_timestamps_by_world_index
while e_ts:
# if the current eye timestamp is before the mean of the current and the next world frame timestamps
try:
t_between_frames = ( w_ts[frame_idx]+w_ts[frame_idx+1] ) / 2.
except IndexError:
break
if current_e_ts <= t_between_frames:
eye_timestamps_by_world_index[frame_idx].append(current_e_ts)
current_e_ts = e_ts.pop(0)
else:
frame_idx+=1
idx = 0
eye_world_frame_map = []
# some entries in the `eye_timestamps_by_world_index` might be empty -- no correlated eye timestamp
# so we will either show the previous frame or next frame - whichever is temporally closest
for candidate,world_ts in zip(eye_timestamps_by_world_index,w_ts):
# if there is no candidate, then assign it to the closest timestamp
if not candidate:
# get most recent timestamp, either in the past or future
e_past_ts = get_past_timestamp(idx,eye_timestamps_by_world_index)
e_future_ts = get_future_timestamp(idx,eye_timestamps_by_world_index)
eye_world_frame_map.append(eye_frames_by_timestamp[get_nearest_timestamp(e_past_ts,e_future_ts,world_ts)])
else:
# TODO - if there is a list of len > 1 - then we should check which is the temporally closest timestamp
eye_world_frame_map.append(eye_frames_by_timestamp[eye_timestamps_by_world_index[idx][-1]])
idx += 1
return eye_world_frame_map
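# Minimal usage sketch (assumed toy data, not from a real recording): three
# world frames at 0.1 s spacing correlated against a roughly double-rate eye
# stream; each world frame index receives exactly one eye frame index.
if __name__ == '__main__':
    _eye_ts = [0.00, 0.05, 0.10, 0.15, 0.20]
    _world_ts = [0.00, 0.10, 0.20]
    _mapping = correlate_eye_world(list(_eye_ts), _world_ts)  # pass a copy: e_ts is consumed
    assert len(_mapping) == len(_world_ts)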
class Vis_Eye_Video_Overlay(Plugin):
"""docstring This plugin allows the user to overlay the eye recording on the recording of his field of vision
Features: flip video across horiz/vert axes, click and drag around interface, scale video size from 20% to 100%,
show eye 1 only, eye 2 only, or both eyes
features updated by Andrew June 2015
"""
def __init__(self,g_pool,alpha=0.6,eye_scale_factor=.5,move_around=0,mirror={'0':False,'1':False}, flip={'0':False,'1':False},pos=[(640,10),(10,10)]):
super().__init__(g_pool)
self.order = .6
self.menu = None
# user controls
self.alpha = alpha #opacity level of eyes
self.eye_scale_factor = eye_scale_factor #scale
self.showeyes = 0,1 # show both eyes by default; reduced to (0,) below if only one eye recording is available
self.move_around = move_around #boolean whether allow to move clip around screen or not
self.video_size = [0,0] #video_size of recording (bc scaling)
#variables specific to each eye
self.eye_frames = []
self.eye_world_frame_map = []
self.eye_cap = []
self.mirror = mirror #do we horiz flip first eye
self.flip = flip #do we vert flip first eye
self.pos = [list(pos[0]),list(pos[1])] #positions of 2 eyes
self.drag_offset = [None,None]
# load eye videos and eye timestamps
if VersionFormat(self.g_pool.meta_info['Capture Software Version'][1:]) < VersionFormat('0.4'):
eye_video_path = os.path.join(g_pool.rec_dir,'eye.avi'),'None'
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye_timestamps.npy'),'None'
else:
eye_video_path = os.path.join(g_pool.rec_dir,'eye0.*'),os.path.join(g_pool.rec_dir,'eye1.*')
eye_timestamps_path = os.path.join(g_pool.rec_dir,'eye0_timestamps.npy'),os.path.join(g_pool.rec_dir,'eye1_timestamps.npy')
#try to load eye video and ts for each eye.
for video,ts in zip(eye_video_path,eye_timestamps_path):
try:
self.eye_cap.append(File_Source(self.g_pool,source_path=glob(video)[0],timestamps=np.load(ts)))
except(IndexError,FileCaptureError):
pass
else:
self.eye_frames.append(self.eye_cap[-1].get_frame())
try:
eye_timestamps = list(np.load(ts))
except:
pass
else:
self.eye_world_frame_map.append(correlate_eye_world(eye_timestamps,g_pool.timestamps))
if len(self.eye_cap) == 2:
logger.debug("Loaded binocular eye video data.")
elif len(self.eye_cap) == 1:
logger.debug("Loaded monocular eye video data")
self.showeyes = (0,)
else:
logger.error("Could not load eye video.")
self.alive = False
return
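    # Hypothetical illustration (not part of the original plugin): the overlay
    # described in the class docstring reduces to a per-pixel weighted sum of
    # the eye image and the world ROI it covers.
    @staticmethod
    def _alpha_blend_sketch(world_roi, eye_img, alpha):
        # assumes same-shaped uint8 arrays and alpha in [0, 1]
        blended = alpha * eye_img.astype('f4') + (1.0 - alpha) * world_roi.astype('f4')
        return blended.astype(world_roi.dtype)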
def unset_alive(self):
self.alive = False
def init_gui(self):
# initialize the menu
self.menu = ui.Scrolling_Menu('Eye Video Overlay')
self.update_gui()
self.g_pool.gui.append(self.menu)
def update_gui(self):
self.menu.elements[:] = []
self.menu.append(ui.Button('Close',self.unset_alive))
self.menu.append(ui.Info_Text('Show the eye video overlaid on top of the world video. Eye1 is usually the right eye'))
self.menu.append(ui.Slider('alpha',self,min=0.0,step=0.05,max=1.0,label='Opacity'))
self.menu.append(ui.Slider('eye_scale_factor',self,min=0.2,step=0.1,max=1.0,label='Video Scale'))
self.menu.append(ui.Switch('move_around',self,label="Move Overlay"))
if len(self.eye_cap) == 2:
self.menu.append(ui.Selector('showeyes',self,label='Show',selection=[(0,),(1,),(0,1)],labels= ['eye 1','eye 2','both'],setter=self.set_showeyes))
if 0 in self.showeyes:
self.menu.append(ui.Switch('0',self.mirror,label="Eye 1: Horiz. Flip"))
self.menu.append(ui.Switch('0',self.flip,label="Eye 1: Vert. Flip"))
if 1 in self.showeyes:
self.menu.append(ui.Switch('1',self.mirror,label="Eye 2: Horiz Flip"))
self.menu.append(ui.Switch('1',self.flip,label="Eye 2: Vert Flip"))
def set_showeyes(self,new_mode):
# every time an eye setting is chosen (eye 1, eye 2, or both), update the GUI menu to remove certain options from the list
self.showeyes = new_mode
self.update_gui()
def deinit_gui(self):
if self.menu:
self.g_pool.gui.remove(self.menu)
self.menu = None
def update(self,frame,events):
for eye_index in self.showeyes:
requested_eye_frame_idx = self.eye_world_frame_map[eye_index][frame.index]
#1. do we need a new frame?
if requested_eye_frame_idx != self.eye_frames[eye_index].index:
# do we need to seek?
if requested_eye_frame_idx == self.eye_cap[eye_index].get_frame_index()+1:
# if we just need to seek by one frame, it's faster to read one frame and throw it away.
_ = self.eye_cap[eye_index].get_frame()
if requested_eye_frame_idx != self.eye_cap[eye_index].get_frame_index():
# only now do I need to seek
self.eye_cap[eye_index].seek_to_frame(requested_eye_frame_idx)
# reading the new eye frame | return get_future_timestamp(idx,timestamps) | conditional_block |
|
helpstate.js | /**
* This file is part of Mrogue.
*
* Mrogue is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Mrogue is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of | */
"use strict";
(() => {
var HelpState = function(manager, game) {
this.manager = manager;
this.game = game;
this.name = "Help";
this.doesOwnRendering = true;
this.text = [
"Mrogue is a free and open source roguelike played directly in the browser. It focuses on simplicity and avoiding unnecessary complexity and aims to",
"introduce various concepts from the role-playing game genre to the roguelike genre. The goal of Mrogue is to find the Amulet of Yendor on the 7th",
"floor of the dungeon.",
"",
{ color: { r: 243, g: 247, b: 54 }, text: "MOVEMENT AND EXPLORING" },
"You can move your character around using the arrow keys, hjkl or the numpad. Mrogue uses 4 direction movement. In the dungeon you will also find",
"wells that you can drink from. These will restore or increase some of your attributes.",
"",
{ color: { r: 243, g: 247, b: 54 }, text: "ITEMS AND INVENTORY" },
"You can access your inventory by pressing the 'i' key. There you can see all the items your character is currently carrying. If you wish to drop an",
"item, press the 'd' key. You can also use items by pressing the 'e' key. You can find items in the dungeon as well as on bodies of creatures you've",
"slain. Pick up these items by standing on them and pressing the 'p' key.",
"",
{ color: { r: 243, g: 247, b: 54 }, text: "COMBAT AND ATTRIBUTES" },
"There's two types of combat in Mrogue; melee and magical. You can attack a creature by simply moving into them. The damage you will do depends on your",
"current power level which you gain by attacking creatures.",
"",
"Magical combat...",
"",
"Your character has three attributes; health, energy and power. The health attribute represents the amount of damage you can take before dying. Energy",
"is used to cast spells and automatically restores. The last attribute, power, increases the damage you do in melee combat as well as the effectiveness",
"of your spells."
];
};
HelpState.prototype = new State();
HelpState.prototype.input = function(key, event) {
switch(key) {
// Escape - Close the help screen
case "escape":
this.manager.switchState("player");
break;
}
};
/**
* @param {Object} renderer - the renderer used to queue text lines
*/
HelpState.prototype.render = function(renderer) {
for(var i = 0; i < this.text.length; i++) {
if(typeof this.text[i] !== "string") {
renderer.queue(0, i, [ {
foreground: this.text[i].color,
characters: this.text[i].text
} ]);
} else {
renderer.queue(0, i, [ {
foreground: { r: 199, g: 199, b: 199 },
characters: this.text[i]
} ]);
}
}
};
window.HelpState = HelpState;
})(); | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Mrogue. If not, see <http://www.gnu.org/licenses/>. | random_line_split |
helpstate.js | /**
* This file is part of Mrogue.
*
* Mrogue is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Mrogue is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Mrogue. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
(() => {
var HelpState = function(manager, game) {
this.manager = manager;
this.game = game;
this.name = "Help";
this.doesOwnRendering = true;
this.text = [
"Mrogue is a free and open source roguelike played directly in the browser. It focuses on simplicity and avoiding unnecessary complexity and aims to",
"introduce various concepts from the role-playing game genre to the roguelike genre. The goal of Mrogue is to find the Amulet of Yendor on the 7th",
"floor of the dungeon.",
"",
{ color: { r: 243, g: 247, b: 54 }, text: "MOVEMENT AND EXPLORING" },
"You can move your character around using the arrow keys, hjkl or the numpad. Mrogue uses 4 direction movement. In the dungeon you will also find",
"wells that you can drink from. These will restore or increase some of your attributes.",
"",
{ color: { r: 243, g: 247, b: 54 }, text: "ITEMS AND INVENTORY" },
"You can access your inventory by pressing the 'i' key. There you can see all the items your character is currently carrying. If you wish to drop an",
"item, press the 'd' key. You can also use items by pressing the 'e' key. You can find items in the dungeon as well as on bodies of creatures you've",
"slain. Pick up these items by standing on them and pressing the 'p' key.",
"",
{ color: { r: 243, g: 247, b: 54 }, text: "COMBAT AND ATTRIBUTES" },
"There's two types of combat in Mrogue; melee and magical. You can attack a creature by simply moving into them. The damage you will do depends on your",
"current power level which you gain by attacking creatures.",
"",
"Magical combat...",
"",
"Your character has three attributes; health, energy and power. The health attribute represents the amount of damage you can take before dying. Energy",
"is used to cast spells and automatically restores. The last attribute, power, increases the damage you do in melee combat as well as the effectiveness",
"of your spells."
];
};
HelpState.prototype = new State();
HelpState.prototype.input = function(key, event) {
switch(key) {
// Escape - Close the help screen
case "escape":
this.manager.switchState("player");
break;
}
};
/**
* @param {Object} renderer - the renderer used to queue text lines
*/
HelpState.prototype.render = function(renderer) {
for(var i = 0; i < this.text.length; i++) |
};
window.HelpState = HelpState;
})();
| {
if(typeof this.text[i] !== "string") {
renderer.queue(0, i, [ {
foreground: this.text[i].color,
characters: this.text[i].text
} ]);
} else {
renderer.queue(0, i, [ {
foreground: { r: 199, g: 199, b: 199 },
characters: this.text[i]
} ]);
}
} | conditional_block |
0004_auto_20171023_2354.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-23 23:54
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class | (migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('goods', '0002_auto_20171017_2017'),
('trade', '0003_auto_20171022_1507'),
]
operations = [
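        # Translation note for the Chinese labels in this migration:
        # 订单信息 = "order information", 订单号 = "order number",
        # 订单状态 = "order status"; status choices: 成功 = success,
        # 超时关闭 = closed on timeout, 交易创建,等待付款 = trade created /
        # awaiting payment, 交易结束 = trade finished.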
migrations.AlterField(
model_name='ordergoods',
name='order',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goods', to='trade.OrderInfo', verbose_name='订单信息'),
),
migrations.AlterField(
model_name='orderinfo',
name='order_sn',
field=models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号'),
),
migrations.AlterField(
model_name='orderinfo',
name='pay_status',
field=models.CharField(choices=[('TRADE_SUCCESS', '成功'), ('TRADE_CLOSE', '超时关闭'), ('WAIT_BUYER_PAY', '交易创建,等待付款'), ('TRADE_FINISHED', '交易结束')], default='paying', max_length=30, verbose_name='订单状态'),
),
migrations.AlterUniqueTogether(
name='shoppingcart',
unique_together=set([('user', 'goods')]),
),
]
| Migration | identifier_name |
0004_auto_20171023_2354.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-23 23:54
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
| field=models.CharField(choices=[('TRADE_SUCCESS', '成功'), ('TRADE_CLOSE', '超时关闭'), ('WAIT_BUYER_PAY', '交易创建,等待付款'), ('TRADE_FINISHED', '交易结束')], default='paying', max_length=30, verbose_name='订单状态'),
),
migrations.AlterUniqueTogether(
name='shoppingcart',
unique_together=set([('user', 'goods')]),
),
]
| dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('goods', '0002_auto_20171017_2017'),
('trade', '0003_auto_20171022_1507'),
]
operations = [
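        # Translation note for the Chinese labels in this migration:
        # 订单信息 = "order information", 订单号 = "order number",
        # 订单状态 = "order status"; status choices: 成功 = success,
        # 超时关闭 = closed on timeout, 交易创建,等待付款 = trade created /
        # awaiting payment, 交易结束 = trade finished.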
migrations.AlterField(
model_name='ordergoods',
name='order',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goods', to='trade.OrderInfo', verbose_name='订单信息'),
),
migrations.AlterField(
model_name='orderinfo',
name='order_sn',
field=models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号'),
),
migrations.AlterField(
model_name='orderinfo',
name='pay_status',
| identifier_body |
0004_auto_20171023_2354.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-23 23:54
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('goods', '0002_auto_20171017_2017'),
('trade', '0003_auto_20171022_1507'),
]
operations = [
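        # Translation note for the Chinese labels in this migration:
        # 订单信息 = "order information", 订单号 = "order number",
        # 订单状态 = "order status"; status choices: 成功 = success,
        # 超时关闭 = closed on timeout, 交易创建,等待付款 = trade created /
        # awaiting payment, 交易结束 = trade finished.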
migrations.AlterField(
model_name='ordergoods',
name='order',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goods', to='trade.OrderInfo', verbose_name='订单信息'),
),
migrations.AlterField(
model_name='orderinfo',
name='order_sn',
field=models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号'),
),
migrations.AlterField(
model_name='orderinfo',
name='pay_status',
field=models.CharField(choices=[('TRADE_SUCCESS', '成功'), ('TRADE_CLOSE', '超时关闭'), ('WAIT_BUYER_PAY', '交易创建,等待付款'), ('TRADE_FINISHED', '交易结束')], default='paying', max_length=30, verbose_name='订单状态'),
),
migrations.AlterUniqueTogether(
| ] | name='shoppingcart',
unique_together=set([('user', 'goods')]),
),
| random_line_split |
FriendlyTargetState.ts | /*
* This Source Code Form is subject to the terms of the Mozilla Public |
import { defaultPlayerStateModel } from './EntityState';
import { createDefaultOnUpdated, createDefaultOnReady } from '../../../_baseGame/GameClientModels/_Updatable';
import engineInit from '../../../_baseGame/GameClientModels/_Init';
declare global {
type FriendlyTargetState = AnyEntityState;
type ImmutableFriendlyTargetState = ImmutableEntityState;
}
export const FriendlyTarget_Update = 'friendlyTargetPlayerState.update';
function initDefault(): FriendlyTargetState {
return {
...defaultPlayerStateModel(),
position: { x: 0, y: 0, z: 0 },
isReady: false,
updateEventName: FriendlyTarget_Update,
onUpdated: createDefaultOnUpdated(FriendlyTarget_Update),
onReady: createDefaultOnReady(FriendlyTarget_Update),
};
}
/**
* Initialize this model with the game engine.
*/
export default function() {
engineInit(
FriendlyTarget_Update,
initDefault,
() => camelotunchained._devGame.friendlyTargetState,
(model: AnyEntityState) => {
camelotunchained._devGame.friendlyTargetState = model as FriendlyTargetState;
});
} | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/ | random_line_split |
FriendlyTargetState.ts | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import { defaultPlayerStateModel } from './EntityState';
import { createDefaultOnUpdated, createDefaultOnReady } from '../../../_baseGame/GameClientModels/_Updatable';
import engineInit from '../../../_baseGame/GameClientModels/_Init';
declare global {
type FriendlyTargetState = AnyEntityState;
type ImmutableFriendlyTargetState = ImmutableEntityState;
}
export const FriendlyTarget_Update = 'friendlyTargetPlayerState.update';
function | (): FriendlyTargetState {
return {
...defaultPlayerStateModel(),
position: { x: 0, y: 0, z: 0 },
isReady: false,
updateEventName: FriendlyTarget_Update,
onUpdated: createDefaultOnUpdated(FriendlyTarget_Update),
onReady: createDefaultOnReady(FriendlyTarget_Update),
};
}
/**
* Initialize this model with the game engine.
*/
export default function() {
engineInit(
FriendlyTarget_Update,
initDefault,
() => camelotunchained._devGame.friendlyTargetState,
(model: AnyEntityState) => {
camelotunchained._devGame.friendlyTargetState = model as FriendlyTargetState;
});
}
| initDefault | identifier_name |
FriendlyTargetState.ts | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import { defaultPlayerStateModel } from './EntityState';
import { createDefaultOnUpdated, createDefaultOnReady } from '../../../_baseGame/GameClientModels/_Updatable';
import engineInit from '../../../_baseGame/GameClientModels/_Init';
declare global {
type FriendlyTargetState = AnyEntityState;
type ImmutableFriendlyTargetState = ImmutableEntityState;
}
export const FriendlyTarget_Update = 'friendlyTargetPlayerState.update';
function initDefault(): FriendlyTargetState |
/**
* Initialize this model with the game engine.
*/
export default function() {
engineInit(
FriendlyTarget_Update,
initDefault,
() => camelotunchained._devGame.friendlyTargetState,
(model: AnyEntityState) => {
camelotunchained._devGame.friendlyTargetState = model as FriendlyTargetState;
});
}
| {
return {
...defaultPlayerStateModel(),
position: { x: 0, y: 0, z: 0 },
isReady: false,
updateEventName: FriendlyTarget_Update,
onUpdated: createDefaultOnUpdated(FriendlyTarget_Update),
onReady: createDefaultOnReady(FriendlyTarget_Update),
};
} | identifier_body |
demo.ts | class TableDemoCtrl {
static $inject = ['$httpBackend', 'dataListManager'];
constructor(private $httpBackend, private dataListManager: ddui.DataListManager) {
this.createFakeApiResponse();
this.initList();
}
dataList: ddui.DataList<DemoDataRow>;
pageChanged() {
console.log('Page changed');
}
private initList() {
var config: ddui.ListConfig = {
id: 'tableDemo',
url: '/api/table/',
paging: true
};
this.dataList = this.dataListManager.init<DemoDataRow>(config);
var filter: ddui.FilterModel = {
'name': { value: 'My name' }
};
this.dataList.setFilter(() => filter);
this.dataList.fetchPage();
}
private createFakeApiResponse() {
this.$httpBackend.whenGET(function (url) { return url.startsWith('/api/table/'); }).respond(function (method, url) {
var result = {
items: [
{
'index': 0,
'isActive': false,
'balance': '$2,962.75',
'picture': 'http://placehold.it/32x32',
'age': 36,
'eyeColor': 'brown',
'name': 'Virgie Patrick'
},
{
'index': 1,
'isActive': true,
'balance': '$1,662.43',
'picture': 'http://placehold.it/32x32',
'age': 32,
'eyeColor': 'green',
'name': 'Burton Warren'
},
{
'index': 2,
'isActive': false,
'balance': '$3,988.63',
'picture': 'http://placehold.it/32x32',
'age': 27,
'eyeColor': 'brown',
'name': 'Mercedes Horne'
},
{
'index': 3,
'isActive': false,
'balance': '$3,914.68',
'picture': 'http://placehold.it/32x32',
'age': 21,
'eyeColor': 'brown',
'name': 'Harper Reilly'
},
{ | 'age': 35,
'eyeColor': 'brown',
'name': 'Pierce Callahan'
}
],
count: 300
};
return [200, result];
});
}
}
angular.module('dd.ui.demo')
.controller('TableDemoCtrl', TableDemoCtrl); | 'index': 4,
'isActive': true,
'balance': '$1,237.50',
'picture': 'http://placehold.it/32x32', | random_line_split |
demo.ts |
class TableDemoCtrl {
static $inject = ['$httpBackend', 'dataListManager'];
| (private $httpBackend, private dataListManager: ddui.DataListManager) {
this.createFakeApiResponse();
this.initList();
}
dataList: ddui.DataList<DemoDataRow>;
pageChanged() {
console.log('Page changed');
}
private initList() {
var config: ddui.ListConfig = {
id: 'tableDemo',
url: '/api/table/',
paging: true
};
this.dataList = this.dataListManager.init<DemoDataRow>(config);
var filter: ddui.FilterModel = {
'name': { value: 'My name' }
};
this.dataList.setFilter(() => filter);
this.dataList.fetchPage();
}
private createFakeApiResponse() {
this.$httpBackend.whenGET(function (url) { return url.startsWith('/api/table/'); }).respond(function (method, url) {
var result = {
items: [
{
'index': 0,
'isActive': false,
'balance': '$2,962.75',
'picture': 'http://placehold.it/32x32',
'age': 36,
'eyeColor': 'brown',
'name': 'Virgie Patrick'
},
{
'index': 1,
'isActive': true,
'balance': '$1,662.43',
'picture': 'http://placehold.it/32x32',
'age': 32,
'eyeColor': 'green',
'name': 'Burton Warren'
},
{
'index': 2,
'isActive': false,
'balance': '$3,988.63',
'picture': 'http://placehold.it/32x32',
'age': 27,
'eyeColor': 'brown',
'name': 'Mercedes Horne'
},
{
'index': 3,
'isActive': false,
'balance': '$3,914.68',
'picture': 'http://placehold.it/32x32',
'age': 21,
'eyeColor': 'brown',
'name': 'Harper Reilly'
},
{
'index': 4,
'isActive': true,
'balance': '$1,237.50',
'picture': 'http://placehold.it/32x32',
'age': 35,
'eyeColor': 'brown',
'name': 'Pierce Callahan'
}
],
count: 300
};
return [200, result];
});
}
}
angular.module('dd.ui.demo')
.controller('TableDemoCtrl', TableDemoCtrl); | constructor | identifier_name |
demo.ts |
class TableDemoCtrl {
static $inject = ['$httpBackend', 'dataListManager'];
constructor(private $httpBackend, private dataListManager: ddui.DataListManager) {
this.createFakeApiResponse();
this.initList();
}
dataList: ddui.DataList<DemoDataRow>;
pageChanged() {
console.log('Page changed');
}
private initList() {
var config: ddui.ListConfig = {
id: 'tableDemo',
url: '/api/table/',
paging: true
};
this.dataList = this.dataListManager.init<DemoDataRow>(config);
var filter: ddui.FilterModel = {
'name': { value: 'My name' }
};
this.dataList.setFilter(() => filter);
this.dataList.fetchPage();
}
private createFakeApiResponse() | 'name': 'Burton Warren'
},
{
'index': 2,
'isActive': false,
'balance': '$3,988.63',
'picture': 'http://placehold.it/32x32',
'age': 27,
'eyeColor': 'brown',
'name': 'Mercedes Horne'
},
{
'index': 3,
'isActive': false,
'balance': '$3,914.68',
'picture': 'http://placehold.it/32x32',
'age': 21,
'eyeColor': 'brown',
'name': 'Harper Reilly'
},
{
'index': 4,
'isActive': true,
'balance': '$1,237.50',
'picture': 'http://placehold.it/32x32',
'age': 35,
'eyeColor': 'brown',
'name': 'Pierce Callahan'
}
],
count: 300
};
return [200, result];
});
}
}
angular.module('dd.ui.demo')
.controller('TableDemoCtrl', TableDemoCtrl); | {
this.$httpBackend.whenGET(function (url) { return url.startsWith('/api/table/'); }).respond(function (method, url) {
var result = {
items: [
{
'index': 0,
'isActive': false,
'balance': '$2,962.75',
'picture': 'http://placehold.it/32x32',
'age': 36,
'eyeColor': 'brown',
'name': 'Virgie Patrick'
},
{
'index': 1,
'isActive': true,
'balance': '$1,662.43',
'picture': 'http://placehold.it/32x32',
'age': 32,
'eyeColor': 'green', | identifier_body |
nav-crud-page.component.ts | import { OnInit, OnDestroy } from '@angular/core';
import { MdSnackBar, MdSnackBarConfig, MdDialog, MdDialogConfig, ComponentType } from '@angular/material';
import { ObservableMedia } from '@angular/flex-layout';
import { environment } from '../../environments/environment';
import { Cache } from '../../cache';
import { Model } from '../../model/model';
import { ModelService } from '../../service/model-service';
import { DetailDialogComponent } from './detail-dialog.component';
import { CrudPageComponent } from './crud-page.component';
export abstract class NavCrudPageComponent<TModel extends Model, TModelService extends ModelService<TModel>, TDialog extends DetailDialogComponent<TModel, TModelService, TDialog>> extends CrudPageComponent<TModel, TModelService, TDialog> implements OnInit, OnDestroy {
parentId: string;
parents: TModel[] = [];
rootTitle: string;
constructor(protected cache: Cache, protected media: ObservableMedia, protected modelService: TModelService, protected snackBar: MdSnackBar, protected snackBarConfig: MdSnackBarConfig, protected dialog: MdDialog) {
super(cache, media, modelService, snackBar, snackBarConfig, dialog);
}
ngOnInit() {
super.ngOnInit();
this.rootTitle = this.cache.title;
}
ngOnDestroy() {
super.ngOnDestroy();
this.cache.backFunction = null;
}
enter(model: TModel, push: boolean = true) {
if (model) {
if (push === true) {
this.parents.push(model);
}
this.parentId = model.id;
this.cache.title = this.getName(model);
this.cache.backFunction = this.back.bind(this);
}
else {
this.parents = [];
this.parentId = null;
this.cache.title = this.rootTitle;
this.cache.backFunction = null;
} | let parent = this.parents.splice(this.parents.length - 1, 1);
if (this.parents.length === 0) {
this.enter(null);
}
else {
this.enter(this.parents[this.parents.length - 1], false);
}
}
} | this.cache.createFunctionData = this.parentId;
this.find();
}
back() { | random_line_split |
nav-crud-page.component.ts | import { OnInit, OnDestroy } from '@angular/core';
import { MdSnackBar, MdSnackBarConfig, MdDialog, MdDialogConfig, ComponentType } from '@angular/material';
import { ObservableMedia } from '@angular/flex-layout';
import { environment } from '../../environments/environment';
import { Cache } from '../../cache';
import { Model } from '../../model/model';
import { ModelService } from '../../service/model-service';
import { DetailDialogComponent } from './detail-dialog.component';
import { CrudPageComponent } from './crud-page.component';
export abstract class NavCrudPageComponent<TModel extends Model, TModelService extends ModelService<TModel>, TDialog extends DetailDialogComponent<TModel, TModelService, TDialog>> extends CrudPageComponent<TModel, TModelService, TDialog> implements OnInit, OnDestroy {
parentId: string;
parents: TModel[] = [];
rootTitle: string;
constructor(protected cache: Cache, protected media: ObservableMedia, protected modelService: TModelService, protected snackBar: MdSnackBar, protected snackBarConfig: MdSnackBarConfig, protected dialog: MdDialog) |
ngOnInit() {
super.ngOnInit();
this.rootTitle = this.cache.title;
}
ngOnDestroy() {
super.ngOnDestroy();
this.cache.backFunction = null;
}
enter(model: TModel, push: boolean = true) {
if (model) {
if (push === true) {
this.parents.push(model);
}
this.parentId = model.id;
this.cache.title = this.getName(model);
this.cache.backFunction = this.back.bind(this);
}
else {
this.parents = [];
this.parentId = null;
this.cache.title = this.rootTitle;
this.cache.backFunction = null;
}
this.cache.createFunctionData = this.parentId;
this.find();
}
back() {
let parent = this.parents.splice(this.parents.length - 1, 1);
if (this.parents.length === 0) {
this.enter(null);
}
else {
this.enter(this.parents[this.parents.length - 1], false);
}
}
} | {
super(cache, media, modelService, snackBar, snackBarConfig, dialog);
} | identifier_body |
nav-crud-page.component.ts | import { OnInit, OnDestroy } from '@angular/core';
import { MdSnackBar, MdSnackBarConfig, MdDialog, MdDialogConfig, ComponentType } from '@angular/material';
import { ObservableMedia } from '@angular/flex-layout';
import { environment } from '../../environments/environment';
import { Cache } from '../../cache';
import { Model } from '../../model/model';
import { ModelService } from '../../service/model-service';
import { DetailDialogComponent } from './detail-dialog.component';
import { CrudPageComponent } from './crud-page.component';
export abstract class NavCrudPageComponent<TModel extends Model, TModelService extends ModelService<TModel>, TDialog extends DetailDialogComponent<TModel, TModelService, TDialog>> extends CrudPageComponent<TModel, TModelService, TDialog> implements OnInit, OnDestroy {
parentId: string;
parents: TModel[] = [];
rootTitle: string;
constructor(protected cache: Cache, protected media: ObservableMedia, protected modelService: TModelService, protected snackBar: MdSnackBar, protected snackBarConfig: MdSnackBarConfig, protected dialog: MdDialog) {
super(cache, media, modelService, snackBar, snackBarConfig, dialog);
}
ngOnInit() {
super.ngOnInit();
this.rootTitle = this.cache.title;
}
ngOnDestroy() {
super.ngOnDestroy();
this.cache.backFunction = null;
}
enter(model: TModel, push: boolean = true) {
if (model) {
if (push === true) {
this.parents.push(model);
}
this.parentId = model.id;
this.cache.title = this.getName(model);
this.cache.backFunction = this.back.bind(this);
}
else {
this.parents = [];
this.parentId = null;
this.cache.title = this.rootTitle;
this.cache.backFunction = null;
}
this.cache.createFunctionData = this.parentId;
this.find();
}
back() {
let parent = this.parents.splice(this.parents.length - 1, 1);
if (this.parents.length === 0) |
else {
this.enter(this.parents[this.parents.length - 1], false);
}
}
} | {
this.enter(null);
} | conditional_block |
nav-crud-page.component.ts | import { OnInit, OnDestroy } from '@angular/core';
import { MdSnackBar, MdSnackBarConfig, MdDialog, MdDialogConfig, ComponentType } from '@angular/material';
import { ObservableMedia } from '@angular/flex-layout';
import { environment } from '../../environments/environment';
import { Cache } from '../../cache';
import { Model } from '../../model/model';
import { ModelService } from '../../service/model-service';
import { DetailDialogComponent } from './detail-dialog.component';
import { CrudPageComponent } from './crud-page.component';
export abstract class NavCrudPageComponent<TModel extends Model, TModelService extends ModelService<TModel>, TDialog extends DetailDialogComponent<TModel, TModelService, TDialog>> extends CrudPageComponent<TModel, TModelService, TDialog> implements OnInit, OnDestroy {
parentId: string;
parents: TModel[] = [];
rootTitle: string;
constructor(protected cache: Cache, protected media: ObservableMedia, protected modelService: TModelService, protected snackBar: MdSnackBar, protected snackBarConfig: MdSnackBarConfig, protected dialog: MdDialog) {
super(cache, media, modelService, snackBar, snackBarConfig, dialog);
}
ngOnInit() {
super.ngOnInit();
this.rootTitle = this.cache.title;
}
ngOnDestroy() {
super.ngOnDestroy();
this.cache.backFunction = null;
}
| (model: TModel, push: boolean = true) {
if (model) {
if (push === true) {
this.parents.push(model);
}
this.parentId = model.id;
this.cache.title = this.getName(model);
this.cache.backFunction = this.back.bind(this);
}
else {
this.parents = [];
this.parentId = null;
this.cache.title = this.rootTitle;
this.cache.backFunction = null;
}
this.cache.createFunctionData = this.parentId;
this.find();
}
back() {
let parent = this.parents.splice(this.parents.length - 1, 1);
if (this.parents.length === 0) {
this.enter(null);
}
else {
this.enter(this.parents[this.parents.length - 1], false);
}
}
} | enter | identifier_name |
translate.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from collections import OrderedDict
from django.conf import settings
from pootle.core.url_helpers import get_previous_url
from pootle_app.models.permissions import check_permission
from pootle_misc.checks import (
CATEGORY_IDS, check_names,
get_qualitycheck_schema, get_qualitychecks)
from pootle_misc.forms import make_search_form
from pootle_store.constants import AMAGAMA_SOURCE_LANGUAGES
from .base import PootleDetailView
class PootleTranslateView(PootleDetailView):
template_name = "editor/main.html"
page_name = "translate"
view_name = ""
@property
def check_data(self):
return self.object.data_tool.get_checks()
@property
def checks(self):
check_data = self.check_data
checks = get_qualitychecks()
schema = {sc["code"]: sc for sc in get_qualitycheck_schema()}
_checks = {}
for check, checkid in checks.items():
if check not in check_data:
continue
_checkid = schema[checkid]["name"]
_checks[_checkid] = _checks.get(
_checkid, dict(checks=[], title=schema[checkid]["title"]))
_checks[_checkid]["checks"].append(
dict(
code=check,
title=check_names[check],
count=check_data[check]))
return OrderedDict(
(k, _checks[k])
for k in CATEGORY_IDS.keys()
if _checks.get(k))
@property
def ctx_path(self):
|
@property
def vfolder_pk(self):
return ""
@property
def display_vfolder_priority(self):
return False
@property
def chunk_size(self):
return self.request.user.get_unit_rows()
def get_context_data(self, *args, **kwargs):
ctx = super(PootleTranslateView, self).get_context_data(*args, **kwargs)
ctx.update(
{'page': self.page_name,
'chunk_size': self.chunk_size,
'current_vfolder_pk': self.vfolder_pk,
'ctx_path': self.ctx_path,
'display_priority': self.display_vfolder_priority,
'checks': self.checks,
'cantranslate': check_permission("translate", self.request),
'cansuggest': check_permission("suggest", self.request),
'canreview': check_permission("review", self.request),
'search_form': make_search_form(request=self.request),
'previous_url': get_previous_url(self.request),
'POOTLE_MT_BACKENDS': settings.POOTLE_MT_BACKENDS,
'AMAGAMA_URL': settings.AMAGAMA_URL,
'AMAGAMA_SOURCE_LANGUAGES': AMAGAMA_SOURCE_LANGUAGES,
'editor_extends': self.template_extends})
return ctx
| return self.pootle_path | identifier_body |
translate.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from collections import OrderedDict
from django.conf import settings
from pootle.core.url_helpers import get_previous_url
from pootle_app.models.permissions import check_permission
from pootle_misc.checks import (
CATEGORY_IDS, check_names,
get_qualitycheck_schema, get_qualitychecks) | from .base import PootleDetailView
class PootleTranslateView(PootleDetailView):
template_name = "editor/main.html"
page_name = "translate"
view_name = ""
@property
def check_data(self):
return self.object.data_tool.get_checks()
@property
def checks(self):
check_data = self.check_data
checks = get_qualitychecks()
schema = {sc["code"]: sc for sc in get_qualitycheck_schema()}
_checks = {}
for check, checkid in checks.items():
if check not in check_data:
continue
_checkid = schema[checkid]["name"]
_checks[_checkid] = _checks.get(
_checkid, dict(checks=[], title=schema[checkid]["title"]))
_checks[_checkid]["checks"].append(
dict(
code=check,
title=check_names[check],
count=check_data[check]))
return OrderedDict(
(k, _checks[k])
for k in CATEGORY_IDS.keys()
if _checks.get(k))
@property
def ctx_path(self):
return self.pootle_path
@property
def vfolder_pk(self):
return ""
@property
def display_vfolder_priority(self):
return False
@property
def chunk_size(self):
return self.request.user.get_unit_rows()
def get_context_data(self, *args, **kwargs):
ctx = super(PootleTranslateView, self).get_context_data(*args, **kwargs)
ctx.update(
{'page': self.page_name,
'chunk_size': self.chunk_size,
'current_vfolder_pk': self.vfolder_pk,
'ctx_path': self.ctx_path,
'display_priority': self.display_vfolder_priority,
'checks': self.checks,
'cantranslate': check_permission("translate", self.request),
'cansuggest': check_permission("suggest", self.request),
'canreview': check_permission("review", self.request),
'search_form': make_search_form(request=self.request),
'previous_url': get_previous_url(self.request),
'POOTLE_MT_BACKENDS': settings.POOTLE_MT_BACKENDS,
'AMAGAMA_URL': settings.AMAGAMA_URL,
'AMAGAMA_SOURCE_LANGUAGES': AMAGAMA_SOURCE_LANGUAGES,
'editor_extends': self.template_extends})
return ctx | from pootle_misc.forms import make_search_form
from pootle_store.constants import AMAGAMA_SOURCE_LANGUAGES
| random_line_split |
translate.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from collections import OrderedDict
from django.conf import settings
from pootle.core.url_helpers import get_previous_url
from pootle_app.models.permissions import check_permission
from pootle_misc.checks import (
CATEGORY_IDS, check_names,
get_qualitycheck_schema, get_qualitychecks)
from pootle_misc.forms import make_search_form
from pootle_store.constants import AMAGAMA_SOURCE_LANGUAGES
from .base import PootleDetailView
class PootleTranslateView(PootleDetailView):
template_name = "editor/main.html"
page_name = "translate"
view_name = ""
@property
def check_data(self):
return self.object.data_tool.get_checks()
@property
def checks(self):
check_data = self.check_data
checks = get_qualitychecks()
schema = {sc["code"]: sc for sc in get_qualitycheck_schema()}
_checks = {}
for check, checkid in checks.items():
if check not in check_data:
continue
_checkid = schema[checkid]["name"]
_checks[_checkid] = _checks.get(
_checkid, dict(checks=[], title=schema[checkid]["title"]))
_checks[_checkid]["checks"].append(
dict(
code=check,
title=check_names[check],
count=check_data[check]))
return OrderedDict(
(k, _checks[k])
for k in CATEGORY_IDS.keys()
if _checks.get(k))
@property
def ctx_path(self):
return self.pootle_path
@property
def vfolder_pk(self):
return ""
@property
def | (self):
return False
@property
def chunk_size(self):
return self.request.user.get_unit_rows()
def get_context_data(self, *args, **kwargs):
ctx = super(PootleTranslateView, self).get_context_data(*args, **kwargs)
ctx.update(
{'page': self.page_name,
'chunk_size': self.chunk_size,
'current_vfolder_pk': self.vfolder_pk,
'ctx_path': self.ctx_path,
'display_priority': self.display_vfolder_priority,
'checks': self.checks,
'cantranslate': check_permission("translate", self.request),
'cansuggest': check_permission("suggest", self.request),
'canreview': check_permission("review", self.request),
'search_form': make_search_form(request=self.request),
'previous_url': get_previous_url(self.request),
'POOTLE_MT_BACKENDS': settings.POOTLE_MT_BACKENDS,
'AMAGAMA_URL': settings.AMAGAMA_URL,
'AMAGAMA_SOURCE_LANGUAGES': AMAGAMA_SOURCE_LANGUAGES,
'editor_extends': self.template_extends})
return ctx
| display_vfolder_priority | identifier_name |
translate.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from collections import OrderedDict
from django.conf import settings
from pootle.core.url_helpers import get_previous_url
from pootle_app.models.permissions import check_permission
from pootle_misc.checks import (
CATEGORY_IDS, check_names,
get_qualitycheck_schema, get_qualitychecks)
from pootle_misc.forms import make_search_form
from pootle_store.constants import AMAGAMA_SOURCE_LANGUAGES
from .base import PootleDetailView
class PootleTranslateView(PootleDetailView):
template_name = "editor/main.html"
page_name = "translate"
view_name = ""
@property
def check_data(self):
return self.object.data_tool.get_checks()
@property
def checks(self):
check_data = self.check_data
checks = get_qualitychecks()
schema = {sc["code"]: sc for sc in get_qualitycheck_schema()}
_checks = {}
for check, checkid in checks.items():
if check not in check_data:
|
_checkid = schema[checkid]["name"]
_checks[_checkid] = _checks.get(
_checkid, dict(checks=[], title=schema[checkid]["title"]))
_checks[_checkid]["checks"].append(
dict(
code=check,
title=check_names[check],
count=check_data[check]))
return OrderedDict(
(k, _checks[k])
for k in CATEGORY_IDS.keys()
if _checks.get(k))
@property
def ctx_path(self):
return self.pootle_path
@property
def vfolder_pk(self):
return ""
@property
def display_vfolder_priority(self):
return False
@property
def chunk_size(self):
return self.request.user.get_unit_rows()
def get_context_data(self, *args, **kwargs):
ctx = super(PootleTranslateView, self).get_context_data(*args, **kwargs)
ctx.update(
{'page': self.page_name,
'chunk_size': self.chunk_size,
'current_vfolder_pk': self.vfolder_pk,
'ctx_path': self.ctx_path,
'display_priority': self.display_vfolder_priority,
'checks': self.checks,
'cantranslate': check_permission("translate", self.request),
'cansuggest': check_permission("suggest", self.request),
'canreview': check_permission("review", self.request),
'search_form': make_search_form(request=self.request),
'previous_url': get_previous_url(self.request),
'POOTLE_MT_BACKENDS': settings.POOTLE_MT_BACKENDS,
'AMAGAMA_URL': settings.AMAGAMA_URL,
'AMAGAMA_SOURCE_LANGUAGES': AMAGAMA_SOURCE_LANGUAGES,
'editor_extends': self.template_extends})
return ctx
| continue | conditional_block |
mainwindow_8h.js | var mainwindow_8h =
[
[ "MainWindow", "d9/dc6/class_main_window.html", "d9/dc6/class_main_window" ], | [ "DEFAULT_DATA_TEAM_FILE", "d9/d53/mainwindow_8h.html#af219f407cc9a763920d8e798c8d6031b", null ]
]; | [ "BINARY_FILE", "d9/d53/mainwindow_8h.html#a235764c67fdcb87b7484bdc1ef801959", null ],
[ "DEFAULT_DATA_DIR", "d9/d53/mainwindow_8h.html#a89d9b90dbc1df3ab30a2d08519e40e49", null ],
[ "DEFAULT_DATA_FILE_EXT", "d9/d53/mainwindow_8h.html#a7009a0f1670c12ad66218104884f43a8", null ],
[ "DEFAULT_DATA_PLAYER_FILE", "d9/d53/mainwindow_8h.html#a92e4f26a961aad80bec26b9f44030c3d", null ], | random_line_split |
setup.py | # Copyright 2013 Allen Institute
# This file is part of dipde
# dipde is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dipde is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dipde. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import dipde
import io
import os
import sys
here = os.path.abspath(os.path.dirname(__file__))
def read(*filenames, **kwargs):
encoding = kwargs.get('encoding', 'utf-8')
sep = kwargs.get('sep', '\n')
buf = []
for filename in filenames:
with io.open(filename, encoding=encoding) as f:
buf.append(f.read())
return sep.join(buf)
#long_description = read('README.md')
def prepend_find_packages(*roots):
''' Recursively traverse nested packages under the root directories
''' | packages += [root + '.' + s for s in find_packages(root)]
return packages
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = ['--junitxml=result.xml']
self.test_args_cov = self.test_args + ['--cov=dipde', '--cov-report=term', '--cov-report=html','--cov-config=.coveragerc']
self.test_suite = True
def run_tests(self):
import pytest
try:
errcode = pytest.main(self.test_args_cov)
except:
errcode = pytest.main(self.test_args)
sys.exit(errcode)
setup(
name='dipde',
version=dipde.__version__,
url='https://github.com/AllenBrainAtlas/DiPDE',
author='Nicholas Cain',
tests_require=['pytest'],
install_requires=[],
cmdclass={'test': PyTest},
author_email='[email protected]',
description='Numerical solver for coupled population density equations',
long_description='',
packages=prepend_find_packages('dipde'),
include_package_data=True,
package_data={'':['*.md', '*.txt', '*.cfg']},
platforms='any',
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: Apache Software License :: 2.0',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
extras_require={
'testing': ['pytest'],
}
) | packages = []
for root in roots:
packages += [root] | random_line_split |
setup.py | # Copyright 2013 Allen Institute
# This file is part of dipde
# dipde is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dipde is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dipde. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import dipde
import io
import os
import sys
here = os.path.abspath(os.path.dirname(__file__))
def read(*filenames, **kwargs):
encoding = kwargs.get('encoding', 'utf-8')
sep = kwargs.get('sep', '\n')
buf = []
for filename in filenames:
with io.open(filename, encoding=encoding) as f:
buf.append(f.read())
return sep.join(buf)
#long_description = read('README.md')
def prepend_find_packages(*roots):
''' Recursively traverse nested packages under the root directories
'''
packages = []
for root in roots:
|
return packages
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = ['--junitxml=result.xml']
self.test_args_cov = self.test_args + ['--cov=dipde', '--cov-report=term', '--cov-report=html','--cov-config=.coveragerc']
self.test_suite = True
def run_tests(self):
import pytest
try:
errcode = pytest.main(self.test_args_cov)
except:
errcode = pytest.main(self.test_args)
sys.exit(errcode)
setup(
name='dipde',
version=dipde.__version__,
url='https://github.com/AllenBrainAtlas/DiPDE',
author='Nicholas Cain',
tests_require=['pytest'],
install_requires=[],
cmdclass={'test': PyTest},
author_email='[email protected]',
description='Numerical solver for coupled population density equations',
long_description='',
packages=prepend_find_packages('dipde'),
include_package_data=True,
package_data={'':['*.md', '*.txt', '*.cfg']},
platforms='any',
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: Apache Software License :: 2.0',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
extras_require={
'testing': ['pytest'],
}
)
| packages += [root]
packages += [root + '.' + s for s in find_packages(root)] | conditional_block |
setup.py | # Copyright 2013 Allen Institute
# This file is part of dipde
# dipde is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dipde is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dipde. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import dipde
import io
import os
import sys
here = os.path.abspath(os.path.dirname(__file__))
def read(*filenames, **kwargs):
encoding = kwargs.get('encoding', 'utf-8')
sep = kwargs.get('sep', '\n')
buf = []
for filename in filenames:
with io.open(filename, encoding=encoding) as f:
buf.append(f.read())
return sep.join(buf)
#long_description = read('README.md')
def prepend_find_packages(*roots):
''' Recursively traverse nested packages under the root directories
'''
packages = []
for root in roots:
packages += [root]
packages += [root + '.' + s for s in find_packages(root)]
return packages
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = ['--junitxml=result.xml']
self.test_args_cov = self.test_args + ['--cov=dipde', '--cov-report=term', '--cov-report=html','--cov-config=.coveragerc']
self.test_suite = True
def | (self):
import pytest
try:
errcode = pytest.main(self.test_args_cov)
except:
errcode = pytest.main(self.test_args)
sys.exit(errcode)
setup(
name='dipde',
version=dipde.__version__,
url='https://github.com/AllenBrainAtlas/DiPDE',
author='Nicholas Cain',
tests_require=['pytest'],
install_requires=[],
cmdclass={'test': PyTest},
author_email='[email protected]',
description='Numerical solver for coupled population density equations',
long_description='',
packages=prepend_find_packages('dipde'),
include_package_data=True,
package_data={'':['*.md', '*.txt', '*.cfg']},
platforms='any',
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: Apache Software License :: 2.0',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
extras_require={
'testing': ['pytest'],
}
)
| run_tests | identifier_name |
setup.py | # Copyright 2013 Allen Institute
# This file is part of dipde
# dipde is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dipde is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dipde. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import dipde
import io
import os
import sys
here = os.path.abspath(os.path.dirname(__file__))
def read(*filenames, **kwargs):
encoding = kwargs.get('encoding', 'utf-8')
sep = kwargs.get('sep', '\n')
buf = []
for filename in filenames:
with io.open(filename, encoding=encoding) as f:
buf.append(f.read())
return sep.join(buf)
#long_description = read('README.md')
def prepend_find_packages(*roots):
''' Recursively traverse nested packages under the root directories
'''
packages = []
for root in roots:
packages += [root]
packages += [root + '.' + s for s in find_packages(root)]
return packages
class PyTest(TestCommand):
def finalize_options(self):
|
def run_tests(self):
import pytest
try:
errcode = pytest.main(self.test_args_cov)
except:
errcode = pytest.main(self.test_args)
sys.exit(errcode)
setup(
name='dipde',
version=dipde.__version__,
url='https://github.com/AllenBrainAtlas/DiPDE',
author='Nicholas Cain',
tests_require=['pytest'],
install_requires=[],
cmdclass={'test': PyTest},
author_email='[email protected]',
description='Numerical solver for coupled population density equations',
long_description='',
packages=prepend_find_packages('dipde'),
include_package_data=True,
package_data={'':['*.md', '*.txt', '*.cfg']},
platforms='any',
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: Apache Software License :: 2.0',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
extras_require={
'testing': ['pytest'],
}
)
| TestCommand.finalize_options(self)
self.test_args = ['--junitxml=result.xml']
self.test_args_cov = self.test_args + ['--cov=dipde', '--cov-report=term', '--cov-report=html','--cov-config=.coveragerc']
self.test_suite = True | identifier_body |
20171009145235-create-user.ts | module.exports = {
up: (queryInterface: any, Sequelize: any) => {
return queryInterface.createTable('users', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
email: {
type: Sequelize.STRING
},
password: {
type: Sequelize.STRING
},
first_name: {
type: Sequelize.STRING
},
last_name: {
type: Sequelize.STRING
},
bio: {
type: Sequelize.TEXT
},
verified: Sequelize.BOOLEAN, | type: Sequelize.DATE
},
created_at: {
allowNull: false,
type: Sequelize.DATE
},
updated_at: {
allowNull: true,
type: Sequelize.DATE
},
deleted_at: {
allowNull: true,
type: Sequelize.DATE
}
});
},
down: (queryInterface: any, Sequelize: any) => {
return queryInterface.dropTable('users');
}
}; | verify_token: Sequelize.STRING,
password_reset_token: Sequelize.STRING,
password_reset_token_expired_at: {
allowNull: true, | random_line_split |
function_example.py | import collections | from sym2num import function, var
def reload_all():
"""Reload modules for testing."""
import imp
for m in (var, function):
imp.reload(m)
if __name__ == '__main__':
reload_all()
g = var.UnivariateCallable('g')
h = var.UnivariateCallable('h')
from sympy.abc import t, w, x, y, z, m
output = [x**2 + sympy.erf(x) + g(x),
sympy.cos(y) + 2*t + sympy.GoldenRatio,
z*sympy.sqrt(sympy.sin(w)+2)*h(x, 2)]
obj = {'data': [w], 'extra': {'other': [m, z]}, 'gg': g}
arguments = function.Arguments(self=obj, t=t, state=[x, y], H=h)
f = function.FunctionPrinter('f', output, arguments)
print(f.print_def())
sf = function.SymbolicSubsFunction(function.Arguments(t=t, m=[x,y]), t**2+x)
print( "\n" + "*" * 80 + "\n")
print(sf(w**4, [2*x,3*z])) |
import numpy as np
import sympy
| random_line_split |
function_example.py | import collections
import numpy as np
import sympy
from sym2num import function, var
def | ():
"""Reload modules for testing."""
import imp
for m in (var, function):
imp.reload(m)
if __name__ == '__main__':
reload_all()
g = var.UnivariateCallable('g')
h = var.UnivariateCallable('h')
from sympy.abc import t, w, x, y, z, m
output = [x**2 + sympy.erf(x) + g(x),
sympy.cos(y) + 2*t + sympy.GoldenRatio,
z*sympy.sqrt(sympy.sin(w)+2)*h(x, 2)]
obj = {'data': [w], 'extra': {'other': [m, z]}, 'gg': g}
arguments = function.Arguments(self=obj, t=t, state=[x, y], H=h)
f = function.FunctionPrinter('f', output, arguments)
print(f.print_def())
sf = function.SymbolicSubsFunction(function.Arguments(t=t, m=[x,y]), t**2+x)
print( "\n" + "*" * 80 + "\n")
print(sf(w**4, [2*x,3*z]))
| reload_all | identifier_name |
function_example.py | import collections
import numpy as np
import sympy
from sym2num import function, var
def reload_all():
|
if __name__ == '__main__':
reload_all()
g = var.UnivariateCallable('g')
h = var.UnivariateCallable('h')
from sympy.abc import t, w, x, y, z, m
output = [x**2 + sympy.erf(x) + g(x),
sympy.cos(y) + 2*t + sympy.GoldenRatio,
z*sympy.sqrt(sympy.sin(w)+2)*h(x, 2)]
obj = {'data': [w], 'extra': {'other': [m, z]}, 'gg': g}
arguments = function.Arguments(self=obj, t=t, state=[x, y], H=h)
f = function.FunctionPrinter('f', output, arguments)
print(f.print_def())
sf = function.SymbolicSubsFunction(function.Arguments(t=t, m=[x,y]), t**2+x)
print("\n" + "*" * 80 + "\n")
print(sf(w**4, [2*x,3*z]))
| """Reload modules for testing."""
import imp
for m in (var, function):
imp.reload(m) | identifier_body |
function_example.py | import collections
import numpy as np
import sympy
from sym2num import function, var
def reload_all():
"""Reload modules for testing."""
import imp
for m in (var, function):
|
if __name__ == '__main__':
reload_all()
g = var.UnivariateCallable('g')
h = var.UnivariateCallable('h')
from sympy.abc import t, w, x, y, z, m
output = [x**2 + sympy.erf(x) + g(x),
sympy.cos(y) + 2*t + sympy.GoldenRatio,
z*sympy.sqrt(sympy.sin(w)+2)*h(x, 2)]
obj = {'data': [w], 'extra': {'other': [m, z]}, 'gg': g}
arguments = function.Arguments(self=obj, t=t, state=[x, y], H=h)
f = function.FunctionPrinter('f', output, arguments)
print(f.print_def())
sf = function.SymbolicSubsFunction(function.Arguments(t=t, m=[x,y]), t**2+x)
print("\n" + "*" * 80 + "\n")
print(sf(w**4, [2*x,3*z]))
| imp.reload(m) | conditional_block |
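The conditional_block middle above is the loop body imp.reload(m). The imp module has been deprecated since Python 3.4; a present-day rewrite of this helper — assuming sym2num is importable, as the row itself does — would use importlib instead:

import importlib

from sym2num import function, var  # assumed importable, as in the row above

def reload_all():
    """Reload modules for testing."""
    for m in (var, function):
        importlib.reload(m)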
decode.rs | use std::{iter, fs, path};
use image::ImageFormat;
use criterion::{Criterion, criterion_group, criterion_main};
#[derive(Clone, Copy)]
struct BenchDef {
dir: &'static [&'static str],
files: &'static [&'static str],
format: ImageFormat,
}
fn load_all(c: &mut Criterion) | files: &[
"alpha_gif_a.gif",
"sample_1.gif",
],
format: ImageFormat::Gif,
},
BenchDef {
dir: &["hdr", "images"],
files: &[
"image1.hdr",
"rgbr4x4.hdr",
],
format: ImageFormat::Hdr,
},
BenchDef {
dir: &["ico", "images"],
files: &[
"bmp-24bpp-mask.ico",
"bmp-32bpp-alpha.ico",
"png-32bpp-alpha.ico",
"smile.ico",
],
format: ImageFormat::Ico,
},
BenchDef {
dir: &["jpg", "progressive"],
files: &[
"3.jpg",
"cat.jpg",
"test.jpg",
],
format: ImageFormat::Jpeg,
},
// TODO: pnm
// TODO: png
BenchDef {
dir: &["tga", "testsuite"],
files: &[
"cbw8.tga",
"ctc24.tga",
"ubw8.tga",
"utc24.tga",
],
format: ImageFormat::Tga,
},
BenchDef {
dir: &["tiff", "testsuite"],
files: &[
"hpredict.tiff",
"hpredict_packbits.tiff",
"mandrill.tiff",
"rgb-3c-16b.tiff",
],
format: ImageFormat::Tiff,
},
BenchDef {
dir: &["webp", "images"],
files: &[
"simple-gray.webp",
"simple-rgb.webp",
"vp8x-gray.webp",
"vp8x-rgb.webp",
],
format: ImageFormat::WebP,
},
];
for bench in BENCH_DEFS {
bench_load(c, bench);
}
}
criterion_group!(benches, load_all);
criterion_main!(benches);
fn bench_load(c: &mut Criterion, def: &BenchDef) {
let group_name = format!("load-{:?}", def.format);
let mut group = c.benchmark_group(&group_name);
let paths = IMAGE_DIR.iter().chain(def.dir);
for file_name in def.files {
let path: path::PathBuf = paths.clone().chain(iter::once(file_name)).collect();
let buf = fs::read(path).unwrap();
group.bench_function(file_name.to_owned(), |b| b.iter(|| {
image::load_from_memory_with_format(&buf, def.format).unwrap();
}));
}
}
const IMAGE_DIR: [&'static str; 3] = [".", "tests", "images"];
| {
const BENCH_DEFS: &'static [BenchDef] = &[
BenchDef {
dir: &["bmp", "images"],
files: &[
"Core_1_Bit.bmp",
"Core_4_Bit.bmp",
"Core_8_Bit.bmp",
"rgb16.bmp",
"rgb24.bmp",
"rgb32.bmp",
"pal4rle.bmp",
"pal8rle.bmp",
"rgb16-565.bmp",
"rgb32bf.bmp",
],
format: ImageFormat::Bmp,
},
BenchDef {
dir: &["gif", "simple"], | identifier_body |
decode.rs | use std::{iter, fs, path};
use image::ImageFormat;
use criterion::{Criterion, criterion_group, criterion_main};
#[derive(Clone, Copy)]
struct BenchDef {
dir: &'static [&'static str],
files: &'static [&'static str],
format: ImageFormat,
}
fn | (c: &mut Criterion) {
const BENCH_DEFS: &'static [BenchDef] = &[
BenchDef {
dir: &["bmp", "images"],
files: &[
"Core_1_Bit.bmp",
"Core_4_Bit.bmp",
"Core_8_Bit.bmp",
"rgb16.bmp",
"rgb24.bmp",
"rgb32.bmp",
"pal4rle.bmp",
"pal8rle.bmp",
"rgb16-565.bmp",
"rgb32bf.bmp",
],
format: ImageFormat::Bmp,
},
BenchDef {
dir: &["gif", "simple"],
files: &[
"alpha_gif_a.gif",
"sample_1.gif",
],
format: ImageFormat::Gif,
},
BenchDef {
dir: &["hdr", "images"],
files: &[
"image1.hdr",
"rgbr4x4.hdr",
],
format: ImageFormat::Hdr,
},
BenchDef {
dir: &["ico", "images"],
files: &[
"bmp-24bpp-mask.ico",
"bmp-32bpp-alpha.ico",
"png-32bpp-alpha.ico",
"smile.ico",
],
format: ImageFormat::Ico,
},
BenchDef {
dir: &["jpg", "progressive"],
files: &[
"3.jpg",
"cat.jpg",
"test.jpg",
],
format: ImageFormat::Jpeg,
},
// TODO: pnm
// TODO: png
BenchDef {
dir: &["tga", "testsuite"],
files: &[
"cbw8.tga",
"ctc24.tga",
"ubw8.tga",
"utc24.tga",
],
format: ImageFormat::Tga,
},
BenchDef {
dir: &["tiff", "testsuite"],
files: &[
"hpredict.tiff",
"hpredict_packbits.tiff",
"mandrill.tiff",
"rgb-3c-16b.tiff",
],
format: ImageFormat::Tiff,
},
BenchDef {
dir: &["webp", "images"],
files: &[
"simple-gray.webp",
"simple-rgb.webp",
"vp8x-gray.webp",
"vp8x-rgb.webp",
],
format: ImageFormat::WebP,
},
];
for bench in BENCH_DEFS {
bench_load(c, bench);
}
}
criterion_group!(benches, load_all);
criterion_main!(benches);
fn bench_load(c: &mut Criterion, def: &BenchDef) {
let group_name = format!("load-{:?}", def.format);
let mut group = c.benchmark_group(&group_name);
let paths = IMAGE_DIR.iter().chain(def.dir);
for file_name in def.files {
let path: path::PathBuf = paths.clone().chain(iter::once(file_name)).collect();
let buf = fs::read(path).unwrap();
group.bench_function(file_name.to_owned(), |b| b.iter(|| {
image::load_from_memory_with_format(&buf, def.format).unwrap();
}));
}
}
const IMAGE_DIR: [&'static str; 3] = [".", "tests", "images"];
| load_all | identifier_name |